hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
49dc098c0c8330e1a6b91e5b418f7367a2f98e64
| 77
|
py
|
Python
|
arxivpy/__init__.py
|
titipata/arxiv_parser
|
96e8fde4515b3fdb1896241c5f0b0d9e737b8aec
|
[
"MIT"
] | 53
|
2016-09-28T17:13:01.000Z
|
2022-03-18T03:01:11.000Z
|
arxivpy/__init__.py
|
titipata/arxiv_parser
|
96e8fde4515b3fdb1896241c5f0b0d9e737b8aec
|
[
"MIT"
] | 6
|
2016-09-28T05:37:43.000Z
|
2019-03-08T16:51:23.000Z
|
arxivpy/__init__.py
|
titipata/arxiv_parser
|
96e8fde4515b3fdb1896241c5f0b0d9e737b8aec
|
[
"MIT"
] | 20
|
2016-09-29T05:01:53.000Z
|
2022-03-18T03:01:16.000Z
|
from .arxiv import query, generate_query, generate_query_from_text, download
| 38.5
| 76
| 0.857143
| 11
| 77
| 5.636364
| 0.636364
| 0.419355
| 0.580645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 77
| 1
| 77
| 77
| 0.885714
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
49fdea2d36afc8a87f03008c9fb403933400d510
| 1,411
|
py
|
Python
|
pytealutils/applications/defaults.py
|
barnjamin/pyteal-utils
|
b4dcce3801aeb2a08ff171762d57f37ab8cbb5c1
|
[
"MIT"
] | 6
|
2021-11-08T13:20:53.000Z
|
2022-01-05T13:23:42.000Z
|
pytealutils/applications/defaults.py
|
gmcgoldr/pyteal-utils
|
3716ff74312d5136df89456e3db711037edccdcb
|
[
"MIT"
] | null | null | null |
pytealutils/applications/defaults.py
|
gmcgoldr/pyteal-utils
|
3716ff74312d5136df89456e3db711037edccdcb
|
[
"MIT"
] | 1
|
2021-12-10T12:37:53.000Z
|
2021-12-10T12:37:53.000Z
|
from pyteal import Subroutine, Expr, TealType, Approve, Reject
from .application import Application
class DefaultApprove(Application):
@staticmethod
@Subroutine(TealType.uint64)
def create() -> Expr:
return Approve()
@staticmethod
@Subroutine(TealType.uint64)
def update() -> Expr:
return Approve()
@staticmethod
@Subroutine(TealType.uint64)
def delete() -> Expr:
return Approve()
@staticmethod
@Subroutine(TealType.uint64)
def optIn() -> Expr:
return Approve()
@staticmethod
@Subroutine(TealType.uint64)
def closeOut() -> Expr:
return Approve()
@staticmethod
@Subroutine(TealType.uint64)
def clearState() -> Expr:
return Approve()
class DefaultReject(Application):
@staticmethod
@Subroutine(TealType.uint64)
def create() -> Expr:
return Reject()
@staticmethod
@Subroutine(TealType.uint64)
def update() -> Expr:
return Reject()
@staticmethod
@Subroutine(TealType.uint64)
def delete() -> Expr:
return Reject()
@staticmethod
@Subroutine(TealType.uint64)
def optIn() -> Expr:
return Reject()
@staticmethod
@Subroutine(TealType.uint64)
def closeOut() -> Expr:
return Reject()
@staticmethod
@Subroutine(TealType.uint64)
def clearState() -> Expr:
return Reject()
| 21.059701
| 62
| 0.630758
| 126
| 1,411
| 7.063492
| 0.174603
| 0.296629
| 0.404494
| 0.485393
| 0.850562
| 0.850562
| 0.850562
| 0.850562
| 0.148315
| 0
| 0
| 0.022967
| 0.259391
| 1,411
| 66
| 63
| 21.378788
| 0.828708
| 0
| 0
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.230769
| true
| 0
| 0.038462
| 0.230769
| 0.538462
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 12
|
b717efb94d05b636e9ed03fde67aceef4563c02a
| 3,020
|
py
|
Python
|
test/test_core.py
|
mcieslik-mctp/moke
|
5768245b66d35d23bf2d2c918657aa2ce0061197
|
[
"MIT"
] | 1
|
2018-11-20T20:39:28.000Z
|
2018-11-20T20:39:28.000Z
|
test/test_core.py
|
mcieslik-mctp/moke
|
5768245b66d35d23bf2d2c918657aa2ce0061197
|
[
"MIT"
] | 1
|
2018-12-13T21:06:54.000Z
|
2018-12-13T21:06:54.000Z
|
test/test_core.py
|
mcieslik-mctp/moke
|
5768245b66d35d23bf2d2c918657aa2ce0061197
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for ``moke.core``
"""
import unittest
import moke.core
import os
from moke.util import run_app
class Test_util(unittest.TestCase):
def test_path(self):
assert moke.util.path
def setUp(self):
os.chdir("scripts")
def tearDown(self):
os.chdir("..")
def test_run0(self):
os.chdir("..")
ret, out, err, cmd = run_app("../bin/moke")
assert ret == 1
assert out == ""
assert err
assert cmd == "../bin/moke"
os.chdir("scripts")
def test_run1(self):
ret, out, err, cmd = run_app("../../bin/moke")
assert ret == 2
assert cmd == "../../bin/moke"
assert out == ""
assert err
def test_run2(self):
ret, out, err, cmd = run_app("../../bin/moke")
assert ret == 2
assert out == ""
assert "usage: mokefile.py" in err
assert cmd == "../../bin/moke"
def test_grop1(self):
ret, out, err, cmd = run_app("./grop.py")
assert ret == 2
assert out == ""
assert "usage: grop.py" in err
assert cmd == './grop.py'
def test_grop2(self):
ret, out, err, cmd = run_app('cat ../data/grop.inp | ./grop.py ".*\(\d{2}\).*"')
assert out == "a line with a number (42)\n"
assert ret == 0
def test_grop1(self):
ret, out, err, cmd = run_app("./grop.py")
assert ret == 2
assert out == ""
assert "usage: grop.py" in err
assert cmd == './grop.py'
def test_grop2(self):
ret, out, err, cmd = run_app('cat ../data/grop.inp | ./grop.py ".*\(\d{2}\).*"')
assert out == "a line with a number (42)\n"
assert ret == 0
def test_mf1(self):
ret, out, err, cmd = run_app("moke fromdef_int")
assert ret == 0
def test_mf2(self):
ret, out, err, cmd = run_app("moke fromdef_float")
assert ret == 0
def test_mf3(self):
ret, out, err, cmd = run_app("echo 1 | moke fromdef_path_r")
assert ret == 0
def test_mf3(self):
ret, out, err, cmd = run_app("echo 1 | moke fromdef_path_w")
assert ret == 0
def test_mf4(self):
ret, out, err, cmd = run_app("moke fromdoc_none_int -i 10")
assert ret == 0, err
def test_mf1(self):
ret, out, err, cmd = run_app("moke fromdef_int")
assert ret == 0
def test_mf2(self):
ret, out, err, cmd = run_app("moke fromdef_float")
assert ret == 0
def test_mf3(self):
ret, out, err, cmd = run_app("echo 1 | moke fromdef_path_r")
assert ret == 0
def test_mf3(self):
ret, out, err, cmd = run_app("echo 1 | moke fromdef_path_w")
assert ret == 0
def test_mf4(self):
ret, out, err, cmd = run_app("../../bin/moke fromdoc_none_int -i 10")
assert ret == 0, err
if __name__ == "__main__":
unittest.main()
| 26.964286
| 88
| 0.522848
| 426
| 3,020
| 3.56338
| 0.171362
| 0.071146
| 0.100791
| 0.134387
| 0.774045
| 0.746377
| 0.746377
| 0.737154
| 0.737154
| 0.735178
| 0
| 0.023937
| 0.322185
| 3,020
| 112
| 89
| 26.964286
| 0.717636
| 0.021854
| 0
| 0.759036
| 0
| 0
| 0.196877
| 0
| 0
| 0
| 0
| 0
| 0.421687
| 1
| 0.240964
| false
| 0
| 0.048193
| 0
| 0.301205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3fd7144dc66d51a1d8592434bb64a49d9e5b5bbc
| 25,893
|
py
|
Python
|
src/python_pachyderm/proto/v2/identity/identity_pb2_grpc.py
|
sjezewski/pypachy
|
4bc022d0c73140475f9bd0acd5c0e7204609de26
|
[
"Apache-2.0"
] | 57
|
2018-02-25T16:23:47.000Z
|
2022-02-08T08:48:12.000Z
|
src/python_pachyderm/proto/v2/identity/identity_pb2_grpc.py
|
sjezewski/pypachy
|
4bc022d0c73140475f9bd0acd5c0e7204609de26
|
[
"Apache-2.0"
] | 209
|
2018-02-16T14:31:25.000Z
|
2022-03-15T15:24:19.000Z
|
src/python_pachyderm/proto/v2/identity/identity_pb2_grpc.py
|
sjezewski/pypachy
|
4bc022d0c73140475f9bd0acd5c0e7204609de26
|
[
"Apache-2.0"
] | 23
|
2018-02-16T15:31:46.000Z
|
2022-03-09T20:41:31.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from python_pachyderm.proto.v2.identity import identity_pb2 as python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2
class APIStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SetIdentityServerConfig = channel.unary_unary(
'/identity_v2.API/SetIdentityServerConfig',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.SetIdentityServerConfigRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.SetIdentityServerConfigResponse.FromString,
)
self.GetIdentityServerConfig = channel.unary_unary(
'/identity_v2.API/GetIdentityServerConfig',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIdentityServerConfigRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIdentityServerConfigResponse.FromString,
)
self.CreateIDPConnector = channel.unary_unary(
'/identity_v2.API/CreateIDPConnector',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateIDPConnectorRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateIDPConnectorResponse.FromString,
)
self.UpdateIDPConnector = channel.unary_unary(
'/identity_v2.API/UpdateIDPConnector',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateIDPConnectorRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateIDPConnectorResponse.FromString,
)
self.ListIDPConnectors = channel.unary_unary(
'/identity_v2.API/ListIDPConnectors',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListIDPConnectorsRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListIDPConnectorsResponse.FromString,
)
self.GetIDPConnector = channel.unary_unary(
'/identity_v2.API/GetIDPConnector',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIDPConnectorRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIDPConnectorResponse.FromString,
)
self.DeleteIDPConnector = channel.unary_unary(
'/identity_v2.API/DeleteIDPConnector',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteIDPConnectorRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteIDPConnectorResponse.FromString,
)
self.CreateOIDCClient = channel.unary_unary(
'/identity_v2.API/CreateOIDCClient',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateOIDCClientRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateOIDCClientResponse.FromString,
)
self.UpdateOIDCClient = channel.unary_unary(
'/identity_v2.API/UpdateOIDCClient',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateOIDCClientRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateOIDCClientResponse.FromString,
)
self.GetOIDCClient = channel.unary_unary(
'/identity_v2.API/GetOIDCClient',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetOIDCClientRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetOIDCClientResponse.FromString,
)
self.ListOIDCClients = channel.unary_unary(
'/identity_v2.API/ListOIDCClients',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListOIDCClientsRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListOIDCClientsResponse.FromString,
)
self.DeleteOIDCClient = channel.unary_unary(
'/identity_v2.API/DeleteOIDCClient',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteOIDCClientRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteOIDCClientResponse.FromString,
)
self.DeleteAll = channel.unary_unary(
'/identity_v2.API/DeleteAll',
request_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteAllRequest.SerializeToString,
response_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteAllResponse.FromString,
)
class APIServicer(object):
"""Missing associated documentation comment in .proto file."""
def SetIdentityServerConfig(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIdentityServerConfig(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateIDPConnector(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateIDPConnector(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListIDPConnectors(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetIDPConnector(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteIDPConnector(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateOIDCClient(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateOIDCClient(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetOIDCClient(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListOIDCClients(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteOIDCClient(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteAll(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_APIServicer_to_server(servicer, server):
rpc_method_handlers = {
'SetIdentityServerConfig': grpc.unary_unary_rpc_method_handler(
servicer.SetIdentityServerConfig,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.SetIdentityServerConfigRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.SetIdentityServerConfigResponse.SerializeToString,
),
'GetIdentityServerConfig': grpc.unary_unary_rpc_method_handler(
servicer.GetIdentityServerConfig,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIdentityServerConfigRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIdentityServerConfigResponse.SerializeToString,
),
'CreateIDPConnector': grpc.unary_unary_rpc_method_handler(
servicer.CreateIDPConnector,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateIDPConnectorRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateIDPConnectorResponse.SerializeToString,
),
'UpdateIDPConnector': grpc.unary_unary_rpc_method_handler(
servicer.UpdateIDPConnector,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateIDPConnectorRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateIDPConnectorResponse.SerializeToString,
),
'ListIDPConnectors': grpc.unary_unary_rpc_method_handler(
servicer.ListIDPConnectors,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListIDPConnectorsRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListIDPConnectorsResponse.SerializeToString,
),
'GetIDPConnector': grpc.unary_unary_rpc_method_handler(
servicer.GetIDPConnector,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIDPConnectorRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIDPConnectorResponse.SerializeToString,
),
'DeleteIDPConnector': grpc.unary_unary_rpc_method_handler(
servicer.DeleteIDPConnector,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteIDPConnectorRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteIDPConnectorResponse.SerializeToString,
),
'CreateOIDCClient': grpc.unary_unary_rpc_method_handler(
servicer.CreateOIDCClient,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateOIDCClientRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateOIDCClientResponse.SerializeToString,
),
'UpdateOIDCClient': grpc.unary_unary_rpc_method_handler(
servicer.UpdateOIDCClient,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateOIDCClientRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateOIDCClientResponse.SerializeToString,
),
'GetOIDCClient': grpc.unary_unary_rpc_method_handler(
servicer.GetOIDCClient,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetOIDCClientRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetOIDCClientResponse.SerializeToString,
),
'ListOIDCClients': grpc.unary_unary_rpc_method_handler(
servicer.ListOIDCClients,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListOIDCClientsRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListOIDCClientsResponse.SerializeToString,
),
'DeleteOIDCClient': grpc.unary_unary_rpc_method_handler(
servicer.DeleteOIDCClient,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteOIDCClientRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteOIDCClientResponse.SerializeToString,
),
'DeleteAll': grpc.unary_unary_rpc_method_handler(
servicer.DeleteAll,
request_deserializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteAllRequest.FromString,
response_serializer=python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteAllResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'identity_v2.API', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class API(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def SetIdentityServerConfig(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/SetIdentityServerConfig',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.SetIdentityServerConfigRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.SetIdentityServerConfigResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetIdentityServerConfig(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/GetIdentityServerConfig',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIdentityServerConfigRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIdentityServerConfigResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateIDPConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/CreateIDPConnector',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateIDPConnectorRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateIDPConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateIDPConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/UpdateIDPConnector',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateIDPConnectorRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateIDPConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListIDPConnectors(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/ListIDPConnectors',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListIDPConnectorsRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListIDPConnectorsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetIDPConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/GetIDPConnector',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIDPConnectorRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetIDPConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DeleteIDPConnector(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/DeleteIDPConnector',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteIDPConnectorRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteIDPConnectorResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def CreateOIDCClient(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/CreateOIDCClient',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateOIDCClientRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.CreateOIDCClientResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def UpdateOIDCClient(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/UpdateOIDCClient',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateOIDCClientRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.UpdateOIDCClientResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def GetOIDCClient(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/GetOIDCClient',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetOIDCClientRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.GetOIDCClientResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ListOIDCClients(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/identity_v2.API/ListOIDCClients',
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListOIDCClientsRequest.SerializeToString,
python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.ListOIDCClientsResponse.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def DeleteOIDCClient(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary call to /identity_v2.API/DeleteOIDCClient.

        Generated protoc stub: serializes the DeleteOIDCClientRequest, invokes
        the RPC via the experimental gRPC API, and returns the parsed
        DeleteOIDCClientResponse.
        """
        return grpc.experimental.unary_unary(request, target, '/identity_v2.API/DeleteOIDCClient',
            python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteOIDCClientRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteOIDCClientResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
    @staticmethod
    def DeleteAll(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """One-shot unary-unary call to /identity_v2.API/DeleteAll.

        Generated protoc stub: serializes the DeleteAllRequest, invokes the
        RPC via the experimental gRPC API, and returns the parsed
        DeleteAllResponse.
        """
        return grpc.experimental.unary_unary(request, target, '/identity_v2.API/DeleteAll',
            python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteAllRequest.SerializeToString,
            python__pachyderm_dot_proto_dot_v2_dot_identity_dot_identity__pb2.DeleteAllResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 55.924406
| 156
| 0.720233
| 2,461
| 25,893
| 7.067452
| 0.052824
| 0.099925
| 0.081757
| 0.104467
| 0.869258
| 0.869258
| 0.846835
| 0.818433
| 0.815328
| 0.808774
| 0
| 0.009256
| 0.219751
| 25,893
| 462
| 157
| 56.045455
| 0.851656
| 0.044105
| 0
| 0.517413
| 1
| 0
| 0.069254
| 0.037428
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069652
| false
| 0
| 0.004975
| 0.032338
| 0.114428
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ffea17658edd656c315ff8718911b71cac901da
| 8,327
|
py
|
Python
|
forecasting/short_term_forecasting.py
|
nareshram256/EnergyManagementSystem
|
2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa
|
[
"MIT"
] | 9
|
2020-04-24T14:34:16.000Z
|
2022-01-25T07:16:03.000Z
|
forecasting/short_term_forecasting.py
|
casemsee/EnergyManagementSystem
|
2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa
|
[
"MIT"
] | null | null | null |
forecasting/short_term_forecasting.py
|
casemsee/EnergyManagementSystem
|
2a48ba3b9bf7ff3003c197ee43ea9efbfbe42baa
|
[
"MIT"
] | 7
|
2019-09-19T13:26:02.000Z
|
2021-11-27T09:53:54.000Z
|
# Short_term forecasting for local energy management system
# Include the pv forecasting, wp forecasting,
# In this forecasting system, the tensor flow will be deployed and used.
# The offline training and on-line forecasting are adopted.
from modelling.database.database_format import db_short_term_forecasting,one_minute_history_data
import random
from configuration.configuration_time_line import default_time
def blank_forecasting_result(*args):
    """Return an all-zero db_short_term_forecasting row for one time stamp.

    args[0]: target time stamp, stored as TIME_STAMP. Every forecast column
    (AC/NAC/DC/NDC loads, PV, WP, price) is initialised to zero.
    """
    target_time = args[0]
    zero_fields = dict(
        TIME_STAMP=target_time,
        AC_PD=0,
        NAC_PD=0,
        DC_PD=0,
        NDC_PD=0,
        PV_PG=0,
        WP_PG=0,
        PRICE=0,
    )
    return db_short_term_forecasting(**zero_fields)
def short_term_forecasting_pv(*args):
    """Short-term photovoltaic forecast (stub: random draw).

    args[0]: SQLAlchemy session for the forecasting DB.
    args[1]: target time stamp (TIME_STAMP key).
    Returns the value written to the PV_PG column.
    """
    session, target_time = args[0], args[1]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    forecast = random.random()  # placeholder forecast value
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.PV_PG = forecast
    session.commit()
    return forecast
def short_term_forecasting_wp(*args):
    """Short-term wind-power forecast (stub: random draw).

    args[0]: SQLAlchemy session for the forecasting DB.
    args[1]: target time stamp (TIME_STAMP key).
    Returns the value written to the WP_PG column.
    """
    session, target_time = args[0], args[1]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    forecast = random.random()  # placeholder forecast value
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.WP_PG = forecast
    session.commit()
    return forecast
def short_term_forecasting_load_ac(*args):
    """Short-term forecast for the critical AC load (stub: random draw).

    args[0]: SQLAlchemy session for the forecasting DB.
    args[1]: target time stamp (TIME_STAMP key).
    Returns the value written to the AC_PD column.
    """
    session, target_time = args[0], args[1]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    forecast = random.random()  # placeholder forecast value
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.AC_PD = forecast
    session.commit()
    return forecast
def short_term_forecasting_load_uac(*args):
    """Short-term forecast for the non-critical AC load (stub: random draw).

    args[0]: SQLAlchemy session for the forecasting DB.
    args[1]: target time stamp (TIME_STAMP key).
    Returns the forecast value.

    Bug fix: the original assigned ``row.UAC_PD``, but the forecasting table
    (see blank_forecasting_result) has no UAC_PD column — the non-critical AC
    column is NAC_PD. Setting a non-column attribute meant the commit
    persisted nothing. NOTE(review): column name taken from
    blank_forecasting_result's fields — confirm against the model definition.
    """
    session = args[0]
    target_time = args[1]
    if session.query(db_short_term_forecasting).filter(
            db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # Insert an all-zero placeholder row for unseen time stamps.
        session.add(blank_forecasting_result(target_time))
        session.commit()
    forecast = random.random()  # placeholder forecast value
    row = session.query(db_short_term_forecasting).filter_by(TIME_STAMP=target_time).first()
    row.NAC_PD = forecast
    session.commit()
    return forecast
def short_term_forecasting_load_dc(*args):
    """Short-term forecast for the critical DC load (stub: random draw).

    args[0]: SQLAlchemy session for the forecasting DB.
    args[1]: target time stamp (TIME_STAMP key).
    Returns the value written to the DC_PD column.
    """
    session, target_time = args[0], args[1]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    forecast = random.random()  # placeholder forecast value
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.DC_PD = forecast
    session.commit()
    return forecast
def short_term_forecasting_load_udc(*args):
    """Short-term forecast for the non-critical DC load (stub: random draw).

    args[0]: SQLAlchemy session for the forecasting DB.
    args[1]: target time stamp (TIME_STAMP key).
    Returns the forecast value.

    Bug fix: the original assigned ``row.UDC_PD``, but the forecasting table
    (see blank_forecasting_result) has no UDC_PD column — the non-critical DC
    column is NDC_PD. Setting a non-column attribute meant the commit
    persisted nothing. NOTE(review): column name taken from
    blank_forecasting_result's fields — confirm against the model definition.
    """
    session = args[0]
    target_time = args[1]
    if session.query(db_short_term_forecasting).filter(
            db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        # Insert an all-zero placeholder row for unseen time stamps.
        session.add(blank_forecasting_result(target_time))
        session.commit()
    forecast = random.random()  # placeholder forecast value
    row = session.query(db_short_term_forecasting).filter_by(TIME_STAMP=target_time).first()
    row.NDC_PD = forecast
    session.commit()
    return forecast
def short_term_forecasting_pv_history(*args):
    """Photovoltaic forecast taken from one-minute history data.

    args[0]: session for the forecasting DB.
    args[1]: session for the history DB (closed before returning).
    args[2]: target time stamp.
    Returns the PV_PG value copied from history into the forecast row.
    """
    session, session_source, target_time = args[0], args[1], args[2]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    # Map the absolute target time onto the history table's integer index.
    history_index = int((target_time - default_time["Base_time"]) / default_time["Time_step_opf"])
    source_row = session_source.query(one_minute_history_data).filter_by(
        TIME_STAMP=history_index).first()
    forecast = source_row.PV_PG
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.PV_PG = forecast
    session.commit()
    session_source.close()
    return forecast
def short_term_forecasting_wp_history(*args):
    """Wind-power forecast taken from one-minute history data.

    args[0]: session for the forecasting DB.
    args[1]: session for the history DB (closed before returning).
    args[2]: target time stamp.
    Returns the WP_PG value copied from history into the forecast row.
    """
    session, session_source, target_time = args[0], args[1], args[2]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    # Map the absolute target time onto the history table's integer index.
    history_index = int((target_time - default_time["Base_time"]) / default_time["Time_step_opf"])
    source_row = session_source.query(one_minute_history_data).filter_by(
        TIME_STAMP=history_index).first()
    forecast = source_row.WP_PG
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.WP_PG = forecast
    session.commit()
    session_source.close()
    return forecast
def short_term_forecasting_load_ac_history(*args):
    """Critical AC load forecast taken from one-minute history data.

    args[0]: session for the forecasting DB.
    args[1]: session for the history DB (closed before returning).
    args[2]: target time stamp.
    Returns the AC_PD value copied from history into the forecast row.
    """
    session, session_source, target_time = args[0], args[1], args[2]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    # Map the absolute target time onto the history table's integer index.
    history_index = int((target_time - default_time["Base_time"]) / default_time["Time_step_opf"])
    source_row = session_source.query(one_minute_history_data).filter_by(
        TIME_STAMP=history_index).first()
    forecast = source_row.AC_PD
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.AC_PD = forecast
    session.commit()
    session_source.close()
    return forecast
def short_term_forecasting_load_nac_history(*args):
    """Non-critical AC load forecast taken from one-minute history data.

    args[0]: session for the forecasting DB.
    args[1]: session for the history DB (closed before returning).
    args[2]: target time stamp.
    Returns the NAC_PD value copied from history into the forecast row.
    """
    session, session_source, target_time = args[0], args[1], args[2]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    # Map the absolute target time onto the history table's integer index.
    history_index = int((target_time - default_time["Base_time"]) / default_time["Time_step_opf"])
    source_row = session_source.query(one_minute_history_data).filter_by(
        TIME_STAMP=history_index).first()
    forecast = source_row.NAC_PD
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.NAC_PD = forecast
    session.commit()
    session_source.close()
    return forecast
def short_term_forecasting_load_dc_history(*args):
    """Critical DC load forecast taken from one-minute history data.

    args[0]: session for the forecasting DB.
    args[1]: session for the history DB (closed before returning).
    args[2]: target time stamp.
    Returns the DC_PD value copied from history into the forecast row.
    """
    session, session_source, target_time = args[0], args[1], args[2]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    # Map the absolute target time onto the history table's integer index.
    history_index = int((target_time - default_time["Base_time"]) / default_time["Time_step_opf"])
    source_row = session_source.query(one_minute_history_data).filter_by(
        TIME_STAMP=history_index).first()
    forecast = source_row.DC_PD
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.DC_PD = forecast
    session.commit()
    session_source.close()
    return forecast
def short_term_forecasting_load_ndc_history(*args):
    """Non-critical DC load forecast taken from one-minute history data.

    args[0]: session for the forecasting DB.
    args[1]: session for the history DB (closed before returning).
    args[2]: target time stamp.
    Returns the NDC_PD value copied from history into the forecast row.
    """
    session, session_source, target_time = args[0], args[1], args[2]
    base_query = session.query(db_short_term_forecasting)
    # First time this stamp is seen: insert an all-zero placeholder row.
    if base_query.filter(db_short_term_forecasting.TIME_STAMP == target_time).count() == 0:
        session.add(blank_forecasting_result(target_time))
        session.commit()
    # Map the absolute target time onto the history table's integer index.
    history_index = int((target_time - default_time["Base_time"]) / default_time["Time_step_opf"])
    source_row = session_source.query(one_minute_history_data).filter_by(
        TIME_STAMP=history_index).first()
    forecast = source_row.NDC_PD
    record = base_query.filter_by(TIME_STAMP=target_time).first()
    record.NDC_PD = forecast
    session.commit()
    session_source.close()
    return forecast
| 30.726937
| 100
| 0.778312
| 1,271
| 8,327
| 4.731707
| 0.064516
| 0.09428
| 0.209511
| 0.139009
| 0.889757
| 0.881111
| 0.878783
| 0.841204
| 0.79581
| 0.76987
| 0
| 0.006793
| 0.116128
| 8,327
| 271
| 101
| 30.726937
| 0.810326
| 0.090549
| 0
| 0.758974
| 0
| 0
| 0.017467
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.015385
| 0
| 0.148718
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b208b9baa3be87aab480c225cedd40b67aee0cc9
| 96
|
py
|
Python
|
palettable/colorbrewer/sequential.py
|
chebee7i/palettable
|
9e6202080837efc6ce55d9c040ffa73b47cb6795
|
[
"MIT"
] | null | null | null |
palettable/colorbrewer/sequential.py
|
chebee7i/palettable
|
9e6202080837efc6ce55d9c040ffa73b47cb6795
|
[
"MIT"
] | null | null | null |
palettable/colorbrewer/sequential.py
|
chebee7i/palettable
|
9e6202080837efc6ce55d9c040ffa73b47cb6795
|
[
"MIT"
] | 1
|
2022-02-09T07:06:24.000Z
|
2022-02-09T07:06:24.000Z
|
# Populate this module's namespace with every ColorBrewer 'sequential'
# palette returned by the shared loader, so users can do e.g.
# `from palettable.colorbrewer.sequential import <palette_name>`.
from .colorbrewer import _load_maps_by_type

globals().update(_load_maps_by_type('sequential'))
| 24
| 50
| 0.833333
| 14
| 96
| 5.142857
| 0.714286
| 0.222222
| 0.277778
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 96
| 3
| 51
| 32
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
b7a248dc007d30eea41405be7519737dc8451723
| 771
|
py
|
Python
|
tests/unit_tests/gov_uk_dashboards/formatting/test_round_thousands_to_1dp.py
|
communitiesuk/pkg_gov_uk_dashboards
|
b92fc51cde31d929dd11ba3acb9256b93f865986
|
[
"MIT"
] | 1
|
2022-01-31T10:15:33.000Z
|
2022-01-31T10:15:33.000Z
|
tests/unit_tests/gov_uk_dashboards/formatting/test_round_thousands_to_1dp.py
|
communitiesuk/GOV_UK_Colours
|
5509b452358f345b370be8fcfd708e898961c03a
|
[
"MIT"
] | 2
|
2022-02-04T12:38:37.000Z
|
2022-03-21T09:25:27.000Z
|
tests/unit_tests/gov_uk_dashboards/formatting/test_round_thousands_to_1dp.py
|
communitiesuk/Plotly_utilities
|
f1dfb48bec17b3b089b2760a132ba2a31942a39a
|
[
"MIT"
] | 1
|
2022-03-31T12:25:40.000Z
|
2022-03-31T12:25:40.000Z
|
from gov_uk_dashboards.formatting.rounding import round_thousands_to_1dp
def test_round_thousands_to_1dp_returns_rounded_billions():
    """Billions round to one decimal place of their leading group."""
    cases = {
        1_234_567_890: 1_200_000_000,
        45_678_987_654: 45_700_000_000,
    }
    for raw, expected in cases.items():
        assert round_thousands_to_1dp(raw) == expected
def test_round_thousands_to_1dp_returns_rounded_millions():
    """Millions round to one decimal place of their leading group."""
    cases = {
        1_234_567: 1_200_000,
        45_678_987: 45_700_000,
    }
    for raw, expected in cases.items():
        assert round_thousands_to_1dp(raw) == expected
def test_round_thousands_to_1dp_returns_rounded_thousands():
    """Thousands round to one decimal place of their leading group."""
    cases = {
        1_234: 1_200,
        45_678: 45_700,
    }
    for raw, expected in cases.items():
        assert round_thousands_to_1dp(raw) == expected
def test_round_thousands_to_1dp_returns_rounded_units():
    """Values below one thousand round to one decimal place."""
    cases = {
        1.234: 1.2,
        45.678: 45.7,
    }
    for raw, expected in cases.items():
        assert round_thousands_to_1dp(raw) == expected
| 35.045455
| 72
| 0.8262
| 131
| 771
| 4.244275
| 0.244275
| 0.327338
| 0.374101
| 0.444245
| 0.766187
| 0.766187
| 0.766187
| 0.766187
| 0.284173
| 0
| 0
| 0.170306
| 0.108949
| 771
| 21
| 73
| 36.714286
| 0.63901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.615385
| 1
| 0.307692
| true
| 0
| 0.076923
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b7a4a7f579b22ee53f15f355e23da97821c91ead
| 6,332
|
py
|
Python
|
pkgs/conf-pkg/src/genie/libs/conf/keychains/nxos/tests/test_keychains.py
|
CiscoTestAutomation/genielibs
|
becee8a1a85f4973e00859e3244e2c8fe45a394c
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/conf-pkg/src/genie/libs/conf/keychains/nxos/tests/test_keychains.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/conf-pkg/src/genie/libs/conf/keychains/nxos/tests/test_keychains.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
#!/usr/bin/env python
# import python
import unittest
# import genie
from genie.tests.conf import TestCase
from genie.conf import Genie
from genie.conf.base import Testbed, Device, Interface
# import genie.libs
from genie.libs.conf.keychains import Keychains
class test_keychains(TestCase):
    """Tests that NXOS key-chain config/unconfig text is rendered correctly
    for standard, macsec and tunnel-encryption key chains."""

    def test_keychains_cfg(self):
        # Standard key chain: key-string alone, then key-string with an
        # encryption type, each followed by the matching unconfig.
        Genie.testbed = testbed = Testbed()
        dev1 = Device(testbed=testbed, name='PE1', os='nxos')
        keychains = Keychains()
        self.assertIs(keychains.testbed, testbed)
        dev1.add_feature(keychains)
        keychains.device_attr[dev1].keychain_attr['1'].key_id_attr[
            '2'].key_string = 'test'
        cfgs = keychains.build_config(apply=False)
        self.assertCountEqual(cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertMultiLineEqual(
            str(cfgs[dev1.name]), '\n'.join([
                'key chain 1', ' key 2', ' key-string test', ' exit', ' exit'
            ]))
        un_cfgs = keychains.build_unconfig(apply=False)
        self.assertCountEqual(un_cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertEqual(str(un_cfgs[dev1.name]),
                         '\n'.join(['no key chain 1']))
        # Re-apply the key string together with an encryption type (7).
        keychains.device_attr[dev1].keychain_attr['1'].key_id_attr[
            '2'].key_string = 'test'
        keychains.device_attr[dev1].keychain_attr['1'].key_id_attr[
            '2'].key_enc_type = 7
        cfgs = keychains.build_config(apply=False)
        self.assertCountEqual(cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertMultiLineEqual(
            str(cfgs[dev1.name]), '\n'.join([
                'key chain 1', ' key 2', ' key-string 7 test', ' exit',
                ' exit'
            ]))
        un_cfgs = keychains.build_unconfig(apply=False)
        self.assertCountEqual(un_cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertEqual(str(un_cfgs[dev1.name]),
                         '\n'.join(['no key chain 1']))

    def test_ms_keychains_cfg(self):
        # Macsec key chain (ms_keychain_attr): key-octet-string, optionally
        # with encryption type and cryptographic algorithm.
        Genie.testbed = testbed = Testbed()
        dev1 = Device(testbed=testbed, name='PE1', os='nxos')
        keychains = Keychains()
        self.assertIs(keychains.testbed, testbed)
        dev1.add_feature(keychains)
        keychains.device_attr[dev1].ms_keychain_attr['1'].key_id_attr[
            '2'].key_string = 'test'
        cfgs = keychains.build_config(apply=False)
        self.assertCountEqual(cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertMultiLineEqual(
            str(cfgs[dev1.name]), '\n'.join([
                'key chain 1 macsec', ' key 2', ' key-octet-string test',
                ' exit', ' exit'
            ]))
        un_cfgs = keychains.build_unconfig(apply=False)
        self.assertCountEqual(un_cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertEqual(str(un_cfgs[dev1.name]),
                         '\n'.join(['no key chain 1 macsec']))
        # Add encryption type and crypto algorithm to the same key.
        keychains.device_attr[dev1].ms_keychain_attr['1'].key_id_attr[
            '2'].key_string = 'test'
        keychains.device_attr[dev1].ms_keychain_attr['1'].key_id_attr[
            '2'].key_enc_type = 7
        keychains.device_attr[dev1].ms_keychain_attr['1'].key_id_attr[
            '2'].crypto_algo = 'aes-128-cmac'
        cfgs = keychains.build_config(apply=False)
        self.assertCountEqual(cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertMultiLineEqual(
            str(cfgs[dev1.name]), '\n'.join([
                'key chain 1 macsec', ' key 2',
                ' key-octet-string 7 test cryptographic-algorithm AES_128_CMAC',
                ' exit', ' exit'
            ]))
        un_cfgs = keychains.build_unconfig(apply=False)
        self.assertCountEqual(un_cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertEqual(str(un_cfgs[dev1.name]),
                         '\n'.join(['no key chain 1 macsec']))

    def test_te_keychains_cfg(self):
        # Tunnel-encryption key chain (te_keychain_attr): also covers
        # send-lifetime start/duration rendering.
        Genie.testbed = testbed = Testbed()
        dev1 = Device(testbed=testbed, name='PE1', os='nxos')
        keychains = Keychains()
        self.assertIs(keychains.testbed, testbed)
        dev1.add_feature(keychains)
        keychains.device_attr[dev1].te_keychain_attr['1'].key_id_attr[
            '2'].key_string = 'test'
        cfgs = keychains.build_config(apply=False)
        self.assertCountEqual(cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertMultiLineEqual(
            str(cfgs[dev1.name]), '\n'.join([
                'key chain 1 tunnel-encryption', ' key 2',
                ' key-octet-string test', ' exit', ' exit'
            ]))
        un_cfgs = keychains.build_unconfig(apply=False)
        self.assertCountEqual(un_cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertEqual(str(un_cfgs[dev1.name]),
                         '\n'.join(['no key chain 1 tunnel-encryption']))
        # Add encryption type, crypto algorithm and a send-lifetime window.
        keychains.device_attr[dev1].te_keychain_attr['1'].key_id_attr[
            '2'].key_string = 'test'
        keychains.device_attr[dev1].te_keychain_attr['1'].key_id_attr[
            '2'].key_enc_type = 7
        keychains.device_attr[dev1].te_keychain_attr['1'].key_id_attr[
            '2'].crypto_algo = 'aes-128-cmac'
        keychains.device_attr[dev1].te_keychain_attr['1'].key_id_attr[
            '2'].lifetime_start = '23:00:00 Jul 31 2021'
        keychains.device_attr[dev1].te_keychain_attr['1'].key_id_attr[
            '2'].lifetime_duration = 1800
        cfgs = keychains.build_config(apply=False)
        self.assertCountEqual(cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertMultiLineEqual(
            str(cfgs[dev1.name]), '\n'.join([
                'key chain 1 tunnel-encryption', ' key 2',
                ' key-octet-string 7 test cryptographic-algorithm AES_128_CMAC',
                ' send-lifetime 23:00:00 Jul 31 2021 duration 1800', ' exit',
                ' exit'
            ]))
        un_cfgs = keychains.build_unconfig(apply=False)
        self.assertCountEqual(un_cfgs.keys(), [dev1.name])
        self.maxDiff = None
        self.assertEqual(str(un_cfgs[dev1.name]),
                         '\n'.join(['no key chain 1 tunnel-encryption']))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 37.247059
| 81
| 0.587808
| 755
| 6,332
| 4.754967
| 0.111258
| 0.053482
| 0.068802
| 0.083287
| 0.909749
| 0.909749
| 0.901393
| 0.901393
| 0.901393
| 0.901393
| 0
| 0.030501
| 0.275111
| 6,332
| 169
| 82
| 37.467456
| 0.751634
| 0.010265
| 0
| 0.856061
| 0
| 0
| 0.120549
| 0.007345
| 0
| 0
| 0
| 0
| 0.204545
| 1
| 0.022727
| false
| 0
| 0.037879
| 0
| 0.068182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7abd8c8cbd6e992516860ba84ea777cc38c24ed
| 147
|
py
|
Python
|
srcWatteco/TICs/__init__.py
|
OStephan29/Codec-Python
|
76d651bb23daf1d9307c8b84533d9f24a59cea28
|
[
"BSD-3-Clause"
] | 1
|
2022-01-12T15:46:58.000Z
|
2022-01-12T15:46:58.000Z
|
srcWatteco/TICs/__init__.py
|
OStephan29/Codec-Python
|
76d651bb23daf1d9307c8b84533d9f24a59cea28
|
[
"BSD-3-Clause"
] | null | null | null |
srcWatteco/TICs/__init__.py
|
OStephan29/Codec-Python
|
76d651bb23daf1d9307c8b84533d9f24a59cea28
|
[
"BSD-3-Clause"
] | 1
|
2021-10-05T08:40:15.000Z
|
2021-10-05T08:40:15.000Z
|
from ._TIC_Tools import *
from ._TIC_Types import *
from .TIC_CBE import *
from .TIC_STD import *
from .TIC_PMEPMI import *
from .TIC_ICE import *
| 21
| 25
| 0.755102
| 24
| 147
| 4.291667
| 0.375
| 0.407767
| 0.631068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 147
| 6
| 26
| 24.5
| 0.837398
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b7e38ecd30b1b096ee4a54bbb0c3790a89cf0061
| 4,650
|
py
|
Python
|
scripts/rpc/deploy_contract.py
|
gl12138/ann
|
9ebdae5bf08e2402881fbfb7eabbb06fc7295fdd
|
[
"Apache-2.0"
] | 44
|
2018-08-16T09:50:36.000Z
|
2019-11-18T11:29:00.000Z
|
scripts/rpc/deploy_contract.py
|
gl12138/ann
|
9ebdae5bf08e2402881fbfb7eabbb06fc7295fdd
|
[
"Apache-2.0"
] | 5
|
2019-02-13T07:12:46.000Z
|
2019-11-22T05:45:49.000Z
|
scripts/rpc/deploy_contract.py
|
gl12138/ann
|
9ebdae5bf08e2402881fbfb7eabbb06fc7295fdd
|
[
"Apache-2.0"
] | 13
|
2018-08-28T09:30:03.000Z
|
2019-11-13T05:35:13.000Z
|
from requests import Session
# Removed: a bare hex integer literal (a duplicate of the 'pubkey' value in
# the payload dict below) sat here as a no-op expression statement — dead
# code with no effect.
d = {
'nonce': "1",
'from': '0x',
'value': "100000",
'pubkey': '0x0104994735dfcf60eb43bb5286334a7a83b622685fc3feb92247c65cc4b4cd55497fbc767f2e59a222abb650e5f411d9f7b6266b49439cdb348d859e33e89846bcaf',
'signature':'',
'data': '0x608060405234801561001057600080fd5b506040805190810160405280600e81526020017f68656c6c6f206861636b65723a200000000000000000000000000000000000008152506000908051906020019061005c929190610062565b50610107565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f106100a357805160ff19168380011785556100d1565b828001600101855582156100d1579182015b828111156100d05782518255916020019190600101906100b5565b5b5090506100de91906100e2565b5090565b61010491905b808211156101005760008160009055506001016100e8565b5090565b90565b6106e8806101166000396000f3fe608060405260043610610046576000357c0100000000000000000000000000000000000000000000000000000000900480638e7d4b1d1461004b578063e978c36f1461009e575b600080fd5b34801561005757600080fd5b506100846004803603602081101561006e57600080fd5b8101908080359060200190929190505050610166565b604051808215151515815260200191505060405180910390f35b3480156100aa57600080fd5b50610164600480360360208110156100c157600080fd5b81019080803590602001906401000000008111156100de57600080fd5b8201836020820111156100f057600080fd5b8035906020019184600183028401116401000000008311171561011257600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600081840152601f19601f820116905080830192505050505050509192919290505050610186565b005b60016020528060005260406000206000915054906101000a900460ff1681565b606061025f60008054600181600116156101000203166002900480601f0160208091040260200160405190810160405280929190818152602001828054600181600116156101000203166002900480156102215780601f106101f657610100808354040283529160200191610221565b820191906000526020600020905b81548152906001019060200180831161020457829003601f168201915b50505050508360206040519081016040528060008152506020604051908101604052806000815250602060405190810160405280600081525061029d565b9050600081805190602001209050600180600083815260200190815260200160002060006101000a81548160ff021916908315150217905550505050565b606080869050606086905060608690506060869050606086905060608151835185518751895101010101604051908
0825280601f01601f1916602001820160405280156102f95781602001600182028038833980820191505090505b5090506060819050600080905060008090505b88518110156103bf57888181518110151561032357fe5b9060200101517f010000000000000000000000000000000000000000000000000000000000000090047f010000000000000000000000000000000000000000000000000000000000000002838380600101945081518110151561038257fe5b9060200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a905350808060010191505061030c565b5060008090505b87518110156104795787818151811015156103dd57fe5b9060200101517f010000000000000000000000000000000000000000000000000000000000000090047f010000000000000000000000000000000000000000000000000000000000000002838380600101945081518110151561043c57fe5b9060200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535080806001019150506103c6565b5060008090505b865181101561053357868181518110151561049757fe5b9060200101517f010000000000000000000000000000000000000000000000000000000000000090047f01000000000000000000000000000000000000000000000000000000000000000283838060010194508151811015156104f657fe5b9060200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508080600101915050610480565b5060008090505b85518110156105ed57858181518110151561055157fe5b9060200101517f010000000000000000000000000000000000000000000000000000000000000090047f01000000000000000000000000000000000000000000000000000000000000000283838060010194508151811015156105b057fe5b9060200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a905350808060010191505061053a565b5060008090505b84518110156106a757848181518110151561060b57fe5b9060200101517f010000000000000000000000000000000000000000000000000000000000000090047f010000000000000000000000000000000000000000000000000000000000000002838380600101945081518110151561066a57fe5b9060200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535080806001019150506105f4565b5081985050505050505050509
594505050505056fea165627a7a723058205480dee1c28455684e95fc8c6d3f48d9e64269e891d00cf57b4db368343303480029',
}
if __name__ == '__main__':
    # Submit the contract-deployment transaction to the local node's RPC
    # endpoint and print the raw response body.
    http = Session()
    response = http.post('http://127.0.0.1:8000/new_transaction', json=d)
    print(response.text)
| 244.736842
| 4,109
| 0.970323
| 38
| 4,650
| 118.5
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.821538
| 0.015484
| 4,650
| 18
| 4,110
| 258.333333
| 0.16208
| 0
| 0
| 0
| 0
| 0
| 0.927957
| 0.909247
| 0
| 1
| 0.938065
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0.071429
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4d26479da4ea9aab165f20ff1147d7f039b817a2
| 157
|
py
|
Python
|
app/generators/__init__.py
|
LizaKoval/task_1
|
197981e7fcc22c7d168f1105f283c6bdbd570d7d
|
[
"Unlicense"
] | null | null | null |
app/generators/__init__.py
|
LizaKoval/task_1
|
197981e7fcc22c7d168f1105f283c6bdbd570d7d
|
[
"Unlicense"
] | null | null | null |
app/generators/__init__.py
|
LizaKoval/task_1
|
197981e7fcc22c7d168f1105f283c6bdbd570d7d
|
[
"Unlicense"
] | 1
|
2022-03-13T14:41:28.000Z
|
2022-03-13T14:41:28.000Z
|
import app.generators.abstractstatsservice, app.generators.general_stats_generator, app.generators.usage_stats_generator, app.generators.bike_stats_generator
| 157
| 157
| 0.904459
| 19
| 157
| 7.157895
| 0.473684
| 0.382353
| 0.25
| 0.397059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025478
| 157
| 1
| 157
| 157
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
4d63bfaa581a01634e7e689ce4b388d88d073c99
| 14,294
|
py
|
Python
|
snypy/teams/tests/test_team_snippet_permissions.py
|
sterapps/snypy-backend
|
e4733a1b7bf041c79c66ce74e64cc428d3c6ba5d
|
[
"MIT"
] | 2
|
2018-06-21T07:51:30.000Z
|
2019-06-01T14:17:07.000Z
|
snypy/teams/tests/test_team_snippet_permissions.py
|
nezhar/snypy-backend
|
0673b7dc7dc8b730639e0f634dcaa8b8178151e0
|
[
"MIT"
] | 33
|
2018-05-10T10:37:46.000Z
|
2021-10-30T11:07:22.000Z
|
snypy/teams/tests/test_team_snippet_permissions.py
|
sterapps/snypy-backend
|
e4733a1b7bf041c79c66ce74e64cc428d3c6ba5d
|
[
"MIT"
] | 3
|
2019-06-12T08:53:37.000Z
|
2020-10-28T17:21:02.000Z
|
import json
from django.contrib.auth.models import Permission
from django.urls import reverse
from core.tests import BaseAPITestCase
from snippets.models import Snippet
from teams.models import Team, UserTeam
class BaseTeamApiTestCase(BaseAPITestCase):
    """Shared fixture for team-snippet permission tests.

    Creates two users each owning one private (team-less) snippet, two empty
    teams, and one snippet owned by user1 that is assigned to team1. Verifies
    neither team has members initially.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual`` (removed in
    Python 3.12) — replaced throughout.
    """

    url = reverse("snippet-list")

    def setUp(self):
        super().setUp()

        # Initial (team-less) snippets for each user.
        Snippet.objects.create(user=self.user1, title="Python snippet user 1")
        Snippet.objects.create(user=self.user2, title="Python snippet user 2")

        self.team1 = Team.objects.create(name="Team 1")
        self.team2 = Team.objects.create(name="Team 2")

        self.team1_snippet = Snippet.objects.create(user=self.user1, title="Python snippet team 1", team=self.team1)

        # No memberships exist yet; individual tests add them as needed.
        team_1_member_count = UserTeam.objects.filter(team=self.team1).count()
        self.assertEqual(team_1_member_count, 0)

        team_2_member_count = UserTeam.objects.filter(team=self.team2).count()
        self.assertEqual(team_2_member_count, 0)
class TeamSnippetListAPIViewTestCase(BaseTeamApiTestCase):
"""
Snippets can be viewable by their owner and by users that belong to the same team
that the snippet is assigned to. The visibility of the snippets is not related to the role in the team.
"""
def setUp(self):
super().setUp()
self.user1.user_permissions.add(Permission.objects.get(codename='view_snippet'))
self.user2.user_permissions.add(Permission.objects.get(codename='view_snippet'))
def test_team_snippet_owner(self):
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 2)
response = self.client.get("%s?team_is_null=True" % self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 1)
response = self.client.get("%s?team=%d" % (self.url, self.team1.pk))
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 1)
response = self.client.get("%s?team=%d" % (self.url, self.team2.pk))
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 0)
def test_team_snippet_other_user_unassigned(self):
self.api_authentication(self.token2)
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 1)
response = self.client.get("%s?team_is_null=True" % self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 1)
response = self.client.get("%s?team=%d" % (self.url, self.team1.pk))
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 0)
response = self.client.get("%s?team=%d" % (self.url, self.team2.pk))
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 0)
def test_team_snippet_other_user_assigned_as_subscriber(self):
self.api_authentication(self.token2)
UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_SUBSCRIBER)
response = self.client.get(self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 2)
response = self.client.get("%s?team_is_null=True" % self.url)
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 1)
response = self.client.get("%s?team=%d" % (self.url, self.team1.pk))
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 1)
response = self.client.get("%s?team=%d" % (self.url, self.team2.pk))
self.assertEquals(response.status_code, 200)
self.assertEquals(len(json.loads(response.content)), 0)
def test_team_snippet_other_user_assigned_as_contributor(self):
    """Contributor membership grants the same read access as subscriber."""
    self.api_authentication(self.token2)
    UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_CONTRIBUTOR)
    response = self.client.get(self.url)
    # NOTE: assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 2)
    response = self.client.get("%s?team_is_null=True" % self.url)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 1)
    response = self.client.get("%s?team=%d" % (self.url, self.team1.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 1)
    response = self.client.get("%s?team=%d" % (self.url, self.team2.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 0)
def test_team_snippet_other_user_assigned_as_editor(self):
    """Editor membership grants the same read access as subscriber."""
    self.api_authentication(self.token2)
    UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_EDITOR)
    response = self.client.get(self.url)
    # NOTE: assertEquals is a deprecated alias of assertEqual (removed in 3.12).
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 2)
    response = self.client.get("%s?team_is_null=True" % self.url)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 1)
    response = self.client.get("%s?team=%d" % (self.url, self.team1.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 1)
    response = self.client.get("%s?team=%d" % (self.url, self.team2.pk))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(len(json.loads(response.content)), 0)
class TeamSnippetListAPICreateTestCase(BaseTeamApiTestCase):
    """Snippets can be created on a team only by its contributors or editors.

    Unassigned users and subscribers get a validation error (400); the role
    check is independent of the Django `add_snippet` model permission, which
    both users hold.
    """

    def setUp(self):
        super().setUp()
        add_perm = Permission.objects.get(codename='add_snippet')
        self.user1.user_permissions.add(add_perm)
        self.user2.user_permissions.add(add_perm)
        self.create_data = {
            "title": "Python snippet",
            "description": "",
            "team": self.team1.pk,
        }

    def assert_create_response(self, response):
        """Verify a 201 response echoing the posted data with the expected defaults."""
        self.assertEqual(response.status_code, 201)
        expected = {
            'user': self.user1.pk,
            'title': self.create_data['title'],
            'description': self.create_data['description'],
            'visibility': Snippet.VISIBILITY_PRIVATE,
            'team': self.team1.pk,
            'user_display': self.user1.username,
        }
        for field, value in expected.items():
            self.assertEqual(response.data[field], value)
        self.assertListEqual(response.data['files'], [])
        self.assertListEqual(response.data['labels'], [])

    def test_team_snippet_unassigned(self):
        # No membership at all -> rejected as invalid input.
        response = self.client.post(self.url, self.create_data)
        self.assertEqual(response.status_code, 400)

    def test_team_snippet_other_user_assigned_subscriber(self):
        # Subscribers may read but not create.
        UserTeam.objects.create(team=self.team1, user=self.user1, role=UserTeam.ROLE_SUBSCRIBER)
        response = self.client.post(self.url, self.create_data)
        self.assertEqual(response.status_code, 400)

    def test_team_snippet_other_user_assigned_contributor(self):
        UserTeam.objects.create(team=self.team1, user=self.user1, role=UserTeam.ROLE_CONTRIBUTOR)
        response = self.client.post(self.url, self.create_data)
        self.assert_create_response(response)

    def test_team_snippet_other_user_assigned_editor(self):
        UserTeam.objects.create(team=self.team1, user=self.user1, role=UserTeam.ROLE_EDITOR)
        response = self.client.post(self.url, self.create_data)
        self.assert_create_response(response)
class TeamSnippetDetailAPIViewTestCase(BaseTeamApiTestCase):
    """
    Snippets can be viewable by their owner and by users that belong to the same team
    that the snippet is assigned to. The visibility of the snippets is not related to the role in the team.
    """

    def setUp(self):
        super().setUp()
        self.user1.user_permissions.add(Permission.objects.get(codename='view_snippet'))
        self.user2.user_permissions.add(Permission.objects.get(codename='view_snippet'))
        self.url = reverse("snippet-detail", kwargs={'pk': self.team1_snippet.pk})

    def test_team_snippet_owner(self):
        # The owner can always retrieve their snippet.
        response = self.client.get(self.url)
        # NOTE: assertEquals is a deprecated alias of assertEqual (removed in 3.12).
        self.assertEqual(response.status_code, 200)

    def test_team_snippet_other_user_unassigned(self):
        # Non-members get 404 — the snippet is hidden, not merely forbidden.
        self.api_authentication(self.token2)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 404)

    def test_team_snippet_other_user_assigned_as_subscriber(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_SUBSCRIBER)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)

    def test_team_snippet_other_user_assigned_as_contributor(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_CONTRIBUTOR)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)

    def test_team_snippet_other_user_assigned_as_editor(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_EDITOR)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
class TeamSnippetDetailAPIEditTestCase(BaseTeamApiTestCase):
    """
    Snippets can be edited only on teams the user is assigned to with the role editor.
    """

    def setUp(self):
        super().setUp()
        self.user1.user_permissions.add(Permission.objects.get(codename='change_snippet'))
        self.user2.user_permissions.add(Permission.objects.get(codename='change_snippet'))
        self.url = reverse("snippet-detail", kwargs={'pk': self.team1_snippet.pk})
        self.patch_data = {'title': "Python snippet edited"}

    def assert_patch_response(self, response):
        """Verify a 200 PATCH response with only the title changed."""
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data['user'], self.user1.pk)
        self.assertEqual(response.data['title'], self.patch_data['title'])
        self.assertEqual(response.data['description'], "")
        self.assertEqual(response.data['visibility'], Snippet.VISIBILITY_PRIVATE)
        self.assertEqual(response.data['team'], self.team1.pk)
        self.assertEqual(response.data['user_display'], self.user1.username)
        self.assertListEqual(response.data['files'], [])
        self.assertListEqual(response.data['labels'], [])

    def test_team_snippet_owner(self):
        response = self.client.patch(self.url, self.patch_data)
        self.assert_patch_response(response)

    def test_team_snippet_other_user_unassigned(self):
        # Hidden entirely from non-members.
        self.api_authentication(self.token2)
        response = self.client.patch(self.url, self.patch_data)
        # NOTE: the original mixed assertEquals/assertEqual; assertEquals is
        # a deprecated alias, so assertEqual is used consistently here.
        self.assertEqual(response.status_code, 404)

    def test_team_snippet_other_user_assigned_as_subscriber(self):
        # Members can see the snippet (hence 403, not 404) but subscribers cannot edit.
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_SUBSCRIBER)
        response = self.client.patch(self.url, self.patch_data)
        self.assertEqual(response.status_code, 403)

    def test_team_snippet_other_user_assigned_as_contributor(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_CONTRIBUTOR)
        response = self.client.patch(self.url, self.patch_data)
        self.assertEqual(response.status_code, 403)

    def test_team_snippet_other_user_assigned_as_editor(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_EDITOR)
        response = self.client.patch(self.url, self.patch_data)
        self.assert_patch_response(response)
class TeamSnippetDetailAPIDeleteTestCase(BaseTeamApiTestCase):
    """
    Snippets can be deleted only on teams the user is assigned to with the role editor.
    """

    def setUp(self):
        super().setUp()
        self.user1.user_permissions.add(Permission.objects.get(codename='delete_snippet'))
        self.user2.user_permissions.add(Permission.objects.get(codename='delete_snippet'))
        self.url = reverse("snippet-detail", kwargs={'pk': self.team1_snippet.pk})

    def test_team_snippet_owner(self):
        response = self.client.delete(self.url)
        # NOTE: assertEquals is a deprecated alias of assertEqual (removed in 3.12).
        self.assertEqual(response.status_code, 204)

    def test_team_snippet_other_user_unassigned(self):
        # Hidden entirely from non-members.
        self.api_authentication(self.token2)
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 404)

    def test_team_snippet_other_user_assigned_as_subscriber(self):
        # Members can see the snippet (hence 403, not 404) but subscribers cannot delete.
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_SUBSCRIBER)
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 403)

    def test_team_snippet_other_user_assigned_as_contributor(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_CONTRIBUTOR)
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 403)

    def test_team_snippet_other_user_assigned_as_editor(self):
        self.api_authentication(self.token2)
        UserTeam.objects.create(team=self.team1, user=self.user2, role=UserTeam.ROLE_EDITOR)
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, 204)
| 42.165192
| 116
| 0.708339
| 1,823
| 14,294
| 5.39989
| 0.070762
| 0.089395
| 0.071312
| 0.100569
| 0.903291
| 0.892828
| 0.892828
| 0.878302
| 0.856359
| 0.845083
| 0
| 0.01919
| 0.17245
| 14,294
| 338
| 117
| 42.289941
| 0.813002
| 0.046593
| 0
| 0.783186
| 0
| 0
| 0.048435
| 0
| 0
| 0
| 0
| 0
| 0.358407
| 1
| 0.141593
| false
| 0
| 0.026549
| 0
| 0.199115
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d6b23c9188f3bb21de6715e0d9e3e2da7ecafd7
| 6,191
|
py
|
Python
|
mmdet/core/bbox/geometry.py
|
qilei123/mmdetection_rop
|
cbdbb2b521c94c2f3eeebb2f2069663199f679bc
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/bbox/geometry.py
|
qilei123/mmdetection_rop
|
cbdbb2b521c94c2f3eeebb2f2069663199f679bc
|
[
"Apache-2.0"
] | null | null | null |
mmdet/core/bbox/geometry.py
|
qilei123/mmdetection_rop
|
cbdbb2b521c94c2f3eeebb2f2069663199f679bc
|
[
"Apache-2.0"
] | null | null | null |
import torch
def bbox_overlaps(bboxes1, bboxes2, mode='iou', is_aligned=False):
    """Calculate overlap between two set of bboxes.

    If ``is_aligned`` is ``False``, then calculate the ious between each bbox
    of bboxes1 and bboxes2, otherwise the ious between each aligned pair of
    bboxes1 and bboxes2.

    Args:
        bboxes1 (Tensor): shape (m, 4)
        bboxes2 (Tensor): shape (n, 4), if is_aligned is ``True``, then m and n
            must be equal.
        mode (str): "iou" (intersection over union) or iof (intersection over
            foreground).

    Returns:
        ious(Tensor): shape (m, n) if is_aligned == False else shape (m, 1)
    """
    assert mode in ['iou', 'iof']

    num1 = bboxes1.size(0)
    num2 = bboxes2.size(0)
    if is_aligned:
        assert num1 == num2
    if num1 * num2 == 0:
        # Nothing to compare: return an empty tensor of the documented shape.
        return bboxes1.new(num1, 1) if is_aligned else bboxes1.new(num1, num2)

    def pixel_area(boxes):
        # Inclusive-pixel convention: a degenerate box still spans one pixel.
        return (boxes[:, 2] - boxes[:, 0] + 1) * (boxes[:, 3] - boxes[:, 1] + 1)

    areas1 = pixel_area(bboxes1)

    if is_aligned:
        # Element-wise intersection of the i-th box of each set.
        top_left = torch.max(bboxes1[:, :2], bboxes2[:, :2])      # [num1, 2]
        bottom_right = torch.min(bboxes1[:, 2:], bboxes2[:, 2:])  # [num1, 2]
        inter_wh = (bottom_right - top_left + 1).clamp(min=0)     # [num1, 2]
        inter = inter_wh[:, 0] * inter_wh[:, 1]
        if mode == 'iou':
            return inter / (areas1 + pixel_area(bboxes2) - inter)
        return inter / areas1

    # All-pairs case: broadcasting over a new axis yields [num1, num2, 2].
    top_left = torch.max(bboxes1[:, None, :2], bboxes2[:, :2])
    bottom_right = torch.min(bboxes1[:, None, 2:], bboxes2[:, 2:])
    inter_wh = (bottom_right - top_left + 1).clamp(min=0)
    inter = inter_wh[:, :, 0] * inter_wh[:, :, 1]
    if mode == 'iou':
        return inter / (areas1[:, None] + pixel_area(bboxes2) - inter)
    return inter / areas1[:, None]
def bbox_overlaps2(bboxes1, bboxes2, mode='iou', is_aligned=False):
    """Overlap computation with extra statistics for the non-aligned IoU case.

    Identical to :func:`bbox_overlaps` except that, when ``is_aligned`` is
    ``False`` and ``mode == 'iou'``, it additionally reports the intersection
    normalized by the area of ``bboxes2`` and which ``bboxes2`` centers fall
    strictly inside each box of ``bboxes1``.

    Args:
        bboxes1 (Tensor): shape (m, 4).
        bboxes2 (Tensor): shape (n, 4), if is_aligned is ``True``, then m and n
            must be equal.
        mode (str): "iou" (intersection over union) or iof (intersection over
            foreground).

    Returns:
        If ``is_aligned`` is ``False`` and ``mode == 'iou'``, a tuple
        ``(ious, ious2, centers_in_gt)``:

        - ious (Tensor): shape (m, n), standard IoU.
        - ious2 (Tensor): shape (m, n), ``overlap / area(bboxes2)``.
        - centers_in_gt (Tensor): shape (m, n) mask, truthy where the center
          of ``bboxes2[j]`` lies strictly inside ``bboxes1[i]``.

        Otherwise a single Tensor shaped (m, 1) if is_aligned else (m, n),
        exactly like :func:`bbox_overlaps`.
    """
    # NOTE: the original docstring was copied from bbox_overlaps and did not
    # mention the tuple return; a block of dead, commented-out numpy-based
    # validation code has also been removed.
    assert mode in ['iou', 'iof']

    rows = bboxes1.size(0)
    cols = bboxes2.size(0)
    if is_aligned:
        assert rows == cols

    if rows * cols == 0:
        return bboxes1.new(rows, 1) if is_aligned else bboxes1.new(rows, cols)

    if is_aligned:
        lt = torch.max(bboxes1[:, :2], bboxes2[:, :2])  # [rows, 2]
        rb = torch.min(bboxes1[:, 2:], bboxes2[:, 2:])  # [rows, 2]

        wh = (rb - lt + 1).clamp(min=0)  # [rows, 2]
        overlap = wh[:, 0] * wh[:, 1]
        area1 = (bboxes1[:, 2] - bboxes1[:, 0] + 1) * (
            bboxes1[:, 3] - bboxes1[:, 1] + 1)

        if mode == 'iou':
            area2 = (bboxes2[:, 2] - bboxes2[:, 0] + 1) * (
                bboxes2[:, 3] - bboxes2[:, 1] + 1)
            ious = overlap / (area1 + area2 - overlap)
        else:
            ious = overlap / area1
    else:
        lt = torch.max(bboxes1[:, None, :2], bboxes2[:, :2])  # [rows, cols, 2]
        rb = torch.min(bboxes1[:, None, 2:], bboxes2[:, 2:])  # [rows, cols, 2]

        wh = (rb - lt + 1).clamp(min=0)  # [rows, cols, 2]
        overlap = wh[:, :, 0] * wh[:, :, 1]
        area1 = (bboxes1[:, 2] - bboxes1[:, 0] + 1) * (
            bboxes1[:, 3] - bboxes1[:, 1] + 1)

        if mode == 'iou':
            area2 = (bboxes2[:, 2] - bboxes2[:, 0] + 1) * (
                bboxes2[:, 3] - bboxes2[:, 1] + 1)
            ious = overlap / (area1[:, None] + area2 - overlap)
            # Intersection normalized by the candidate (bboxes2) area.
            ious2 = overlap / area2
            # Candidate centers tiled per gt box: centers -> [rows, cols, 2],
            # bboxes1_gt -> [rows, cols, 4].
            centers = torch.cat(
                (((bboxes2[:, 0] + bboxes2[:, 2]) / 2).reshape(-1, 1),
                 ((bboxes2[:, 1] + bboxes2[:, 3]) / 2).reshape(-1, 1)),
                dim=1)
            centers = centers.repeat(rows, 1, 1)
            bboxes1_gt = bboxes1.repeat(cols, 1, 1).permute(1, 0, 2)
            # Strict inequalities: a center exactly on a boundary does not count.
            centers_in_gt = (bboxes1_gt[:, :, 0] < centers[:, :, 0])
            centers_in_gt = centers_in_gt * (bboxes1_gt[:, :, 2] > centers[:, :, 0])
            centers_in_gt = centers_in_gt * (bboxes1_gt[:, :, 1] < centers[:, :, 1])
            centers_in_gt = centers_in_gt * (bboxes1_gt[:, :, 3] > centers[:, :, 1])
            return ious, ious2, centers_in_gt
        else:
            ious = overlap / (area1[:, None])

    return ious
| 37.981595
| 139
| 0.504442
| 804
| 6,191
| 3.792289
| 0.105721
| 0.044605
| 0.054116
| 0.03411
| 0.851755
| 0.784847
| 0.784847
| 0.762545
| 0.752378
| 0.722204
| 0
| 0.072877
| 0.315135
| 6,191
| 163
| 140
| 37.981595
| 0.646226
| 0.201098
| 0
| 0.839506
| 0
| 0
| 0.008172
| 0
| 0
| 0
| 0
| 0
| 0.049383
| 1
| 0.024691
| false
| 0
| 0.012346
| 0
| 0.098765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d94b51c294da1e5fc1b4d5800daeda5a4012c3a
| 7,753
|
py
|
Python
|
tests/conftest.py
|
akebrissman/gateway
|
84f8093af854418f64175dd499c021b6d71f85d2
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
akebrissman/gateway
|
84f8093af854418f64175dd499c021b6d71f85d2
|
[
"MIT"
] | 2
|
2019-10-25T18:58:18.000Z
|
2019-11-16T13:10:38.000Z
|
tests/conftest.py
|
akebrissman/gateway
|
84f8093af854418f64175dd499c021b6d71f85d2
|
[
"MIT"
] | 2
|
2019-10-17T09:06:09.000Z
|
2019-10-18T10:09:13.000Z
|
import pytest
import os
from datetime import datetime, timedelta
from jose import jwt
from gateway import create_app
from gateway import db
from gateway.models.firebase import FirebaseModel
def read_file(file_name: str) -> str:
    """Return the full contents of *file_name*, or "" if it cannot be read.

    Best-effort helper used to load the JWT signing key in tests; any
    failure (missing file, permission error, ...) yields an empty string
    rather than raising.
    """
    try:
        # ``with`` guarantees the handle is closed even if read() raises;
        # the original closed the file only on the success path.
        with open(file_name, "r") as f:
            return f.read()
    except Exception:
        # Preserve the original best-effort contract: swallow and return "".
        return ""
def _encode_token(*, aud='my-gateway-api', scope='read:group write:group',
                  iat_offset=0, exp_offset=10, include_kid=True,
                  corrupt_signature=False):
    """Build a signed (RS256) "Bearer ..." token string for the tests.

    All public get_*_token helpers below delegate here; they previously
    duplicated this body five times with one or two values changed.

    Args:
        aud: audience claim.
        scope: space-separated scope claim.
        iat_offset: seconds added to utcnow() for the 'iat' claim.
        exp_offset: seconds added to utcnow() for the 'exp' claim.
        include_kid: whether the JOSE header carries the 'kid' field.
        corrupt_signature: append a character so signature validation fails.
    """
    claims = {'iss': 'https://abrissman.auth.com/',
              'sub': '123456789',
              'aud': aud,
              'iat': datetime.utcnow() + timedelta(seconds=iat_offset),
              'exp': datetime.utcnow() + timedelta(seconds=exp_offset),
              'scope': scope}
    headers = {"kid": "123456789"} if include_kid else {}
    # TODO: Must be a better way to find the path to the file
    if os.getcwd().find('tests') >= 0:
        # Started from the IDE (cwd is already the tests directory)
        key = read_file("jwtRS256.key")
    else:
        # Started from the Terminal (cwd is the project root)
        key = read_file("tests/jwtRS256.key")
    token = jwt.encode(claims=claims, key=key, algorithm='RS256', headers=headers)
    if corrupt_signature:
        # Appending a character keeps the token shape but breaks the signature.
        token = token + 'a'
    return f"Bearer {token}"


def get_access_token():
    """Valid token that expires in 10 seconds."""
    return _encode_token()


def get_expired_access_token():
    """Token whose 'exp' claim is already 5 seconds in the past."""
    return _encode_token(iat_offset=-15, exp_offset=-5)


def get_missing_kid_in_token():
    """Otherwise-valid token whose JOSE header lacks the 'kid' field."""
    return _encode_token(include_kid=False)


def get_missing_scope_in_token():
    """Token carrying device scopes instead of the required group scopes."""
    return _encode_token(scope='read:device write:device', exp_offset=30)


def get_invalid_aud_in_token():
    """Token issued for a different audience."""
    return _encode_token(aud='INVALID')


def get_invalid_signature_in_token():
    """Token with a deliberately corrupted signature."""
    return _encode_token(corrupt_signature=True)
# ---------------------------------------------------------------------------
# Flask test-client fixtures. Each one builds the app from 'flask_test.cfg',
# attaches a specific bearer-token variant (see the get_*_token helpers
# above) as ``app.bearer``, creates the schema, yields a test client, and
# drops the schema afterwards.
# ---------------------------------------------------------------------------
# Client authenticated with a valid, unexpired token.
@pytest.fixture(scope='module')
def test_client():
app = create_app('flask_test.cfg')
app.bearer = get_access_token()
with app.app_context():
db.create_all()
yield app.test_client() # this is where the testing happens!
db.session.remove()
db.drop_all()
# Client whose token's 'exp' claim is already in the past.
@pytest.fixture(scope='module')
def test_client_expired_token():
app = create_app('flask_test.cfg')
app.bearer = get_expired_access_token()
with app.app_context():
db.create_all()
yield app.test_client() # this is where the testing happens!
db.session.remove()
db.drop_all()
# Client whose token's JOSE header lacks the 'kid' field.
@pytest.fixture(scope='module')
def test_client_missing_kid_in_token():
app = create_app('flask_test.cfg')
app.bearer = get_missing_kid_in_token()
with app.app_context():
db.create_all()
yield app.test_client() # this is where the testing happens!
db.session.remove()
db.drop_all()
# Client whose token lacks the group scopes.
@pytest.fixture(scope='module')
def test_client_missing_scope_in_token():
app = create_app('flask_test.cfg')
app.bearer = get_missing_scope_in_token()
with app.app_context():
db.create_all()
yield app.test_client() # this is where the testing happens!
db.session.remove()
db.drop_all()
# Client whose token was issued for a different audience.
@pytest.fixture(scope='module')
def test_client_invalid_aud_in_token():
app = create_app('flask_test.cfg')
app.bearer = get_invalid_aud_in_token()
with app.app_context():
db.create_all()
yield app.test_client() # this is where the testing happens!
db.session.remove()
db.drop_all()
# Client whose token carries a corrupted signature.
@pytest.fixture(scope='module')
def test_client_invalid_signature_in_token():
app = create_app('flask_test.cfg')
app.bearer = get_invalid_signature_in_token()
with app.app_context():
db.create_all()
yield app.test_client() # this is where the testing happens!
db.session.remove()
db.drop_all()
# Client with a valid token but whose database schema is deliberately never
# created — used to exercise behaviour against a missing database.
@pytest.fixture(scope='module')
def test_client_no_db():
app = create_app('flask_test.cfg')
app.bearer = get_access_token()
with app.app_context():
#db.create_all()
yield app.test_client() # this is where the testing happens!
#db.session.remove()
#db.drop_all()
# Function-scoped application fixture: yields the Flask app itself (not a
# client) with a fresh schema per test.
@pytest.fixture()
def app():
app = create_app('flask_test.cfg')
with app.app_context():
db.create_all()
yield app
db.session.remove()
db.drop_all()
# Module-scoped fixture that creates the schema, yields the db handle, and
# drops everything afterwards; requires an active app context from the caller.
@pytest.fixture(scope='module')
def init_database():
# Create the database and the database table
db.create_all()
# Insert user data
# user1 = User(email='patkennedy79@gmail.com', plaintext_password='FlaskIsAwesome')
# user2 = User(email='kennedyfamilyrecipes@gmail.com', plaintext_password='PaSsWoRd')
# db.session.add(user1)
# db.session.add(user2)
# Commit the changes for the users
# db.session.commit()
yield db # this is where the testing happens!
db.drop_all()
| 29.934363
| 89
| 0.608668
| 995
| 7,753
| 4.603015
| 0.127638
| 0.030568
| 0.036681
| 0.041921
| 0.850437
| 0.834498
| 0.829258
| 0.814629
| 0.814629
| 0.806769
| 0
| 0.032319
| 0.253708
| 7,753
| 258
| 90
| 30.050388
| 0.759246
| 0.166129
| 0
| 0.739884
| 0
| 0
| 0.17009
| 0
| 0
| 0
| 0
| 0.003876
| 0
| 1
| 0.092486
| false
| 0
| 0.040462
| 0
| 0.17341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12a6d345ea7c2b866dc1c0b193cd833d6cdbbcb2
| 4,883
|
py
|
Python
|
networkapi/api_equipment/v4/tests/sanity/test_equipment_delete.py
|
vinicius-marinho/GloboNetworkAPI
|
94651d3b4dd180769bc40ec966814f3427ccfb5b
|
[
"Apache-2.0"
] | 73
|
2015-04-13T17:56:11.000Z
|
2022-03-24T06:13:07.000Z
|
networkapi/api_equipment/v4/tests/sanity/test_equipment_delete.py
|
leopoldomauricio/GloboNetworkAPI
|
3b5b2e336d9eb53b2c113977bfe466b23a50aa29
|
[
"Apache-2.0"
] | 99
|
2015-04-03T01:04:46.000Z
|
2021-10-03T23:24:48.000Z
|
networkapi/api_equipment/v4/tests/sanity/test_equipment_delete.py
|
shildenbrand/GloboNetworkAPI
|
515d5e961456cee657c08c275faa1b69b7452719
|
[
"Apache-2.0"
] | 64
|
2015-08-05T21:26:29.000Z
|
2022-03-22T01:06:28.000Z
|
# -*- coding: utf-8 -*-
import logging
from django.test.client import Client
from networkapi.test.test_case import NetworkApiTestCase
log = logging.getLogger(__name__)
# Sanity tests for the v4 equipment DELETE endpoint (success paths).
class EquipmentDeleteTestCase(NetworkApiTestCase):
# Fixture set: users/groups/permissions plus equipment with ASN, VRF and
# IPv4/IPv6 associations, so cascaded deletion can be exercised.
fixtures = [
'networkapi/system/fixtures/initial_variables.json',
'networkapi/usuario/fixtures/initial_usuario.json',
'networkapi/grupo/fixtures/initial_ugrupo.json',
'networkapi/usuario/fixtures/initial_usuariogrupo.json',
'networkapi/grupo/fixtures/initial_permissions.json',
'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
'networkapi/api_equipment/v4/fixtures/initial_pre_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_asn.json',
'networkapi/api_equipment/v4/fixtures/initial_asn_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_vrf.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv4.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv4_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv6.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv6_equipment.json',
]
def setUp(self):
# Fresh HTTP client and auth header for the 'test' user before each test.
self.client = Client()
self.authorization = self.get_http_authorization('test')
def tearDown(self):
pass
def test_delete_one_equipment_success(self):
"""V4 Test of success to delete of one equipment."""
# Delete equipment 1, then confirm a follow-up GET returns 404 with the
# API's (sic) not-found message.
response = self.client.delete(
'/api/v4/equipment/1/',
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
response = self.client.get(
'/api/v4/equipment/1/',
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(404, response.status_code)
self.compare_values(
'Dont there is a equipament by pk = 1.',
response.data['detail'])
def test_delete_one_equipment_with_associated_as(self):
"""V4 Test of success to delete equipment with associates AS."""
# Equipment 4 has an associated ASN (see fixtures); deleting the
# equipment is expected to remove the ASN as well.
response = self.client.delete(
'/api/v4/equipment/4/',
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(200, response.status_code)
response = self.client.get(
'/api/v4/equipment/4/',
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(404, response.status_code)
self.compare_values(
'Dont there is a equipament by pk = 4.',
response.data['detail'])
# Check if AS was also deleted
response = self.client.get(
'/api/v4/as/4/',
HTTP_AUTHORIZATION=self.authorization
)
self.compare_status(404, response.status_code)
self.compare_values(
u'ASN 4 do not exist.',
response.data['detail']
)
# Sanity tests for the v4 equipment DELETE endpoint (error paths).
class EquipmentDeleteErrorTestCase(NetworkApiTestCase):
# Same fixture set as EquipmentDeleteTestCase; pk 10 is deliberately absent.
fixtures = [
'networkapi/system/fixtures/initial_variables.json',
'networkapi/usuario/fixtures/initial_usuario.json',
'networkapi/grupo/fixtures/initial_ugrupo.json',
'networkapi/usuario/fixtures/initial_usuariogrupo.json',
'networkapi/grupo/fixtures/initial_permissions.json',
'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
'networkapi/api_equipment/v4/fixtures/initial_pre_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_asn.json',
'networkapi/api_equipment/v4/fixtures/initial_asn_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_vrf.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv4.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv4_equipment.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv6.json',
'networkapi/api_equipment/v4/fixtures/initial_ipv6_equipment.json',
]
def setUp(self):
# Fresh HTTP client and auth header for the 'test' user before each test.
self.client = Client()
self.authorization = self.get_http_authorization('test')
def tearDown(self):
pass
def test_delete_one_inexistent_equipment(self):
"""V4 Test of error to delete of one inexistent equipment."""
# Deleting an id that does not exist must 404 with the API's message.
response = self.client.delete(
'/api/v4/equipment/10/',
content_type='application/json',
HTTP_AUTHORIZATION=self.authorization)
self.compare_status(404, response.status_code)
self.compare_values(
'Dont there is a equipament by pk = 10.',
response.data['detail'])
| 35.904412
| 76
| 0.676224
| 536
| 4,883
| 5.958955
| 0.164179
| 0.140889
| 0.095805
| 0.146525
| 0.868503
| 0.857858
| 0.849718
| 0.825297
| 0.80025
| 0.80025
| 0
| 0.017277
| 0.217694
| 4,883
| 135
| 77
| 36.17037
| 0.818848
| 0.043621
| 0
| 0.78125
| 0
| 0
| 0.436493
| 0.364281
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072917
| false
| 0.020833
| 0.03125
| 0
| 0.145833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12e9bdc0be320959bdcf6f251049975fade2cea0
| 4,155
|
py
|
Python
|
netbox/extras/migrations/0036_contenttype_filters_to_q_objects.py
|
BrnoPCmaniak/netbox
|
7b517abdb68a6324950dfd0375861163c7bfff00
|
[
"Apache-2.0"
] | 2
|
2021-06-02T03:00:05.000Z
|
2021-07-30T18:52:32.000Z
|
netbox/extras/migrations/0036_contenttype_filters_to_q_objects.py
|
emersonfelipesp/netbox
|
fecca5ad83fb6b48a2f15982dfd3242653f105f9
|
[
"Apache-2.0"
] | 4
|
2021-06-08T22:29:06.000Z
|
2022-03-12T00:48:51.000Z
|
netbox/extras/migrations/0036_contenttype_filters_to_q_objects.py
|
emersonfelipesp/netbox
|
fecca5ad83fb6b48a2f15982dfd3242653f105f9
|
[
"Apache-2.0"
] | 1
|
2018-12-05T12:03:21.000Z
|
2018-12-05T12:03:21.000Z
|
# Generated by Django 2.2.8 on 2020-01-15 21:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.2.8): rewrites the
    ``limit_choices_to`` option on several ``extras`` relations from plain
    dict filters to ``models.Q`` objects.

    The ``AlterField`` operations below only change the Python-side field
    options (which content types each relation may point to); no database
    schema change is implied.
    """

    dependencies = [
        ('extras', '0035_deterministic_ordering'),
    ]

    operations = [
        # CustomField.obj_type: content types that may carry custom fields.
        migrations.AlterField(
            model_name='customfield',
            name='obj_type',
            field=models.ManyToManyField(limit_choices_to=models.Q(models.Q(models.Q(('app_label', 'circuits'), ('model__in', ['circuit', 'provider'])), models.Q(('app_label', 'dcim'), ('model__in', ['device', 'devicetype', 'powerfeed', 'rack', 'site'])), models.Q(('app_label', 'ipam'), ('model__in', ['aggregate', 'ipaddress', 'prefix', 'service', 'vlan', 'vrf'])), models.Q(('app_label', 'secrets'), ('model__in', ['secret'])), models.Q(('app_label', 'tenancy'), ('model__in', ['tenant'])), models.Q(('app_label', 'virtualization'), ('model__in', ['cluster', 'virtualmachine'])), _connector='OR')), related_name='custom_fields', to='contenttypes.ContentType'),
        ),
        # CustomLink.content_type: content types a custom link may target.
        migrations.AlterField(
            model_name='customlink',
            name='content_type',
            field=models.ForeignKey(limit_choices_to=models.Q(models.Q(models.Q(('app_label', 'circuits'), ('model__in', ['circuit', 'provider'])), models.Q(('app_label', 'dcim'), ('model__in', ['cable', 'device', 'devicetype', 'powerpanel', 'powerfeed', 'rack', 'site'])), models.Q(('app_label', 'ipam'), ('model__in', ['aggregate', 'ipaddress', 'prefix', 'service', 'vlan', 'vrf'])), models.Q(('app_label', 'secrets'), ('model__in', ['secret'])), models.Q(('app_label', 'tenancy'), ('model__in', ['tenant'])), models.Q(('app_label', 'virtualization'), ('model__in', ['cluster', 'virtualmachine'])), _connector='OR')), on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'),
        ),
        # ExportTemplate.content_type: exportable content types.
        migrations.AlterField(
            model_name='exporttemplate',
            name='content_type',
            field=models.ForeignKey(limit_choices_to=models.Q(models.Q(models.Q(('app_label', 'circuits'), ('model__in', ['circuit', 'provider'])), models.Q(('app_label', 'dcim'), ('model__in', ['cable', 'consoleport', 'device', 'devicetype', 'interface', 'inventoryitem', 'manufacturer', 'powerpanel', 'powerport', 'powerfeed', 'rack', 'rackgroup', 'region', 'site', 'virtualchassis'])), models.Q(('app_label', 'ipam'), ('model__in', ['aggregate', 'ipaddress', 'prefix', 'service', 'vlan', 'vrf'])), models.Q(('app_label', 'secrets'), ('model__in', ['secret'])), models.Q(('app_label', 'tenancy'), ('model__in', ['tenant'])), models.Q(('app_label', 'virtualization'), ('model__in', ['cluster', 'virtualmachine'])), _connector='OR')), on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'),
        ),
        # Graph.type: content types a graph may be attached to.
        migrations.AlterField(
            model_name='graph',
            name='type',
            field=models.ForeignKey(limit_choices_to=models.Q(models.Q(models.Q(('app_label', 'circuits'), ('model__in', ['provider'])), models.Q(('app_label', 'dcim'), ('model__in', ['device', 'interface', 'site'])), _connector='OR')), on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'),
        ),
        # Webhook.obj_type: content types whose changes can fire webhooks.
        migrations.AlterField(
            model_name='webhook',
            name='obj_type',
            field=models.ManyToManyField(limit_choices_to=models.Q(models.Q(models.Q(('app_label', 'circuits'), ('model__in', ['circuit', 'provider'])), models.Q(('app_label', 'dcim'), ('model__in', ['cable', 'consoleport', 'consoleserverport', 'device', 'devicebay', 'devicetype', 'frontport', 'interface', 'inventoryitem', 'manufacturer', 'poweroutlet', 'powerpanel', 'powerport', 'powerfeed', 'rack', 'rearport', 'region', 'site', 'virtualchassis'])), models.Q(('app_label', 'ipam'), ('model__in', ['aggregate', 'ipaddress', 'prefix', 'service', 'vlan', 'vrf'])), models.Q(('app_label', 'secrets'), ('model__in', ['secret'])), models.Q(('app_label', 'tenancy'), ('model__in', ['tenant'])), models.Q(('app_label', 'virtualization'), ('model__in', ['cluster', 'virtualmachine'])), _connector='OR')), related_name='webhooks', to='contenttypes.ContentType'),
        ),
    ]
| 103.875
| 857
| 0.638026
| 455
| 4,155
| 5.584615
| 0.206593
| 0.099174
| 0.102322
| 0.153483
| 0.770563
| 0.770563
| 0.770563
| 0.749311
| 0.749311
| 0.731208
| 0
| 0.005237
| 0.126835
| 4,155
| 39
| 858
| 106.538462
| 0.695149
| 0.01083
| 0
| 0.424242
| 1
| 0
| 0.411392
| 0.035784
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12ecd07de05a69ba95f69272f3f91241bc10e50a
| 4,626
|
py
|
Python
|
lightseq/training/ops/pytorch/layer_base.py
|
hutao965/lightseq
|
0e3050c4f872ce3581b663a055be032c72102196
|
[
"Apache-2.0"
] | 1
|
2022-03-27T17:16:16.000Z
|
2022-03-27T17:16:16.000Z
|
lightseq/training/ops/pytorch/layer_base.py
|
iRmantou/lightseq
|
9a617306fa711a3d6a25ef3eab9bfbe408692189
|
[
"Apache-2.0"
] | null | null | null |
lightseq/training/ops/pytorch/layer_base.py
|
iRmantou/lightseq
|
9a617306fa711a3d6a25ef3eab9bfbe408692189
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from torch import nn
from lightseq.training.ops.pytorch.util import MODEL_ARCH, check_config
class TransformerEncoderLayerBase(nn.Module):
    """Initialize the Lightseq Transformer Encoder Layer.

    Static variable:
        layer_id: The layer-index counter starting from 0 and incrementing by 1
            every time a layer object is instantiated, e.g. if a model has 24
            transformer layers, layer_id goes from 0 to 23.
    Arguments:
        config: An object of LSTransformerEncoderLayer config, see get_config
        initial_weights: Optional: Only used for unit test
        initial_biases: Optional: Only used for unit test
    """

    @staticmethod
    def get_config(**kwargs):
        """Build and validate an encoder-layer ``Config`` from keyword args.

        An optional ``model`` keyword selects a predefined architecture from
        ``MODEL_ARCH``, which fills in defaults before the config is built.

        Raises:
            ValueError: if ``model`` names an unknown architecture.
        """

        @dataclass
        class Config:
            max_batch_tokens: int  # max batch token numbers
            max_seq_len: int  # max sequence length
            hidden_size: int  # size of transformer hidden layers
            intermediate_size: int  # size of ffn inner size
            nhead: int  # number of heads in attention
            attn_prob_dropout_ratio: float  # attention score dropout ratio
            activation_dropout_ratio: float  # ffn activation dropout ratio
            hidden_dropout_ratio: float  # dropout ratio before residual
            pre_layer_norm: bool  # pre layer norm or post
            fp16: bool  # fp16 precision
            local_rank: int  # rank in local node
            activation_fn: str = "relu"  # relu or gelu

        if "model" in kwargs:
            if kwargs["model"] not in MODEL_ARCH:
                # Bug fix: the '{}' placeholder was never substituted, so the
                # error did not say which architecture was rejected.
                raise ValueError(
                    "{} architecture is not supported.".format(kwargs["model"])
                )
            MODEL_ARCH[kwargs["model"]](kwargs)
            del kwargs["model"]
        config = Config(**kwargs)
        check_config(config)
        return config
class TransformerDecoderLayerBase(nn.Module):
    """Initialize the Lightseq Transformer Decoder Layer.

    Static variable:
        layer_id: The layer-index counter starting from 0 and incrementing by 1
            every time a layer object is instantiated, e.g. if a model has 24
            transformer layers, layer_id goes from 0 to 23.
    Arguments:
        config: An object of LSTransformerDecoderLayer config, see get_config
        initial_weights: Optional: Only used for unit test
        initial_biases: Optional: Only used for unit test
    """

    @staticmethod
    def get_config(**kwargs):
        """Build and validate a decoder-layer ``Config`` from keyword args.

        An optional ``model`` keyword selects a predefined architecture from
        ``MODEL_ARCH``, which fills in defaults before the config is built.

        Raises:
            ValueError: if ``model`` names an unknown architecture.
        """

        @dataclass
        class Config:
            max_batch_tokens: int  # max batch token numbers
            max_seq_len: int  # max sequence length
            hidden_size: int  # size of transformer hidden layers
            intermediate_size: int  # size of ffn inner size
            nhead: int  # number of heads in attention
            attn_prob_dropout_ratio: float  # attention score dropout ratio
            activation_dropout_ratio: float  # ffn activation dropout ratio
            hidden_dropout_ratio: float  # dropout ratio before residual
            pre_layer_norm: bool  # pre layer norm or post
            fp16: bool  # fp16 precision
            local_rank: int  # rank in local node
            nlayer: int  # number of layers
            activation_fn: str = "relu"  # relu or gelu

        if "model" in kwargs:
            if kwargs["model"] not in MODEL_ARCH:
                # Bug fix: the '{}' placeholder was never substituted, so the
                # error did not say which architecture was rejected.
                raise ValueError(
                    "{} architecture is not supported.".format(kwargs["model"])
                )
            MODEL_ARCH[kwargs["model"]](kwargs)
            del kwargs["model"]
        config = Config(**kwargs)
        check_config(config)
        return config
class TransformerEmbeddingLayerBase(nn.Module):
    """Initialize the Lightseq Transformer Embedding Layer.

    Arguments:
        config: An object of the embedding-layer config, see get_config
        initial_weights: Optional: Only used for unit test
        initial_biases: Optional: Only used for unit test
    """

    @staticmethod
    def get_config(**kwargs):
        """Build an embedding-layer ``Config`` directly from keyword args.

        Unlike the encoder/decoder variants, no ``model`` preset handling or
        validation is performed here.
        """

        @dataclass
        class Config:
            vocab_size: int  # vocabulary size
            embedding_dim: int  # embedding size
            max_batch_tokens: int  # max batch token numbers
            max_seq_len: int  # max sequence length
            padding_idx: int  # padding token id in vocabulary
            dropout: float  # embedding dropout ratio
            fp16: bool  # fp16 precision
            local_rank: int  # rank in local node

        config = Config(**kwargs)
        return config
| 38.87395
| 122
| 0.648725
| 568
| 4,626
| 5.163732
| 0.223592
| 0.040914
| 0.032731
| 0.038868
| 0.876236
| 0.876236
| 0.876236
| 0.876236
| 0.876236
| 0.876236
| 0
| 0.010148
| 0.297017
| 4,626
| 118
| 123
| 39.20339
| 0.891759
| 0.461089
| 0
| 0.823529
| 0
| 0
| 0.048593
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044118
| false
| 0
| 0.044118
| 0
| 0.220588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42295055d024cef7149d4ff8c7380724e12b1cd1
| 185
|
py
|
Python
|
Python/String_Split_and_Join/main.py
|
hugolribeiro/hackerrank_exercises
|
d2757b24479c26ec39e01091e3a15e8980e97864
|
[
"MIT"
] | null | null | null |
Python/String_Split_and_Join/main.py
|
hugolribeiro/hackerrank_exercises
|
d2757b24479c26ec39e01091e3a15e8980e97864
|
[
"MIT"
] | null | null | null |
Python/String_Split_and_Join/main.py
|
hugolribeiro/hackerrank_exercises
|
d2757b24479c26ec39e01091e3a15e8980e97864
|
[
"MIT"
] | null | null | null |
def split_and_join(line):
    """Split *line* on single spaces and re-join the pieces with '-'."""
    return '-'.join(line.split(' '))
| 23.125
| 36
| 0.610811
| 25
| 185
| 4.36
| 0.56
| 0.220183
| 0.256881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210811
| 185
| 7
| 37
| 26.428571
| 0.746575
| 0.378378
| 0
| 0
| 0
| 0
| 0.018018
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
424d6488a491c011bfe9dcae8522f6f5123e6c67
| 169
|
py
|
Python
|
tests/parser/true_negation.4b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/true_negation.4b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/true_negation.4b.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
up(L,0) | -up(L,0) :- latch(L).
latch(a).
latch(b).
latch(c).
"""
output = """
up(L,0) | -up(L,0) :- latch(L).
latch(a).
latch(b).
latch(c).
"""
| 11.266667
| 32
| 0.443787
| 30
| 169
| 2.5
| 0.3
| 0.16
| 0.213333
| 0.16
| 0.853333
| 0.853333
| 0.853333
| 0.853333
| 0.853333
| 0.853333
| 0
| 0.030075
| 0.213018
| 169
| 14
| 33
| 12.071429
| 0.533835
| 0
| 0
| 0.833333
| 0
| 0
| 0.805031
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c41c430f5eeea4516f2cd6f8fab39f65275ba2fc
| 141
|
py
|
Python
|
src/opendr/perception/object_detection_2d/utils/__init__.py
|
passalis/demos
|
d8aeb045ee1832418fa232bc1c73783d72d10cf7
|
[
"Apache-2.0"
] | 1
|
2021-08-18T22:07:40.000Z
|
2021-08-18T22:07:40.000Z
|
src/opendr/perception/object_detection_2d/utils/__init__.py
|
passalis/demos
|
d8aeb045ee1832418fa232bc1c73783d72d10cf7
|
[
"Apache-2.0"
] | null | null | null |
src/opendr/perception/object_detection_2d/utils/__init__.py
|
passalis/demos
|
d8aeb045ee1832418fa232bc1c73783d72d10cf7
|
[
"Apache-2.0"
] | null | null | null |
from .eval_utils import DetectionDatasetCOCOEval
from .eval_utils import *
from .vis_utils import *
__all__ = ['DetectionDatasetCOCOEval', ]
| 28.2
| 48
| 0.808511
| 15
| 141
| 7.133333
| 0.466667
| 0.308411
| 0.242991
| 0.35514
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113475
| 141
| 4
| 49
| 35.25
| 0.856
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 0.170213
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c4868b8da1cb2e85dae94b57dea847fa3a54f5d1
| 339
|
py
|
Python
|
django_sso_app/models.py
|
paiuolo/django-sso-app
|
75b96c669dc0b176dc77e08f018a3e97d259f636
|
[
"MIT"
] | 1
|
2021-11-16T15:16:08.000Z
|
2021-11-16T15:16:08.000Z
|
django_sso_app/models.py
|
paiuolo/django-sso-app
|
75b96c669dc0b176dc77e08f018a3e97d259f636
|
[
"MIT"
] | null | null | null |
django_sso_app/models.py
|
paiuolo/django-sso-app
|
75b96c669dc0b176dc77e08f018a3e97d259f636
|
[
"MIT"
] | null | null | null |
# Importing core models in order to enable "dumpdata"
from .core.apps.devices.models import *
from .core.apps.groups.models import *
from .core.apps.passepartout.models import *
from .core.apps.profiles.models import *
from .core.apps.services.models import *
from .core.apps.status.models import *
from .core.apps.events.models import *
| 33.9
| 53
| 0.775811
| 50
| 339
| 5.26
| 0.36
| 0.212928
| 0.319392
| 0.456274
| 0.547529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112094
| 339
| 9
| 54
| 37.666667
| 0.873754
| 0.150442
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.142857
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
67617a55f2f0a2690c147e49939f871bcc5623a5
| 210
|
py
|
Python
|
filesystem_crawler/__init__.py
|
heliosantos/filesystem_crawler
|
218504f01b8ba68fee7f6397c0bcdf7e6d8ecd8e
|
[
"MIT"
] | null | null | null |
filesystem_crawler/__init__.py
|
heliosantos/filesystem_crawler
|
218504f01b8ba68fee7f6397c0bcdf7e6d8ecd8e
|
[
"MIT"
] | null | null | null |
filesystem_crawler/__init__.py
|
heliosantos/filesystem_crawler
|
218504f01b8ba68fee7f6397c0bcdf7e6d8ecd8e
|
[
"MIT"
] | null | null | null |
from .filesystem_crawler import FilesystemCrawler
from .match_rule import MatchRule
from .filesystem_crawler_parsers import parse_match_rules_from_file
from .filesystem_crawler_parsers import parse_match_rules
| 42
| 67
| 0.904762
| 28
| 210
| 6.357143
| 0.428571
| 0.235955
| 0.353933
| 0.314607
| 0.550562
| 0.550562
| 0.550562
| 0.550562
| 0
| 0
| 0
| 0
| 0.07619
| 210
| 4
| 68
| 52.5
| 0.917526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
678cba02f3d7195268029865efcdb8c58464e551
| 208
|
py
|
Python
|
tests/context/__init__.py
|
s3131212/python-safe-eval
|
aaeb2c181ad31f3ef38bb06db36ac9f337609ce0
|
[
"MIT"
] | null | null | null |
tests/context/__init__.py
|
s3131212/python-safe-eval
|
aaeb2c181ad31f3ef38bb06db36ac9f337609ce0
|
[
"MIT"
] | null | null | null |
tests/context/__init__.py
|
s3131212/python-safe-eval
|
aaeb2c181ad31f3ef38bb06db36ac9f337609ce0
|
[
"MIT"
] | null | null | null |
import sys
import os

# Absolute path of the repository root (two directory levels above this
# test-context package). Computed once -- the original evaluated the same
# expression twice, once for the print and once for the path insert.
_repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
print(_repo_root)
sys.path.insert(0, _repo_root)

import PythonSafeEval
| 34.666667
| 88
| 0.692308
| 31
| 208
| 4.387097
| 0.387097
| 0.264706
| 0.191176
| 0.220588
| 0.588235
| 0.588235
| 0.588235
| 0.588235
| 0.588235
| 0.588235
| 0
| 0.005128
| 0.0625
| 208
| 6
| 89
| 34.666667
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0.038278
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0.2
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
67924e32ae82fd933bb5781e9649adfe17f0c816
| 26,518
|
py
|
Python
|
my_functions1.py
|
skoltech-nlp/coqas
|
bf0d8d3d1fffa0039114d7d64bacc020f5085c66
|
[
"Apache-2.0"
] | 1
|
2021-12-02T09:19:39.000Z
|
2021-12-02T09:19:39.000Z
|
my_functions1.py
|
skoltech-nlp/coqas
|
bf0d8d3d1fffa0039114d7d64bacc020f5085c66
|
[
"Apache-2.0"
] | 2
|
2021-04-14T13:46:09.000Z
|
2021-04-14T13:49:29.000Z
|
my_functions1.py
|
skoltech-nlp/coqas
|
bf0d8d3d1fffa0039114d7d64bacc020f5085c66
|
[
"Apache-2.0"
] | null | null | null |
def do_sum(number1, number2):
    """Return the sum of the two arguments."""
    result = number1 + number2
    return result
import os.path
import torch
import sys
import spacy
import en_core_web_sm

# for extractor class
sys.path.append('/home/vika/targer')
sys.path.append('/notebook/cqas')
from src.factories.factory_tagger import TaggerFactory
from src.layers import layer_context_word_embeddings_bert

# for responser class
import json
import requests

# for generate answer
from generation.generation import diviner

import os

# Directory containing this module; used below to locate bundled model files.
current_directory_path = os.path.dirname(os.path.realpath(__file__))

# pathes to pretrained extraction model
PATH_TO_PRETRAINED = '/external_pretrained_models/'
MODEL_NAMES = ['bertttt.hdf5']
def load(checkpoint_fn, gpu=-1):
    """Load a pretrained tagger checkpoint and pin its indexers to *gpu*.

    Arguments:
        checkpoint_fn: file name of the checkpoint under PATH_TO_PRETRAINED.
        gpu: device index to assign (-1 keeps the model on CPU semantics).

    Raises:
        ValueError: if the checkpoint file does not exist.
    """
    if not os.path.isfile(PATH_TO_PRETRAINED + checkpoint_fn):
        # Bug fix: the original wrote 'Can''t' inside a single-quoted string,
        # which is adjacent-string concatenation and rendered as "Cant".
        raise ValueError('Can\'t find tagger in file "%s". Please, run the main script with non-empty '
                         '"--save-best-path" param to create it.' % checkpoint_fn)
    tagger = torch.load(PATH_TO_PRETRAINED + checkpoint_fn)
    tagger.gpu = gpu
    tagger.word_seq_indexer.gpu = gpu  # hotfix
    tagger.tag_seq_indexer.gpu = gpu  # hotfix
    if hasattr(tagger, 'char_embeddings_layer'):  # very hot hotfix
        tagger.char_embeddings_layer.char_seq_indexer.gpu = gpu  # hotfix
    tagger.self_ensure_gpu()
    return tagger
import nltk
def create_sequence_from_sentence(str_sentences):
    """Tokenize every sentence in *str_sentences*; returns a list of token lists."""
    return list(map(nltk.word_tokenize, str_sentences))
class extractor:
    """Extract comparison objects, predicates and aspects from a question.

    A pretrained sequence tagger labels the tokens; when it finds fewer than
    two objects, spaCy heuristics (comparative POS tags, named entities and
    the 'or'/'and'/'vs' separator) are used as a fallback.

    Fixes applied:
      * ``get_words``/``get_tags`` were missing ``self`` in their signatures,
        so any instance call raised TypeError.
      * bare ``except:`` clauses narrowed to ``except Exception:`` so
        SystemExit/KeyboardInterrupt are not swallowed.
    """

    def __init__(self, my_device=6, model_name='bertttt.hdf5',
                 model_path=current_directory_path + '/external_pretrained_models/'):
        """Load the tagger model and move it to the given GPU device.

        Raises:
            RuntimeError: if the model cannot be loaded or mapped to the GPU.
        """
        self.answ = "UNKNOWN ERROR"
        self.model_name = model_name
        self.model_path = model_path
        self.first_object = ''
        self.second_object = ''
        self.predicates = ''
        self.aspects = []
        # we can't use set because span object is dict and dict is unhashable.
        # add_span keeps the list free of duplicates instead.
        self.spans = []
        try:
            print(my_device)
            self.model = TaggerFactory.load(self.model_path + self.model_name, my_device)
            self.model.cuda(device=my_device)
            self.model.gpu = my_device
        except Exception:
            raise RuntimeError("Init extractor: can't map to gpu. Maybe it is OOM")

    def add_span(self, span_obj):
        """Append *span_obj* to self.spans only if an equal span is absent."""
        if span_obj not in self.spans:
            self.spans.append(span_obj)

    def get_words(self):
        # Bug fix: 'self' was missing from the signature.
        return self.words

    def get_tags(self):
        # Bug fix: 'self' was missing from the signature.
        return self.tags

    def from_string(self, input_sentence):
        """Reset all per-question state and remember the raw input string."""
        self.input_str = input_sentence
        self.first_object = ''
        self.second_object = ''
        self.predicates = ''
        self.aspects = []
        self.spans = []

    def get_objects_predicates(self, list_words, list_tags):
        """Collect OBJ/PREDFULL-tagged tokens and record their character spans.

        Returns (obj_list, pred_list); only B- tokens are appended to the
        lists, while both B- and I- tokens contribute spans.
        """
        obj_list = []
        pred_list = []
        for ind, elem in enumerate(list_tags):
            if elem == 'B-OBJ':
                obj_list.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "OBJ"})
            if elem == 'I-OBJ':
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "OBJ"})
            if elem == 'B-PREDFULL':
                pred_list.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "PRED"})
            if elem == 'I-PREDFULL':
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "PRED"})
        return obj_list, pred_list

    def get_aspects(self, list_words, list_tags):
        """Collect ASP-tagged tokens into self.aspects and record their spans."""
        for ind, elem in enumerate(list_tags):
            if elem == 'B-ASP':
                self.aspects.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "ASP"})
            if elem == 'I-ASP':
                self.aspects.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "ASP"})
        return self.aspects

    def extract_objects_predicates(self, input_sentence):
        """Tag *input_sentence* and fill first/second object, predicates, spans.

        Falls back to spaCy when the tagger yields fewer than two objects.
        """
        words = create_sequence_from_sentence([input_sentence])
        tags = self.model.predict_tags_from_words(words)
        print("extract_objects_predicates tags", tags[0])
        print("extract_objects_predicates words", words[0])
        objects, predicates = self.get_objects_predicates(words[0], tags[0])
        aspects = self.get_aspects(words[0], tags[0])
        print(objects)
        print(predicates)
        print(aspects)
        self.predicates = predicates
        print("len(objects)", len(objects))
        if len(objects) >= 2:
            self.first_object = objects[0]
            self.second_object = objects[1]
        else:  # try to use spacy
            if len(objects) == 1:
                self.first_object = objects[0]
                self.second_object = ''
            print("We try to use spacy")
            nlp = spacy.load("en_core_web_sm")
            doc = nlp(input_sentence)
            tokens = [token.text for token in doc]
            split_sent = words[0]
            print("We try to use spacy")
            if (len(self.predicates) == 0):
                # No tagged predicate: use the first comparative
                # adjective/adverb (JJR/RBR) found by spaCy.
                for ind, token in enumerate(doc):
                    if (doc[ind].tag_ == 'JJR' or doc[ind].tag_ == 'RBR'):
                        self.predicates = doc[ind].text
                        self.add_span({'end': self.input_str.lower().find(doc[ind].text) + len(doc[ind].text), 'start': self.input_str.lower().find(doc[ind].text), 'type': "PRED"})
                        break
            print("split_sent", split_sent)
            print('or' in split_sent)
            # Identify the coordinating token separating the two objects.
            if 'or' in split_sent:
                comp_elem = 'or'
            elif 'and' in split_sent:
                comp_elem = 'and'
            elif 'vs' in split_sent:
                comp_elem = 'vs'
            elif 'vs.' in split_sent:
                comp_elem = 'vs.'
            else:
                self.answ = "We can't recognize two objects for compare 0"
                return
            print("comp_elem", comp_elem)
            print("tokens", tokens)
            if (comp_elem in tokens):
                print("comp elem in tokens")
                or_index = tokens.index(comp_elem)
                if (len(doc.ents) >= 2):
                    # Prefer named entities adjacent to the separator token.
                    print("or doc ents", or_index)
                    for ent in doc.ents:
                        print("doc ent text", ent.text, ent.start, ent.end, or_index)
                        if (ent.end == or_index):
                            self.first_object = ent.text
                            self.add_span({'end': ent.end, 'start': ent.start, 'type': "OBJ"})
                        if (ent.start == or_index + 1):
                            self.second_object = ent.text
                            self.add_span({'end': ent.end, 'start': ent.start, 'type': "OBJ"})
                else:
                    print("or simple split_sent", or_index)
                    try:
                        obj1 = tokens[or_index - 1]  # tokens are uppercase. self.input_str is uppercase
                        obj2 = tokens[or_index + 1]
                        print(obj1, obj2)
                        self.first_object = obj1
                        self.second_object = obj2
                        self.add_span({'end': self.input_str.find(obj1) + len(obj1), 'start': self.input_str.find(obj1), 'type': "OBJ"})
                        self.add_span({'end': self.input_str.find(obj2) + len(obj2), 'start': self.input_str.find(obj2), 'type': "OBJ"})
                    except Exception:
                        self.answ = "We can't recognize two objects for compare 1"
            else:
                self.answ = "We can't recognize two objects for compare 2"

    def get_params(self):
        """Run extraction on the stored question and return
        (first_object, second_object, predicates), objects stripped of
        trailing punctuation."""
        print("in extractor get params 0")
        self.extract_objects_predicates(self.input_str)
        return self.first_object.strip(".,!/?"), self.second_object.strip(".,!/?"), self.predicates

    def get_aspect(self):
        """Return the aspects collected by the last extraction."""
        return self.aspects
class extractorAurora(extractor):
    """Variant of :class:`extractor` for the Aurora tag scheme
    (PROD1/PROD2/PRED/ASP instead of B-OBJ/I-OBJ/B-PREDFULL/...)."""

    def __init__(self, my_device = 6, model_name = 'Aurora.hdf5', model_path = current_directory_path + '/external_pretrained_models/'):
        # Load the Aurora tagger checkpoint and move it to the requested GPU.
        self.answ = "UNKNOWN ERROR"
        self.model_name = model_name
        self.model_path = model_path
        self.first_object = ''
        self.second_object = ''
        self.predicates = ''
        self.spans = [] # we can't use set because span object is dict and dict is unchashable. We add function add_span to keep non-repeatability
        try:
            self.model = TaggerFactory.load(self.model_path + self.model_name, my_device)
            self.model.cuda(device=my_device)
            self.model.gpu = my_device
            print ("extract_objects_predicates gpu", self.model.gpu)
        except:
            raise RuntimeError("Init extractor: can't map to gpu. Maybe it is OOM")

    def get_objects_predicates(self, list_words, list_tags):
        """Collect PROD1 tokens as objects and PRED tokens as predicates,
        recording character spans for every tagged token.

        NOTE(review): PROD2 and ASP tokens only add spans -- they are not
        appended to any returned list, and asp_list stays unused; confirm
        this is intended.
        """
        obj_list = []
        pred_list = []
        asp_list = []
        for ind, elem in enumerate(list_tags):
            if elem == 'PROD1':
                obj_list.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "OBJ" })
            if elem == 'PROD2':
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({ 'end': start + len(list_words[ind]), 'start': start, 'type': "OBJ" })
            if elem == 'PRED':
                pred_list.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({ 'end': start + len(list_words[ind]), 'start': start, 'type': "PRED" })
            if elem == 'ASP':
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({ 'end': start + len(list_words[ind]), 'start': start, 'type': "ASP" })
        return obj_list, pred_list
class responser:
    """Thin HTTP client for the CAM comparative-argument API."""

    def __init__(self):
        # Public CAM demo endpoint.
        self.URL = 'http://ltdemos.informatik.uni-hamburg.de/cam-api'
        # Proxy table; kept as an attribute but not passed to requests.get.
        self.proxies = {"http": "http://185.46.212.97:10015/", "https": "https://185.46.212.98:10015/", }

    def get_response(self, first_object, second_object, fast_search=True,
                     aspects=None, weights=None):
        """Query the API comparing two objects; returns the raw response.

        Raises:
            ValueError: when the number of aspects and weights differ.
        """
        print("aspects", aspects)
        print("weights", weights)
        aspect_count = 0 if aspects is None else len(aspects)
        weight_count = 0 if weights is None else len(weights)
        if aspect_count != weight_count:
            raise ValueError(
                "Number of weights should be equal to the number of aspects")
        query = {
            'objectA': first_object,
            'objectB': second_object,
            'fs': str(fast_search).lower(),
        }
        if aspect_count:
            # Numbered aspect/weight parameters: aspect1, weight1, aspect2, ...
            for i, aspect in enumerate(aspects):
                query['aspect{}'.format(i + 1)] = aspect
            for i, weight in enumerate(weights):
                query['weight{}'.format(i + 1)] = weight
        print("get url")
        print("params", query)
        response = requests.get(url=self.URL, params=query)
        return response
def answerer(input_string, tp = 'big'):
    """End-to-end pipeline: extract objects and predicates from the question,
    query the CAM API, and generate a textual advice answer.

    Returns the generated answer, or a human-readable error string when any
    stage fails. NOTE(review): 'comparision' in the final message is a typo
    in a user-facing string, deliberately left untouched.
    """
    my_extractor = extractor()
    my_extractor.from_string(input_string)
    my_responser = responser()
    obj1, obj2, predicates = my_extractor.get_params()
    print ("len(obj1), len(obj2)", len(obj1), len(obj2))
    print ("obj1, obj2, predicates", obj1, obj2, predicates)
    if (len(obj1) > 0 and len(obj2) > 0):
        # Both objects recognized: compare obj1 vs obj2 directly.
        response = my_responser.get_response(first_object = obj1, second_object = obj2, fast_search=True, aspects = predicates, weights = [1 for predicate in predicates])
        try:
            response_json = response.json()
        except:
            return ("smth wrong in response, please try again")
        try:
            my_diviner = diviner(tp = tp)
            print (1)
            my_diviner.create_from_json(response_json, predicates)
            print (2)
        except:
            #del my_extractor,my_diviner, my_responser
            return ("smth wrong in diviner, please try again")
        try:
            answer = my_diviner.generate_advice()
            print ("answer", answer)
            #del my_extractor,my_diviner, my_responser
            return answer
        except:
            #del my_extractor,my_diviner, my_responser
            return ("smth wrong in answer generation, please try again")
    elif (len(obj1) > 0 and len(obj2) == 0):
        # Only the first object recognized: 'and' serves as a dummy second
        # object and the answer is generated in single-object mode.
        print ("len(obj1) > 0 and len(obj2) == 0")
        response = my_responser.get_response(first_object = obj1, second_object = 'and', fast_search=True, aspects = predicates, weights = [1 for predicate in predicates])
        try:
            response_json = response.json()
            my_diviner = diviner(tp = "big")
            my_diviner.create_from_json(response_json, predicates)
            answer = my_diviner.generate_advice(is_object_single = True)
            print ("answer", answer)
            #del my_extractor,my_diviner, my_responser
            return answer
        except:
            #del my_extractor,my_diviner, my_responser
            return ("smth wrong in response, please try again")
    elif (len(obj2) > 0 and len(obj1) == 0):
        # Symmetric case: only the second object was recognized.
        print ("len(obj2) > 0 and len(obj1) == 0")
        response = my_responser.get_response(first_object = obj2, second_object = 'and', fast_search=True, aspects = predicates, weights = [1 for predicate in predicates])
        try:
            response_json = response.json()
            my_diviner = diviner(tp = "big")
            my_diviner.create_from_json(response_json, predicates)
            answer = my_diviner.generate_advice(is_object_single = True)
            print ("answer", answer)
            #del my_extractor,my_diviner, my_responser
            return answer
        except:
            #del my_extractor,my_diviner, my_responser
            return ("smth wrong in response, please try again")
    else:
        return ("We can't recognize objects for comparision")
class extractorArora(extractor):
    """Variant of :class:`extractor` for the Arora tag scheme
    (PROD1/PROD2/PRED/ASP); logs exception info during GPU init."""

    def __init__(self, my_device = 6, model_name = 'aurora_berts_simple.hdf5', model_path = current_directory_path + '/external_pretrained_models/'):
        # Load the tagger checkpoint and move it to the requested GPU.
        self.answ = "UNKNOWN ERROR"
        self.model_name = model_name
        self.model_path = model_path
        self.first_object = ''
        self.second_object = ''
        self.predicates = ''
        self.spans = [] # we can't use set because span object is dict and dict is unchashable. We add function add_span to keep non-repeatability
        try:
            self.model = TaggerFactory.load(self.model_path + self.model_name, my_device)
            self.model.cuda(device=my_device)
            self.model.gpu = my_device
            print ("extract_objects_predicates gpu", self.model.gpu)
        except:
            # Log the exception type before re-raising as RuntimeError.
            e = sys.exc_info()[0]
            print ("exeption while mapping to gpu in extractorArora ", e)
            raise RuntimeError("Init extractor: can't map to gpu. Maybe it is OOM")

    def get_objects_predicates(self, list_words, list_tags):
        """Collect PROD1 tokens as objects and PRED tokens as predicates,
        recording character spans for every tagged token.

        NOTE(review): identical to extractorAurora.get_objects_predicates;
        PROD2/ASP tokens only add spans and asp_list stays unused.
        """
        obj_list = []
        pred_list = []
        asp_list = []
        for ind, elem in enumerate(list_tags):
            if elem == 'PROD1':
                obj_list.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({'end': start + len(list_words[ind]), 'start': start, 'type': "OBJ" })
            if elem == 'PROD2':
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({ 'end': start + len(list_words[ind]), 'start': start, 'type': "OBJ" })
            if elem == 'PRED':
                pred_list.append(list_words[ind])
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({ 'end': start + len(list_words[ind]), 'start': start, 'type': "PRED" })
            if elem == 'ASP':
                start = self.input_str.lower().find(list_words[ind])
                self.spans.append({ 'end': start + len(list_words[ind]), 'start': start, 'type': "ASP" })
        return obj_list, pred_list
class responser:
    """HTTP client for the CAM comparative-argument API.

    NOTE(review): this REDEFINES the `responser` class declared earlier in
    this module; this later definition (which adds a 70 s request timeout)
    is the one in effect after import.
    """
    def __init__(self):
        self.URL = 'http://ltdemos.informatik.uni-hamburg.de/cam-api'
        # Proxy table; defined but never passed to requests.get below.
        self.proxies = {"http": "http://185.46.212.97:10015/","https": "https://185.46.212.98:10015/",}
    def get_response(self, first_object, second_object, fast_search=True,
                     aspects=None, weights=None):
        """Query the API comparing two objects; returns the raw response.

        Raises ValueError when the aspect and weight counts differ.
        """
        print ("aspects", aspects)
        print ("weights", weights)
        num_aspects = len(aspects) if aspects is not None else 0
        num_weights = len(weights) if weights is not None else 0
        if num_aspects != num_weights:
            raise ValueError(
                "Number of weights should be equal to the number of aspects")
        params = {
            'objectA': first_object,
            'objectB': second_object,
            'fs': str(fast_search).lower()
        }
        if num_aspects:
            # Numbered aspect/weight query parameters: aspect1, weight1, ...
            params.update({'aspect{}'.format(i + 1): aspect
                           for i, aspect in enumerate(aspects)})
            params.update({'weight{}'.format(i + 1): weight
                           for i, weight in enumerate(weights)})
        print ("get url")
        print ("params", params)
        response = requests.get(url=self.URL, params=params, timeout=70)
        return response
def answerer(input_string, tp = 'big'):
    """Extract objects/predicates, query the CAM API, and generate an answer.

    NOTE(review): this REDEFINES the `answerer` declared earlier in this
    module and is the definition in effect after import; unlike the earlier
    version it has no branch for the "only obj2 recognized" case.
    """
    my_extractor = extractor()
    my_extractor.from_string(input_string)
    my_responser = responser()
    obj1, obj2, predicates = my_extractor.get_params()
    print ("len(obj1), len(obj2)", len(obj1), len(obj2))
    print ("obj1, obj2, predicates", obj1, obj2, predicates)
    if (len(obj1) > 0 and len(obj2) > 0):
        # Both objects recognized: compare obj1 vs obj2 directly.
        response = my_responser.get_response(first_object = obj1, second_object = obj2, fast_search=True, aspects = predicates, weights = [1 for predicate in predicates])
        try:
            response_json = response.json()
        except:
            return ("smth wrong in response, please try again")
        try:
            my_diviner = diviner(tp = tp)
            print (1)
            my_diviner.create_from_json(response_json, predicates)
            print (2)
        except:
            return ("smth wrong in diviner, please try again")
        try:
            answer = my_diviner.generate_advice()
            print ("answer", answer)
            #del my_extractor,my_diviner, my_responser
            return answer
        except:
            #del my_extractor,my_diviner, my_responser
            return ("smth wrong in answer generation, please try again")
    elif (len(obj1) > 0 and len(obj2) == 0):
        # Only the first object recognized: single-object mode with 'and'
        # as a dummy second object.
        print ("len(obj1) > 0 and len(obj2) == 0")
        response = my_responser.get_response(first_object = obj1, second_object = 'and', fast_search=True, aspects = predicates, weights = [1 for predicate in predicates])
        try:
            response_json = response.json()
            my_diviner = diviner(tp = "big")
            my_diviner.create_from_json(response_json, predicates)
            answer = my_diviner.generate_advice(is_object_single = True)
            print ("answer", answer)
            #del my_extractor,my_diviner, my_responser
            return answer
        except:
            #del my_extractor,my_diviner, my_responser
            return ("smth wrong in response, please try again")
    else:
        return ("We can't recognize objects for comparision")
class extractorArtemArora(extractorArora):
    # Variant of extractorArora whose tagger is a fine-tuned BERT model
    # loaded via torch.load(); falls back to spaCy heuristics when the
    # tagger yields fewer than two objects.

    def __init__(self, my_device = 1, model_name = "artem_bert_arora.hdf5", model_path = current_directory_path + '/external_pretrained_models/'):
        """Load the pretrained tagger and map it onto GPU ``my_device``.

        Raises:
            RuntimeError: if mapping the model onto the GPU fails
                (possibly out of GPU memory).
        """
        self.answ = "UNKNOWN ERROR"
        self.model_name = model_name
        self.model_path = model_path
        self.first_object = ''
        self.second_object = ''
        self.predicates = ''
        self.spans = [] # a span is a dict and dicts are unhashable, so a set can't be used; add_span keeps entries unique
        try:
            print (self.model_path + self.model_name)
            tagger = torch.load(self.model_path + self.model_name)
            self.model = tagger
        except: # catch *all* exceptions
            e = sys.exc_info()[0]
            print ("exeption while extracting to gpu ", str(sys.exc_info()))
        # Separate try: if the load above failed, self.model is unset and
        # this block raises too, which funnels into the RuntimeError below.
        try:
            print (111)
            self.model.cuda(device=my_device)
            self.model.gpu = my_device
            print (111)
            print ("extract_objects_predicates gpu", str(sys.exc_info()))
        except:
            e = sys.exc_info()[0]
            print (type(sys.exc_info()))
            print (type(e))
            print (str(sys.exc_info()))
            print (str(e))
            print ("exeption while mapping to gpu in SeqBert ", str(sys.exc_info()))
            raise RuntimeError("Init extractor. Maybe it is OOM")

    def predict_string(self, tokens):
        """Run the BERT tagger over one token list and return per-token
        label predictions."""
        print ("tokens")
        print (tokens)
        _, max_len, token_ids, token_masks, bpe_masks = self.model._make_tokens_tensors([tokens], self.model._max_len)
        label_ids = None
        loss_masks = None
        with torch.no_grad():
            # Move inputs onto the same GPU the model lives on.
            token_ids = token_ids.cuda(device=self.model.gpu)
            token_masks = token_masks.cuda(device=self.model.gpu)
            #loss_masks = loss_masks.cuda(device=self.model.gpu)
            print ("x")
            logits = self.model._bert_model(token_ids,
                                            token_type_ids=None,
                                            attention_mask=token_masks,
                                            labels=label_ids,
                                            loss_mask=loss_masks)
            print ("xxx")
            logits = logits[0]
            print ("xxxx")
            # Map wordpiece-level logits back to word-level predictions.
            b_preds, prob = self.model._logits_to_preds(logits.cpu(), bpe_masks, tokens)
            print ("bpreds", b_preds)
        return b_preds

    def extract_objects_predicates(self, input_sentence):
        """Fill first_object / second_object / predicates (and spans) from
        *input_sentence*.

        Tries the BERT tagger first; when it finds fewer than two objects,
        falls back to spaCy POS tags plus an 'or'/'vs' split heuristic.
        On failure, stores a human-readable message in ``self.answ``.
        """
        words = create_sequence_from_sentence([input_sentence])
        tags = self.predict_string(words[0])
        print ("extract_objects_predicates tags", tags[0])
        print ("extract_objects_predicates words", words[0])
        objects, predicates = self.get_objects_predicates(words[0], tags[0])
        print (objects)
        print (predicates)
        self.predicates = predicates
        print ("len(objects)", len(objects))
        if len(objects) >= 2:
            self.first_object = objects[0]
            self.second_object = objects[1]
        else: # try to use spacy
            if len(objects) == 1:
                self.first_object = objects[0]
                self.second_object = ''
            print("We try to use spacy")
            nlp = spacy.load("en_core_web_sm")
            doc = nlp(input_sentence)
            tokens = [token.text for token in doc]
            split_sent = words[0]
            if (len(self.predicates) == 0):
                print ("pand")
                # Use the first comparative adjective/adverb (JJR/RBR)
                # as the predicate.
                for ind, token in enumerate(doc):
                    if (doc[ind].tag_ == 'JJR' or doc[ind].tag_ == 'RBR'):
                        print ("pand 0")
                        self.predicates = [doc[ind].text]
                        self.add_span({'end': self.input_str.lower().find(doc[ind].text) + len(doc[ind].text), 'start': self.input_str.lower().find(doc[ind].text), 'type': "PRED" })
                        break
            # The two objects are expected to surround a comparison marker.
            if 'or' in split_sent:
                comp_elem = 'or'
            elif 'vs' in split_sent:
                comp_elem = 'vs'
            elif 'vs.' in split_sent:
                comp_elem = 'vs.'
            else:
                self.answ = "We can't recognize two objects for compare 0"
                return
            print ("comp_elem", comp_elem)
            print ("tokens", tokens)
            if (comp_elem in tokens):
                print ("comp elem in tokens")
                or_index = tokens.index(comp_elem)
                if (len (doc.ents) >= 2):
                    # Prefer named entities directly adjacent to the marker.
                    for ent in doc.ents:
                        if (ent.end == or_index):
                            self.first_object = ent.text
                            self.add_span({'end': ent.end,'start': ent.start, 'type': "OBJ" })
                        if (ent.start == or_index + 1):
                            self.second_object = ent.text
                            self.add_span({'end': ent.end, 'start': ent.start, 'type': "OBJ" })
                else:
                    print ("or simple split_sent", or_index)
                    # Fallback: the words right before/after the marker.
                    # NOTE(review): the original comment claimed tokens and
                    # self.input_str share casing — verify; find() below is
                    # case-sensitive.
                    try:
                        obj1 = tokens[or_index - 1]
                        obj2 = tokens[or_index + 1]
                        print (obj1, obj2)
                        self.first_object = obj1
                        self.second_object = obj2
                        self.add_span({'end': self.input_str.find(obj1) + len(obj1), 'start': self.input_str.find(obj1), 'type': "OBJ" })
                        self.add_span({'end': self.input_str.find(obj2) + len(obj2), 'start': self.input_str.find(obj2), 'type': "OBJ" })
                    except:
                        self.answ = "We can't recognize two objects for compare 1"
            else:
                self.answ = "We can't recognize two objects for compare 2"
| 45.407534
| 181
| 0.563391
| 3,177
| 26,518
| 4.519043
| 0.089078
| 0.025075
| 0.03009
| 0.02605
| 0.840914
| 0.815909
| 0.804695
| 0.795222
| 0.78359
| 0.77551
| 0
| 0.012874
| 0.323365
| 26,518
| 584
| 182
| 45.407534
| 0.787271
| 0.050645
| 0
| 0.770588
| 0
| 0
| 0.122499
| 0.015432
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05098
| false
| 0
| 0.023529
| 0.009804
| 0.14902
| 0.147059
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db352a6fb594d925279af150baee24c2400c9483
| 77
|
py
|
Python
|
learn_python_packaging/text.py
|
indranisen/learn_python_packaging
|
83912030ffe0281af83069e612f2386d25e65f10
|
[
"MIT"
] | null | null | null |
learn_python_packaging/text.py
|
indranisen/learn_python_packaging
|
83912030ffe0281af83069e612f2386d25e65f10
|
[
"MIT"
] | null | null | null |
learn_python_packaging/text.py
|
indranisen/learn_python_packaging
|
83912030ffe0281af83069e612f2386d25e65f10
|
[
"MIT"
] | null | null | null |
def learn():
    """Return the packaging-tutorial confirmation message."""
    message = 'You Have Successfully Learned Python Packaging now'
    return message
| 25.666667
| 63
| 0.753247
| 10
| 77
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 77
| 2
| 64
| 38.5
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0.649351
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
e1f055c9b64ae4cf48a9b798e7e8bab1737505b8
| 131
|
py
|
Python
|
forks/baselines/baselines/bench/__init__.py
|
AndrewPaulChester/sage-code
|
9fe676bfbcbc6f642eca29b30a1027fba2a426a0
|
[
"MIT"
] | null | null | null |
forks/baselines/baselines/bench/__init__.py
|
AndrewPaulChester/sage-code
|
9fe676bfbcbc6f642eca29b30a1027fba2a426a0
|
[
"MIT"
] | null | null | null |
forks/baselines/baselines/bench/__init__.py
|
AndrewPaulChester/sage-code
|
9fe676bfbcbc6f642eca29b30a1027fba2a426a0
|
[
"MIT"
] | null | null | null |
# flake8: noqa F403
from forks.baselines.baselines.bench.benchmarks import *
from forks.baselines.baselines.bench.monitor import *
| 32.75
| 56
| 0.816794
| 17
| 131
| 6.294118
| 0.588235
| 0.168224
| 0.336449
| 0.504673
| 0.598131
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033613
| 0.091603
| 131
| 3
| 57
| 43.666667
| 0.865546
| 0.129771
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c04482dd1905856cc6b2e8db56aeb99f435b3c35
| 12,856
|
py
|
Python
|
tools/git-importer/git_importer/tests.py
|
thewizardplusplus/wizard-diary
|
a41b110d095a9060449da23c7c19fc4cd47e27f4
|
[
"MIT"
] | null | null | null |
tools/git-importer/git_importer/tests.py
|
thewizardplusplus/wizard-diary
|
a41b110d095a9060449da23c7c19fc4cd47e27f4
|
[
"MIT"
] | 242
|
2015-01-11T23:16:33.000Z
|
2022-03-06T22:54:11.000Z
|
tools/git-importer/git_importer/tests.py
|
thewizardplusplus/wizard-diary
|
a41b110d095a9060449da23c7c19fc4cd47e27f4
|
[
"MIT"
] | null | null | null |
import unittest
import datetime
import sys
from . import input_
from . import process
from . import format_
class TestProcessCommitMessage(unittest.TestCase):
    """Tests for process._process_commit_message()."""

    def test_empty_message(self):
        # Empty and whitespace-only messages produce no issue marks.
        expected_result = {}
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            '',
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            ' \n',
        ), expected_result)

    def test_merge_message(self):
        # Merge commits are ignored regardless of wording or indentation.
        expected_result = {}
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            "Merge branch 'development'\n",
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            "Merge branch 'issue-23' into development\n",
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            "Merge the branch 'issue-23' into the branch 'development'\n",
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            " Merge branch 'development'\n",
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            " Merge branch 'issue-23' into development\n",
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            " Merge the branch 'issue-23' into the branch 'development'\n",
        ), expected_result)

    def test_message_without_issue_mark(self):
        # Messages without an issue mark are filed under SPECIAL_ISSUE,
        # lowercased.
        expected_result = {process.SPECIAL_ISSUE: ['update the change log']}
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            'Update the change log\n',
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            ' Update the change log\n',
        ), expected_result)

    def test_multiline_message(self):
        # Per the expected results, only the subject line of a multi-line
        # message is kept.
        expected_result = {process.SPECIAL_ISSUE: [
            'revert "Issue #12: add the FizzBuzz class"',
        ]}
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            '''Revert "Issue #12: add the FizzBuzz class"
This reverts commit 43065958923a14a05936887ccbb876d9dd5438f98923a14a05936887ccbb876d9dd5438f9.
''',
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            '''
Revert "Issue #12: add the FizzBuzz class"
This reverts commit 43065958923a14a05936887ccbb876d9dd5438f98923a14a05936887ccbb876d9dd5438f9.
''',
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            '''
{0}Revert "Issue #12: add the FizzBuzz class"{0}
This reverts commit 43065958923a14a05936887ccbb876d9dd5438f98923a14a05936887ccbb876d9dd5438f9.
'''.format(' '),
        ), expected_result)

    def test_message_with_one_issue_mark(self):
        expected_result = {'issue #12': ['add the FizzBuzz class']}
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            'Issue #12: add the FizzBuzz class\n',
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            ' Issue #12: add the FizzBuzz class\n',
        ), expected_result)

    def test_message_with_some_issues_marks(self):
        # A comma-separated list of marks attributes the message to each.
        expected_result = {
            'issue #5': ['add the FizzBuzz class'],
            'issue #12': ['add the FizzBuzz class'],
        }
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            'Issue #5, issue #12: add the FizzBuzz class\n',
        ), expected_result)
        self.assertEqual(process._process_commit_message(
            '43065958923a14a05936887ccbb876d9dd5438f9',
            ' Issue #5, issue #12: add the FizzBuzz class\n',
        ), expected_result)
class TestProcessGitHistory(unittest.TestCase):
    """Tests for process.process_git_history()."""

    def test_empty_commit_list(self):
        self.assertEqual(process.process_git_history([]), {})

    def test_unique_commits(self):
        # Commits on different days end up under different date keys.
        timestamp_1 = datetime.datetime(2017, 5, 5)
        timestamp_2 = datetime.datetime(2017, 5, 12)
        self.assertEqual(process.process_git_history([
            input_.Commit(
                '43065958923a14a05936887ccbb876d9dd5438f9',
                timestamp_1,
                'Issue #5: add the FizzBuzz class',
            ),
            input_.Commit(
                '7299cd3a63ca2553f5910c4f8a170f847bae419e',
                timestamp_2,
                'Issue #12: add the LinkedList class',
            ),
        ]), {
            timestamp_1.date(): {'issue #5': ['add the FizzBuzz class']},
            timestamp_2.date(): {'issue #12': ['add the LinkedList class']},
        })

    def test_commits_with_same_timestamps(self):
        # Commits are grouped by calendar date; time of day is discarded,
        # so timestamp_2 and timestamp_3 collapse into one key.
        timestamp_1 = datetime.datetime(2017, 5, 5)
        timestamp_2 = datetime.datetime(2017, 5, 12, 2, 4, 6)
        timestamp_3 = datetime.datetime(2017, 5, 12, 12, 34, 56)
        self.assertEqual(process.process_git_history([
            input_.Commit(
                '43065958923a14a05936887ccbb876d9dd5438f9',
                timestamp_1,
                'Issue #5: add the FizzBuzz class',
            ),
            input_.Commit(
                '7299cd3a63ca2553f5910c4f8a170f847bae419e',
                timestamp_1,
                'Issue #12: add the LinkedList class',
            ),
            input_.Commit(
                'b05b839efa17e9be1519eaa9271cc008c236037e',
                timestamp_2,
                'Issue #5: add the FizzBuzz class',
            ),
            input_.Commit(
                '54f532c2c628ddfca8629cd0d906201119a5fe4b',
                timestamp_3,
                'Issue #12: add the LinkedList class',
            ),
        ]), {
            timestamp_1.date(): {
                'issue #5': ['add the FizzBuzz class'],
                'issue #12': ['add the LinkedList class'],
            },
            timestamp_2.date(): {
                'issue #5': ['add the FizzBuzz class'],
                'issue #12': ['add the LinkedList class'],
            },
        })

    def test_commits_with_same_issues_marks(self):
        # Same-day commits with the same mark accumulate their messages.
        timestamp = datetime.datetime(2017, 5, 5)
        self.assertEqual(process.process_git_history([
            input_.Commit(
                '43065958923a14a05936887ccbb876d9dd5438f9',
                timestamp,
                'Issue #5: add the FizzBuzz class',
            ),
            input_.Commit(
                '7299cd3a63ca2553f5910c4f8a170f847bae419e',
                timestamp,
                'Issue #5: add the LinkedList class',
            ),
        ]), {timestamp.date(): {'issue #5': [
            'add the FizzBuzz class',
            'add the LinkedList class',
        ]}})

    def test_commits_with_some_issues_marks(self):
        # Multi-mark messages fan out to every mark.
        timestamp = datetime.datetime(2017, 5, 5)
        self.assertEqual(process.process_git_history([
            input_.Commit(
                '43065958923a14a05936887ccbb876d9dd5438f9',
                timestamp,
                'Issue #5, issue #12: add the FizzBuzz class',
            ),
            input_.Commit(
                '7299cd3a63ca2553f5910c4f8a170f847bae419e',
                timestamp,
                'Issue #5, issue #12: add the LinkedList class',
            ),
        ]), {timestamp.date(): {
            'issue #5': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ],
            'issue #12': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ],
        }})
class TestUniqueGitHistory(unittest.TestCase):
    """Tests for process.unique_git_history()."""

    def test_without_duplicates(self):
        # Already-unique histories pass through unchanged.
        data = {
            datetime.datetime(2017, 5, 5): {'issue #5': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
            datetime.datetime(2017, 5, 12): {'issue #12': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
        }
        self.assertEqual(process.unique_git_history(data), data)

    def test_with_duplicates(self):
        # Repeated messages are dropped; first-seen order is kept.
        timestamp_1 = datetime.datetime(2017, 5, 5)
        timestamp_2 = datetime.datetime(2017, 5, 12)
        self.assertEqual(process.unique_git_history({
            timestamp_1: {'issue #5': [
                'add the FizzBuzz class',
                'add the LinkedList class',
                'add the FizzBuzz class',
            ]},
            timestamp_2: {'issue #12': [
                'add the LinkedList class',
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
        }), {
            timestamp_1: {'issue #5': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
            timestamp_2: {'issue #12': [
                'add the LinkedList class',
                'add the FizzBuzz class',
            ]},
        })
class TestFormatMessages(unittest.TestCase):
    """Tests for format_._format_messages()."""

    def test_one_messages(self):
        # Prefix, mark and single message are joined on one line.
        self.assertEqual(format_._format_messages(
            'Test Project, ',
            'issue #12',
            ['add the FizzBuzz class'],
        ), 'Test Project, issue #12, a dd the FizzBuzz class'.replace('a dd', 'add'))

    def test_some_messages(self):
        # Additional messages continue on their own lines.
        self.assertEqual(format_._format_messages(
            'Test Project, ',
            'issue #12',
            [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ],
        ), '''Test Project, issue #12, add the FizzBuzz class
add the LinkedList class''')
class TestGetIssueMarkKey(unittest.TestCase):
    """Tests for format_._get_issue_mark_key()."""

    def test_common_issue(self):
        # An ordinary mark sorts by its numeric issue id.
        self.assertEqual(format_._get_issue_mark_key(('issue #12',)), 12)

    def test_special_issue(self):
        # The special pseudo-issue gets sys.maxsize so it sorts last.
        self.assertEqual(format_._get_issue_mark_key(
            (process.SPECIAL_ISSUE,),
        ), sys.maxsize)
class TestFormatIssuesMarks(unittest.TestCase):
    """Tests for format_._format_issues_marks()."""

    def test_one_issue_mark(self):
        # Output is a dated Markdown section containing a fenced block.
        self.assertEqual(format_._format_issues_marks(
            'Test Project',
            datetime.datetime(2017, 5, 5),
            {'issue #12': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
        ), '''## 2017-05-05
```
Test Project, issue #12, add the FizzBuzz class
add the LinkedList class
```''')

    def test_some_issues_marks(self):
        # Multiple marks share one fenced block; only the first carries
        # the project prefix.
        self.assertEqual(format_._format_issues_marks(
            'Test Project',
            datetime.datetime(2017, 5, 5),
            {
                'issue #5': [
                    'add the FizzBuzz class',
                    'add the LinkedList class',
                ],
                'issue #12': [
                    'add the FizzBuzz class',
                    'add the LinkedList class',
                ],
            },
        ), '''## 2017-05-05
```
Test Project, issue #5, add the FizzBuzz class
add the LinkedList class
issue #12, add the FizzBuzz class
add the LinkedList class
```''')
class TestFormatGitHistory(unittest.TestCase):
    """Tests for format_.format_git_history()."""

    def test_one_timestamp(self):
        # A single dated section under a top-level project heading.
        self.assertEqual(format_.format_git_history('Test Project', {
            datetime.datetime(2017, 5, 5): {'issue #12': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
        }), '''# Test Project
## 2017-05-05
```
Test Project, issue #12, add the FizzBuzz class
add the LinkedList class
```
''')

    def test_some_timestamps(self):
        # Sections appear in chronological order.
        self.assertEqual(format_.format_git_history('Test Project', {
            datetime.datetime(2017, 5, 5): {'issue #5': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
            datetime.datetime(2017, 5, 12): {'issue #12': [
                'add the FizzBuzz class',
                'add the LinkedList class',
            ]},
        }), '''# Test Project
## 2017-05-05
```
Test Project, issue #5, add the FizzBuzz class
add the LinkedList class
```
## 2017-05-12
```
Test Project, issue #12, add the FizzBuzz class
add the LinkedList class
```
''')
| 35.221918
| 94
| 0.574751
| 1,156
| 12,856
| 6.205882
| 0.074394
| 0.064399
| 0.087817
| 0.11918
| 0.889323
| 0.854614
| 0.816978
| 0.809451
| 0.799833
| 0.765821
| 0
| 0.140147
| 0.323429
| 12,856
| 364
| 95
| 35.318681
| 0.68464
| 0
| 0
| 0.73125
| 0
| 0
| 0.32584
| 0.08702
| 0
| 0
| 0
| 0
| 0.1
| 1
| 0.065625
| false
| 0
| 0.01875
| 0
| 0.10625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c06a2b9213572cede6ddff41228e4ed4752fb369
| 2,171
|
py
|
Python
|
eventtig/tests/test_event_load_yaml_file.py
|
eventtig/eventtig-gitengine
|
b1e619d8385691b4b61dc4dd33b6cb08b9ad56cc
|
[
"MIT"
] | null | null | null |
eventtig/tests/test_event_load_yaml_file.py
|
eventtig/eventtig-gitengine
|
b1e619d8385691b4b61dc4dd33b6cb08b9ad56cc
|
[
"MIT"
] | null | null | null |
eventtig/tests/test_event_load_yaml_file.py
|
eventtig/eventtig-gitengine
|
b1e619d8385691b4b61dc4dd33b6cb08b9ad56cc
|
[
"MIT"
] | null | null | null |
import pytest
from eventtig.event import Event
from eventtig.exceptions import EndIsBeforeStartException
def test_start_1():
    """Zero-padded "start" values populate the start_* fields."""
    event = Event()
    event.load_from_yaml_data(
        "id",
        {"start": "2021-01-03 07:08", "end": "2021-01-03 08:08"},
        "events/id/event.yaml",
    )
    assert event.start_year == 2021
    assert event.start_month == 1
    assert event.start_day == 3
    assert event.start_hour == 7
    assert event.start_minute == 8
def test_start_and_end_same_1():
    """An end equal to the start is accepted and fills the end_* fields."""
    event = Event()
    event.load_from_yaml_data(
        "id",
        {"start": "2021-01-03 07:08", "end": "2021-01-03 07:08"},
        "events/id/event.yaml",
    )
    assert event.start_year == 2021
    assert event.start_month == 1
    assert event.start_day == 3
    assert event.start_hour == 7
    assert event.start_minute == 8
    assert event.end_year == 2021
    assert event.end_month == 1
    assert event.end_day == 3
    assert event.end_hour == 7
    assert event.end_minute == 8
def test_start_end_no_padding_1():
    """Hour/minute values without zero padding ("7:8") parse the same."""
    event = Event()
    event.load_from_yaml_data(
        "id",
        {"start": "2021-01-03 7:8", "end": "2021-01-03 8:8"},
        "events/id/event.yaml",
    )
    assert event.start_year == 2021
    assert event.start_month == 1
    assert event.start_day == 3
    assert event.start_hour == 7
    assert event.start_minute == 8
def test_start_with_no_end_1():
    """When "end" is omitted, the end_* fields mirror the start."""
    event = Event()
    event.load_from_yaml_data(
        "id", {"start": "2021-01-03 07:08"}, "events/id/event.yaml"
    )
    assert event.start_year == 2021
    assert event.start_month == 1
    assert event.start_day == 3
    assert event.start_hour == 7
    assert event.start_minute == 8
    # End just the same as the start
    assert event.end_year == 2021
    assert event.end_month == 1
    assert event.end_day == 3
    assert event.end_hour == 7
    assert event.end_minute == 8
def test_end_is_before_start_1():
    """An end earlier than the start raises EndIsBeforeStartException."""
    event = Event()
    with pytest.raises(EndIsBeforeStartException):
        event.load_from_yaml_data(
            "id",
            {"start": "2021-01-03 7:8", "end": "2021-01-03 7:7"},
            "events/id/event.yaml",
        )
| 27.1375
| 67
| 0.625979
| 319
| 2,171
| 4.050157
| 0.131661
| 0.255418
| 0.247678
| 0.088235
| 0.805728
| 0.801858
| 0.801858
| 0.801858
| 0.801858
| 0.801858
| 0
| 0.093521
| 0.24643
| 2,171
| 79
| 68
| 27.481013
| 0.69621
| 0.013819
| 0
| 0.716418
| 0
| 0
| 0.132305
| 0
| 0
| 0
| 0
| 0
| 0.447761
| 1
| 0.074627
| false
| 0
| 0.044776
| 0
| 0.119403
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
2229eb95ee390baf47c1a861d5cabcc54a1e9cf4
| 725
|
py
|
Python
|
tests/test_provider_camptocamp_puppetdb.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_camptocamp_puppetdb.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_camptocamp_puppetdb.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_camptocamp_puppetdb.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:25:11 UTC)
def test_provider_import():
    # Smoke test: the generated provider module must be importable.
    import terrascript.provider.camptocamp.puppetdb
def test_resource_import():
    # Smoke test: the generated resource class must be importable.
    from terrascript.resource.camptocamp.puppetdb import puppetdb_node
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.camptocamp.puppetdb
#
# t = terrascript.provider.camptocamp.puppetdb.puppetdb()
# s = str(t)
#
# assert 'https://github.com/camptocamp/terraform-provider-puppetdb' in s
# assert '1.2.0' in s
| 29
| 80
| 0.754483
| 95
| 725
| 5.642105
| 0.6
| 0.16791
| 0.19403
| 0.20709
| 0.160448
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024351
| 0.150345
| 725
| 24
| 81
| 30.208333
| 0.845779
| 0.706207
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3f105db3d8daa855d45e55de6f239a61b5036acd
| 36,123
|
py
|
Python
|
adjutant/actions/v1/tests/test_user_actions.py
|
knikolla/adjutant
|
ad19ed13b65b583e65b5a19e04a0f0403c366b09
|
[
"Apache-2.0"
] | null | null | null |
adjutant/actions/v1/tests/test_user_actions.py
|
knikolla/adjutant
|
ad19ed13b65b583e65b5a19e04a0f0403c366b09
|
[
"Apache-2.0"
] | null | null | null |
adjutant/actions/v1/tests/test_user_actions.py
|
knikolla/adjutant
|
ad19ed13b65b583e65b5a19e04a0f0403c366b09
|
[
"Apache-2.0"
] | 1
|
2022-02-16T22:26:15.000Z
|
2022-02-16T22:26:15.000Z
|
# Copyright (C) 2015 Catalyst IT Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from confspirator.tests import utils as conf_utils
from adjutant.actions.v1.users import (
EditUserRolesAction,
NewUserAction,
ResetUserPasswordAction,
UpdateUserEmailAction,
)
from adjutant.api.models import Task
from adjutant.common.tests import fake_clients
from adjutant.common.tests.fake_clients import setup_identity_cache
from adjutant.common.tests.utils import AdjutantTestCase
from adjutant.config import CONF
@mock.patch("adjutant.common.user_store.IdentityManager", fake_clients.FakeManager)
@conf_utils.modify_conf(
CONF,
operations={
"adjutant.identity.role_mapping": [
{
"operation": "override",
"value": {
"admin": [
"project_admin",
"project_mod",
"member",
"heat_stack_owner",
],
"project_admin": [
"project_mod",
"member",
"heat_stack_owner",
"project_admin",
],
"project_mod": [
"member",
"heat_stack_owner",
"project_mod",
],
},
},
],
},
)
class UserActionTests(AdjutantTestCase):
    def test_new_user(self):
        """
        Test the default case, all valid.
        No existing user, valid tenant.
        """
        project = fake_clients.FakeProject(name="test_project")
        setup_identity_cache(projects=[project])
        task = Task.objects.create(
            keystone_user={
                "roles": ["admin", "project_mod"],
                "project_id": project.id,
                "project_domain_id": "default",
            }
        )
        data = {
            "email": "test@example.com",
            "project_id": project.id,
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        # Drive the full prepare -> approve -> submit action lifecycle.
        action = NewUserAction(data, task=task, order=1)
        action.prepare()
        self.assertEqual(action.valid, True)
        action.approve()
        self.assertEqual(action.valid, True)
        token_data = {"password": "123456"}
        action.submit(token_data)
        self.assertEqual(action.valid, True)
        # Submitting creates the user in the fake identity backend.
        self.assertEqual(len(fake_clients.identity_cache["new_users"]), 1)
        fake_client = fake_clients.FakeManager()
        user = fake_client.find_user(name="test@example.com", domain="default")
        self.assertEqual(user.email, "test@example.com")
        self.assertEqual(user.password, "123456")
        roles = fake_client._get_roles_as_names(user, project)
        self.assertEqual(roles, ["member"])
    def test_new_user_existing(self):
        """
        Existing user, valid tenant, no role.
        """
        project = fake_clients.FakeProject(name="test_project")
        user = fake_clients.FakeUser(
            name="test@example.com", password="123", email="test@example.com"
        )
        setup_identity_cache(projects=[project], users=[user])
        task = Task.objects.create(
            keystone_user={
                "roles": ["admin", "project_mod"],
                "project_id": project.id,
                "project_domain_id": "default",
            }
        )
        data = {
            "email": "test@example.com",
            "project_id": project.id,
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        action = NewUserAction(data, task=task, order=1)
        action.prepare()
        self.assertEqual(action.valid, True)
        action.approve()
        self.assertEqual(action.valid, True)
        # Empty token: no password needed since the user already exists.
        token_data = {}
        action.submit(token_data)
        self.assertEqual(action.valid, True)
        # The existing user simply gains the requested role.
        fake_client = fake_clients.FakeManager()
        roles = fake_client._get_roles_as_names(user, project)
        self.assertEqual(roles, ["member"])
    def test_new_user_disabled(self):
        """
        Disabled user, valid existing tenant, no role.
        """
        project = fake_clients.FakeProject(name="test_project")
        user = fake_clients.FakeUser(
            name="test@example.com",
            password="123",
            email="test@example.com",
            enabled=False,
        )
        setup_identity_cache(projects=[project], users=[user])
        task = Task.objects.create(
            keystone_user={
                "roles": ["admin", "project_mod"],
                "project_id": project.id,
                "project_domain_id": "default",
            }
        )
        data = {
            "email": "test@example.com",
            "project_id": project.id,
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        action = NewUserAction(data, task=task, order=1)
        action.prepare()
        self.assertEqual(action.valid, True)
        action.approve()
        self.assertEqual(action.valid, True)
        token_data = {"password": "123456"}
        action.submit(token_data)
        self.assertEqual(action.valid, True)
        self.assertEqual(len(fake_clients.identity_cache["users"]), 2)
        fake_client = fake_clients.FakeManager()
        user = fake_client.find_user(name="test@example.com", domain="default")
        self.assertEqual(user.email, "test@example.com")
        self.assertEqual(user.password, "123456")
        # The disabled account is re-enabled with the new password.
        self.assertTrue(user.enabled)
        roles = fake_client._get_roles_as_names(user, project)
        self.assertEqual(roles, ["member"])
    def test_new_user_existing_role(self):
        """
        Existing user, valid tenant, has role.
        Should complete the action as if no role,
        but actually do nothing.
        """
        project = fake_clients.FakeProject(name="test_project")
        user = fake_clients.FakeUser(
            name="test@example.com", password="123", email="test@example.com"
        )
        # The requested role is already assigned before the action runs.
        assignment = fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="member",
            user={"id": user.id},
        )
        setup_identity_cache(
            projects=[project], users=[user], role_assignments=[assignment]
        )
        task = Task.objects.create(
            keystone_user={
                "roles": ["admin", "project_mod"],
                "project_id": project.id,
                "project_domain_id": "default",
            }
        )
        data = {
            "email": "test@example.com",
            "project_id": project.id,
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        action = NewUserAction(data, task=task, order=1)
        action.prepare()
        self.assertEqual(action.valid, True)
        action.approve()
        self.assertEqual(action.valid, True)
        # The action short-circuits to "complete" at approve time.
        self.assertEqual(action.action.state, "complete")
        token_data = {}
        action.submit(token_data)
        self.assertEqual(action.valid, True)
        fake_client = fake_clients.FakeManager()
        roles = fake_client._get_roles_as_names(user, project)
        self.assertEqual(roles, ["member"])
    def test_new_user_no_tenant(self):
        """
        No user, no tenant.
        """
        # Empty identity cache: the referenced project does not exist.
        setup_identity_cache()
        task = Task.objects.create(
            keystone_user={
                "roles": ["admin", "project_mod"],
                "project_id": "test_project_id",
                "project_domain_id": "default",
            }
        )
        data = {
            "email": "test@example.com",
            "project_id": "test_project_id",
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        action = NewUserAction(data, task=task, order=1)
        # The action stays invalid through every lifecycle stage.
        action.prepare()
        self.assertEqual(action.valid, False)
        action.approve()
        self.assertEqual(action.valid, False)
        token_data = {}
        action.submit(token_data)
        self.assertEqual(action.valid, False)
    def test_new_user_wrong_project(self):
        """
        Existing user, valid project, project does not match keystone user.
        Action should be invalid.
        """
        project = fake_clients.FakeProject(name="test_project")
        user = fake_clients.FakeUser(
            name="test@example.com", password="123", email="test@example.com"
        )
        setup_identity_cache(projects=[project], users=[user])
        task = Task.objects.create(
            keystone_user={
                "roles": ["project_mod"],
                "project_id": "test_project_id",
                "project_domain_id": "default",
            }
        )
        # The data targets a different project than the keystone user's.
        data = {
            "email": "test@example.com",
            "project_id": "test_project_id_1",
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        action = NewUserAction(data, task=task, order=1)
        action.prepare()
        self.assertEqual(action.valid, False)
    def test_new_user_only_member(self):
        """
        Existing user, valid project, no edit permissions.
        Action should be invalid.
        """
        project = fake_clients.FakeProject(name="test_project")
        user = fake_clients.FakeUser(
            name="test@example.com", password="123", email="test@example.com"
        )
        setup_identity_cache(projects=[project], users=[user])
        # The acting keystone user only holds "member" — not enough to
        # grant roles per the role_mapping config on this test class.
        task = Task.objects.create(
            keystone_user={
                "roles": ["member"],
                "project_id": project.id,
                "project_domain_id": "default",
            }
        )
        data = {
            "email": "test@example.com",
            "project_id": project.id,
            "roles": ["member"],
            "inherited_roles": [],
            "domain_id": "default",
        }
        action = NewUserAction(data, task=task, order=1)
        action.prepare()
        self.assertFalse(action.valid)
def test_new_user_wrong_domain(self):
    """
    Existing user, valid project, invalid domain.
    Action should be invalid.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    assignment = fake_clients.FakeRoleAssignment(
        scope={"project": {"id": project.id}},
        role_name="member",
        user={"id": user.id},
    )
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=[assignment]
    )
    task = Task.objects.create(
        keystone_user={
            "roles": ["project_admin"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    # domain_id does not match the keystone_user's project_domain_id.
    data = {
        "email": "test@example.com",
        "project_id": project.id,
        "roles": ["member"],
        "inherited_roles": [],
        "domain_id": "not_default",
    }
    action = NewUserAction(data, task=task, order=1)
    action.prepare()
    self.assertFalse(action.valid)
def test_reset_user_password(self):
    """
    Base case, existing user.
    """
    existing_user = fake_clients.FakeUser(
        name="test@example.com", password="gibberish", email="test@example.com"
    )
    setup_identity_cache(users=[existing_user])

    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    action = ResetUserPasswordAction(
        {
            "domain_name": "Default",
            "email": "test@example.com",
        },
        task=task,
        order=1,
    )

    # Full lifecycle: prepare -> approve -> submit, valid at every stage.
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    action.submit({"password": "123456"})
    self.assertEqual(action.valid, True)

    # The cached user's password must now be the submitted one.
    self.assertEqual(
        fake_clients.identity_cache["users"][existing_user.id].password, "123456"
    )
def test_reset_user_password_case_insensitive(self):
    """
    Existing user, ensure action is case insensitive.
    USERNAME_IS_EMAIL=True
    """
    # Stored email is lower-case; the request uses an upper-case local part.
    user = fake_clients.FakeUser(
        name="test@example.com", password="gibberish", email="test@example.com"
    )
    setup_identity_cache(users=[user])
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_name": "Default",
        "email": "TEST@example.com",
    }
    action = ResetUserPasswordAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {"password": "123456"}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    # Password updated despite the case mismatch in the request email.
    self.assertEqual(
        fake_clients.identity_cache["users"][user.id].password, "123456"
    )
def test_reset_user_password_no_user(self):
    """
    Reset password for a non-existent user.

    The action must be invalid at every stage of the lifecycle.
    """
    # Empty identity cache: no users exist at all.
    setup_identity_cache()
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_name": "Default",
        "email": "test@example.com",
    }
    action = ResetUserPasswordAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, False)
    action.approve()
    self.assertEqual(action.valid, False)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, False)
def test_edit_user_roles_add(self):
    """
    Add roles to existing user.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    # User starts with no role assignments on the project.
    setup_identity_cache(projects=[project], users=[user])
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["member", "project_mod"],
        "inherited_roles": [],
        "remove": False,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    # Both requested roles must now be assigned on the project.
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(sorted(roles), sorted(["member", "project_mod"]))
def test_edit_user_roles_add_complete(self):
    """
    Add roles the user already holds.

    The action should complete immediately during prepare, since there
    is nothing to change, and the assignments remain untouched.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    # Pre-assign exactly the roles the action will try to add.
    assignments = [
        fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="member",
            user={"id": user.id},
        ),
        fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="project_mod",
            user={"id": user.id},
        ),
    ]
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=assignments
    )
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["member", "project_mod"],
        "inherited_roles": [],
        "remove": False,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    # No-op add: action short-circuits to "complete" at prepare time.
    self.assertEqual(action.action.state, "complete")
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["member", "project_mod"])
def test_edit_user_roles_remove(self):
    """
    Remove roles from existing user.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    # User starts with both "member" and "project_mod" on the project.
    assignments = [
        fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="member",
            user={"id": user.id},
        ),
        fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="project_mod",
            user={"id": user.id},
        ),
    ]
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=assignments
    )
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    # remove=True: strip "project_mod", leaving only "member".
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["project_mod"],
        "inherited_roles": [],
        "remove": True,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["member"])
def test_edit_user_roles_remove_complete(self):
    """
    Remove roles from user that does not have them.

    A no-op removal: the action completes at prepare time and the
    existing assignments are left untouched.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    # Only "member" is assigned; "project_mod" (the removal target) is not.
    assignment = fake_clients.FakeRoleAssignment(
        scope={"project": {"id": project.id}},
        role_name="member",
        user={"id": user.id},
    )
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=[assignment]
    )
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["project_mod"],
        "inherited_roles": [],
        "remove": True,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    # Nothing to remove, so the action short-circuits to "complete".
    self.assertEqual(action.action.state, "complete")
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["member"])
def test_edit_user_roles_can_manage_all(self):
    """
    Confirm that you cannot edit a user unless all their roles
    can be managed by you.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    # Target user holds "project_admin", which a "project_mod" actor
    # cannot manage — that should block the whole edit.
    assignments = [
        fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="member",
            user={"id": user.id},
        ),
        fake_clients.FakeRoleAssignment(
            scope={"project": {"id": project.id}},
            role_name="project_admin",
            user={"id": user.id},
        ),
    ]
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=assignments
    )
    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["project_mod"],
        "inherited_roles": [],
        "remove": False,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, False)
    # Assignments are unchanged after the rejected edit.
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["member", "project_admin"])
def test_edit_user_roles_modified_config(self):
    """
    Tests that the role mappings do come from config and that they
    are enforced.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    assignment = fake_clients.FakeRoleAssignment(
        scope={"project": {"id": project.id}},
        role_name="project_mod",
        user={"id": user.id},
    )
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=[assignment]
    )
    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["heat_stack_owner"],
        "inherited_roles": [],
        "remove": False,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    # Valid under the default mapping, which lets project_mod grant
    # heat_stack_owner.
    self.assertEqual(action.valid, True)
    # Change config
    with conf_utils.modify_conf(
        CONF,
        operations={
            "adjutant.identity.role_mapping": [
                {
                    "operation": "update",
                    "value": {
                        "project_mod": [
                            "member",
                            "project_mod",
                        ],
                    },
                },
            ],
        },
    ):
        # heat_stack_owner no longer manageable by project_mod, so the
        # same action is rejected under the modified mapping.
        action.approve()
        self.assertEqual(action.valid, False)
        token_data = {}
        action.submit(token_data)
        self.assertEqual(action.valid, False)
    # After Settings Reset
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["project_mod", "heat_stack_owner"])
@conf_utils.modify_conf(
    CONF,
    operations={
        "adjutant.identity.role_mapping": [
            {
                "operation": "update",
                "value": {
                    "project_mod": [
                        "member",
                        "heat_stack_owner",
                        "project_mod",
                        "new_role",
                    ],
                },
            },
        ],
    },
)
def test_edit_user_roles_modified_config_add(self):
    """
    Tests that the role mappings do come from config and a new role
    added there will be allowed.
    """
    project = fake_clients.FakeProject(name="test_project")
    user = fake_clients.FakeUser(
        name="test@example.com", password="123", email="test@example.com"
    )
    assignment = fake_clients.FakeRoleAssignment(
        scope={"project": {"id": project.id}},
        role_name="project_mod",
        user={"id": user.id},
    )
    setup_identity_cache(
        projects=[project], users=[user], role_assignments=[assignment]
    )
    # Register the new role in keystone so it can actually be granted.
    new_role = fake_clients.FakeRole("new_role")
    fake_clients.identity_cache["roles"][new_role.id] = new_role
    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    data = {
        "domain_id": "default",
        "user_id": user.id,
        "project_id": project.id,
        "roles": ["new_role"],
        "inherited_roles": [],
        "remove": False,
    }
    action = EditUserRolesAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    fake_client = fake_clients.FakeManager()
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["project_mod", "new_role"])
# Simple positive tests for when USERNAME_IS_EMAIL=False
@conf_utils.modify_conf(
    CONF,
    operations={
        "adjutant.identity.username_is_email": [
            {"operation": "override", "value": False},
        ],
    },
)
def test_create_user_email_not_username(self):
    """
    Test the default case, all valid.
    No existing user, valid tenant.
    Different username from email address
    """
    project = fake_clients.FakeProject(name="test_project")
    setup_identity_cache(projects=[project])
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": project.id,
            "project_domain_id": "default",
        }
    )
    # With username_is_email=False a distinct "username" key is required.
    data = {
        "username": "test_user",
        "email": "test@example.com",
        "project_id": project.id,
        "roles": ["member"],
        "inherited_roles": [],
        "domain_id": "default",
    }
    action = NewUserAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {"password": "123456"}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    # Cache already held one user before this action — TODO confirm.
    self.assertEqual(len(fake_clients.identity_cache["users"]), 2)
    fake_client = fake_clients.FakeManager()
    user = fake_client.find_user(name="test_user", domain="default")
    self.assertEqual(user.email, "test@example.com")
    self.assertEqual(user.password, "123456")
    self.assertTrue(user.enabled)
    roles = fake_client._get_roles_as_names(user, project)
    self.assertEqual(roles, ["member"])
@conf_utils.modify_conf(
    CONF,
    operations={
        "adjutant.identity.username_is_email": [
            {"operation": "override", "value": False},
        ],
    },
)
def test_reset_user_email_not_username(self):
    """
    Base case, existing user.
    Username not email address
    """
    user = fake_clients.FakeUser(
        name="test_user", password="gibberish", email="test@example.com"
    )
    setup_identity_cache(users=[user])
    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    # Both username and email supplied; lookup is by username here.
    data = {
        "username": "test_user",
        "domain_name": "Default",
        "email": "test@example.com",
    }
    action = ResetUserPasswordAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {"password": "123456"}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    fake_client = fake_clients.FakeManager()
    user = fake_client.find_user(name="test_user", domain="default")
    self.assertEqual(user.email, "test@example.com")
    self.assertEqual(user.password, "123456")
@conf_utils.modify_conf(
    CONF,
    operations={
        "adjutant.identity.username_is_email": [
            {"operation": "override", "value": False},
        ],
    },
)
def test_reset_user_password_case_insensitive_not_username(self):
    """
    Existing user, ensure action is case insensitive.
    USERNAME_IS_EMAIL=False
    """
    user = fake_clients.FakeUser(
        name="test_USER", password="gibberish", email="test@example.com"
    )
    setup_identity_cache(users=[user])
    task = Task.objects.create(
        keystone_user={
            "roles": ["admin", "project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    # Email differs only in case from the stored address.
    data = {
        "domain_name": "Default",
        "username": "test_USER",
        "email": "TEST@example.com",
    }
    action = ResetUserPasswordAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    token_data = {"password": "123456"}
    action.submit(token_data)
    self.assertEqual(action.valid, True)
    self.assertEqual(
        fake_clients.identity_cache["users"][user.id].password, "123456"
    )
def test_update_email(self):
    """
    Base test case for user updating email address.
    """
    existing_user = fake_clients.FakeUser(
        name="test@example.com", password="gibberish", email="test@example.com"
    )
    setup_identity_cache(users=[existing_user])

    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    action = UpdateUserEmailAction(
        {
            "new_email": "new_test@example.com",
            "user_id": existing_user.id,
        },
        task=task,
        order=1,
    )

    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    action.submit({"confirm": True})
    self.assertEqual(action.valid, True)

    # USERNAME_IS_EMAIL=True here, so both email and name are updated.
    cached_user = fake_clients.identity_cache["users"][existing_user.id]
    self.assertEqual(cached_user.email, "new_test@example.com")
    self.assertEqual(cached_user.name, "new_test@example.com")
def test_update_email_invalid_user(self):
    """
    Test case for an invalid user being updated.
    """
    # No users exist in the identity cache at all.
    setup_identity_cache()

    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    action = UpdateUserEmailAction(
        {
            "new_email": "new_test@example.com",
            "user_id": "non_user_id",
        },
        task=task,
        order=1,
    )

    # Invalid at every lifecycle stage since the user id resolves to nothing.
    action.prepare()
    self.assertEqual(action.valid, False)
    action.approve()
    self.assertEqual(action.valid, False)
    action.submit({"confirm": True})
    self.assertEqual(action.valid, False)
@conf_utils.modify_conf(
    CONF,
    operations={
        "adjutant.identity.username_is_email": [
            {"operation": "override", "value": False},
        ],
    },
)
def test_update_email_username_not_email(self):
    """
    Test case for a user attempting to update with an invalid email.

    NOTE(review): the action still succeeds and stores the malformed
    address — email-format validation appears to happen elsewhere
    (e.g. serializer level), not in the action. Confirm.
    """
    user = fake_clients.FakeUser(
        name="test_user", password="gibberish", email="test@example.com"
    )
    setup_identity_cache(users=[user])
    task = Task.objects.create(
        keystone_user={
            "roles": ["project_mod"],
            "project_id": "test_project_id",
            "project_domain_id": "default",
        }
    )
    # "new_testexample.com" is deliberately missing the '@'.
    data = {
        "new_email": "new_testexample.com",
        "user_id": user.id,
    }
    action = UpdateUserEmailAction(data, task=task, order=1)
    action.prepare()
    self.assertEqual(action.valid, True)
    action.approve()
    self.assertEqual(action.valid, True)
    action.submit({"confirm": True})
    self.assertEqual(action.valid, True)
    self.assertEqual(
        fake_clients.identity_cache["users"][user.id].email, "new_testexample.com"
    )
    # With username_is_email=False the username is left untouched.
    self.assertEqual(
        fake_clients.identity_cache["users"][user.id].name, "test_user"
    )
| 28.669048
| 87
| 0.537552
| 3,452
| 36,123
| 5.422364
| 0.065469
| 0.049044
| 0.075168
| 0.088898
| 0.891548
| 0.877604
| 0.875628
| 0.86911
| 0.863233
| 0.862111
| 0
| 0.00672
| 0.340835
| 36,123
| 1,259
| 88
| 28.691819
| 0.779388
| 0.058633
| 0
| 0.765766
| 0
| 0
| 0.15937
| 0.008162
| 0
| 0
| 0
| 0
| 0.113739
| 1
| 0.027027
| false
| 0.048423
| 0.009009
| 0
| 0.037162
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f25a67fcde697a2ea0a0efacc2a51a59356ddb9
| 16,015
|
py
|
Python
|
kafka_python_handler/__init__.py
|
ezhil-g/kafka-python-handler
|
4815dde8bfc974af69115cc2d3cdd2feac731a52
|
[
"Apache-2.0"
] | null | null | null |
kafka_python_handler/__init__.py
|
ezhil-g/kafka-python-handler
|
4815dde8bfc974af69115cc2d3cdd2feac731a52
|
[
"Apache-2.0"
] | null | null | null |
kafka_python_handler/__init__.py
|
ezhil-g/kafka-python-handler
|
4815dde8bfc974af69115cc2d3cdd2feac731a52
|
[
"Apache-2.0"
] | null | null | null |
from kafka_python_handler.handler import Handler
from kafka_python_handler.producer import Producer
| 4,003.75
| 15,914
| 0.00562
| 14
| 16,015
| 6.142857
| 0.428571
| 0.209302
| 0.348837
| 0.511628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.994255
| 16,015
| 3
| 15,915
| 5,338.333333
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
58ba1c0a5b42ccd1eebf1d15715d3d759792100d
| 116
|
py
|
Python
|
dqn_tetris/gym_tetris/envs/__init__.py
|
joemeyer1/keras-rl-tetris
|
bd24fa245ff2d7b98a6390cc4f55e34a38443642
|
[
"MIT"
] | null | null | null |
dqn_tetris/gym_tetris/envs/__init__.py
|
joemeyer1/keras-rl-tetris
|
bd24fa245ff2d7b98a6390cc4f55e34a38443642
|
[
"MIT"
] | null | null | null |
dqn_tetris/gym_tetris/envs/__init__.py
|
joemeyer1/keras-rl-tetris
|
bd24fa245ff2d7b98a6390cc4f55e34a38443642
|
[
"MIT"
] | null | null | null |
from gym_tetris.envs.tetris_env import TetrisEnv
from gym_tetris.envs.tetris_extrahard_env import TetrisExtraHardEnv
| 58
| 67
| 0.905172
| 17
| 116
| 5.882353
| 0.529412
| 0.14
| 0.26
| 0.34
| 0.46
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060345
| 116
| 2
| 67
| 58
| 0.917431
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
58e1d46b2f2dd109c85d287273bcdd08b29e6ce1
| 7,072
|
py
|
Python
|
src/zfr/dataobjects/plan.py
|
nathonfowlie/python.zfr
|
3158fa41278e862341c2676df6a6b5924b1c27a1
|
[
"MIT"
] | null | null | null |
src/zfr/dataobjects/plan.py
|
nathonfowlie/python.zfr
|
3158fa41278e862341c2676df6a6b5924b1c27a1
|
[
"MIT"
] | null | null | null |
src/zfr/dataobjects/plan.py
|
nathonfowlie/python.zfr
|
3158fa41278e862341c2676df6a6b5924b1c27a1
|
[
"MIT"
] | null | null | null |
"""Data objects used to manage test plans."""
import datetime
from dataclasses import dataclass, field
from typing import Dict, List, Optional
from zfr.dataobjects import Comment
from zfr.dataobjects.cycle import TestCycle
@dataclass(frozen=True)
class Attachment:
    """Represents a file attachment on a test plan, cycle or case.

    _See Also_:
        [Plan][zfr.dataobjects.plan.Plan]
    """

    # Plain immutable defaults are safe on a frozen dataclass and are
    # equivalent to the ``field(default_factory=...)`` zero values.
    id: int = 0
    """Unique identifier for the attachment."""

    url: str = ""
    """Url that the attachment can be downloaded from."""

    filename: str = ""
    """Name of the attached file."""

    filesize: int = 0
    """Attachment file size (in bytes)."""
@dataclass
class Plan:
    """Represents an existing test plan.

    _See Also_:
        [Attachment][zfr.dataobjects.plan.Attachment],
        [Comment][zfr.dataobjects.Comment],
        [TestCycle][zfr.dataobjects.cycle.TestCycle],
        [PlanCreate][zfr.dataobjects.plan.PlanCreate],
        [PlanUpdate][zfr.dataobjects.plan.PlanUpdate]
    """

    attachments: Optional[List[Attachment]] = field(default_factory=list)
    """List of attachments added to the test plan."""

    comments: Optional[List[Comment]] = field(default_factory=list)
    """List of comments added by users."""

    created_by: str = field(default_factory=str)
    """Username of the user that created the plan."""

    # Annotation fixed: the default is None, so the field is Optional.
    created_on: Optional[datetime.datetime] = None
    """Date and time that the plan was created."""

    custom_fields: Optional[Dict[str, str]] = field(default_factory=dict)
    """Custom fields associated with the plan, used to store additional metadata."""

    folder: str = field(default_factory=str)
    """Folder used to logically group plans."""

    issue_links: Optional[List[str]] = field(default_factory=list)
    """Jira issues that are associated with the plan."""

    key: str = field(default_factory=str)
    """Unique key for the plan. (eg: MYPROJECT-P29)."""

    labels: Optional[List[str]] = field(default_factory=list)
    """Additional labels that can be used to filter plans."""

    name: str = field(default_factory=str)
    """Name of the plan."""

    objective: str = field(default_factory=str)
    """Plan objective(s).

    ???+ note "HTML"
        This field can accept basic HTML to format test (bold, italic,
        underline, links, paragraphs).
    """

    owner: str = field(default_factory=str)
    """Username of the user responsible for maintaining the test plan."""

    project_key: str = field(default_factory=str)
    """Project key of the jira project the plan relates to. (eg: MYPROJECT)."""

    status: str = field(default_factory=str)
    """Indicates whether the test plan has been approved for use.

    Valid values are:

    - Draft
    - Approved
    - Deprecated
    """

    test_runs: Optional[List[TestCycle]] = field(default_factory=list)
    """Historical list of test cycles executed against the test plan."""

    updated_by: str = field(default_factory=str)
    """Username of the user that last updated the test plan."""

    # Annotation fixed: the default is None, so the field is Optional.
    updated_on: Optional[datetime.datetime] = None
    """Date and time that the test plan was last updated."""
@dataclass
class PlanCreate:
    """Used to create a new test plan.

    _See Also_:
        [Comment][zfr.dataobjects.Comment],
        [TestCycle][zfr.dataobjects.cycle.TestCycle],
        [Plan][zfr.dataobjects.plan.Plan],
        [PlanUpdate][zfr.dataobjects.plan.PlanUpdate]
    """

    attachments: Optional[List[str]] = field(default_factory=list)
    """List of attachments added to the test plan."""

    custom_fields: Optional[Dict[str, str]] = field(default_factory=dict)
    """Custom fields associated with the plan, used to store additional metadata."""

    folder: str = field(default_factory=str)
    """Folder used to logically group plans."""

    issue_links: Optional[List[str]] = field(default_factory=list)
    """Jira issues that are associated with the plan."""

    labels: Optional[List[str]] = field(default_factory=list)
    """Additional labels that can be used to filter plans."""

    name: str = field(default_factory=str)
    """Name of the plan."""

    objective: str = field(default_factory=str)
    """Plan objective(s).

    ???+ note "HTML"
        This field can accept basic HTML to format test (bold, italic,
        underline, links, paragraphs).
    """

    owner: str = field(default_factory=str)
    """Username of the user responsible for maintaining the test plan."""

    project_key: str = field(default_factory=str)
    """Project key of the jira project the plan relates to. (eg: MYPROJECT)."""

    status: str = field(default_factory=str)
    """Indicates whether the test plan has been approved for use.

    Valid values are:

    - Draft
    - Approved
    - Deprecated
    """

    test_run_keys: Optional[List[str]] = field(default_factory=list)
    """Historical list of test cycles executed against the test plan."""
@dataclass
class PlanUpdate:
    """Used to update an existing test plan.

    _See Also_:
        [Attachment][zfr.dataobjects.plan.Attachment],
        [Comment][zfr.dataobjects.Comment],
        [TestCycle][zfr.dataobjects.cycle.TestCycle],
        [Plan][zfr.dataobjects.plan.Plan],
        [PlanCreate][zfr.dataobjects.plan.PlanCreate]
    """

    attachments: Optional[List[str]] = field(default_factory=list)
    """List of attachments added to the test plan."""

    custom_fields: Optional[Dict[str, str]] = field(default_factory=dict)
    """Custom fields associated with the plan, used to store additional metadata."""

    folder: str = field(default_factory=str)
    """Folder used to logically group plans."""

    issue_links: Optional[List[str]] = field(default_factory=list)
    """Jira issues that are associated with the plan."""

    key: str = field(default_factory=str)
    """Test plan key (eg: MYPROJECT-P24)."""

    labels: Optional[List[str]] = field(default_factory=list)
    """Additional labels that can be used to filter plans."""

    name: str = field(default_factory=str)
    """Name of the plan."""

    objective: str = field(default_factory=str)
    """Plan objective(s).

    ???+ note "HTML"
        This field can accept basic HTML to format test (bold, italic,
        underline, links, paragraphs).
    """

    owner: str = field(default_factory=str)
    """Username of the user responsible for maintaining the test plan."""

    status: str = field(default_factory=str)
    """Indicates whether the test plan has been approved for use.

    Valid values are:

    - Draft
    - Approved
    - Deprecated
    """

    test_runs: Optional[List[str]] = field(default_factory=list)
    """Historical list of test cycles executed against the test plan."""
| 32.440367
| 81
| 0.64918
| 863
| 7,072
| 5.244496
| 0.15759
| 0.108705
| 0.172117
| 0.174989
| 0.834512
| 0.795404
| 0.788997
| 0.788997
| 0.759611
| 0.741935
| 0
| 0.000738
| 0.233739
| 7,072
| 217
| 82
| 32.589862
| 0.834471
| 0.133484
| 0
| 0.589286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.089286
| 0
| 0.928571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
4504296cfb9b5ea548c82947cd2a4afa1c2a1a29
| 2,751
|
py
|
Python
|
ikibardin/power-fist-segmentation/power_fist/models/segmentation/selim_zoo/__init__.py
|
SpaceNetChallenge/SpaceNet_Optimized_Routing_Solutions
|
3fbc215de6b05904a5b54b2c7cde7e61074ae38d
|
[
"Apache-2.0"
] | 27
|
2020-03-04T05:54:48.000Z
|
2022-01-05T07:07:44.000Z
|
ikibardin/power-fist-segmentation/power_fist/models/segmentation/selim_zoo/__init__.py
|
CosmiQ/SpaceNet_Optimized_Routing_Solutions
|
3fbc215de6b05904a5b54b2c7cde7e61074ae38d
|
[
"Apache-2.0"
] | 1
|
2020-07-14T10:35:50.000Z
|
2020-07-14T10:35:50.000Z
|
ikibardin/power-fist-segmentation/power_fist/models/segmentation/selim_zoo/__init__.py
|
SpaceNetChallenge/SpaceNet_Optimized_Routing_Solutions
|
3fbc215de6b05904a5b54b2c7cde7e61074ae38d
|
[
"Apache-2.0"
] | 7
|
2020-03-07T21:42:57.000Z
|
2022-01-07T10:49:50.000Z
|
from . import unet
def dn161_unet(num_classes, num_channels=3, pretrained=True):
    """DenseNet-161 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.densenet_unet(seg_classes=num_classes, backbone_arch='densenet161')
def dn161_unet_fatter(num_classes, num_channels=3, pretrained=True):
    """Wider DenseNet-161 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.densenet_unet(seg_classes=num_classes, backbone_arch='densenet161_fatter')
def dn161_sota_unet(num_classes, num_channels=3, pretrained=True):
    """DenseNet-161 "sota" U-Net variant (``num_channels``/``pretrained`` not forwarded)."""
    return unet.densenet_unet(seg_classes=num_classes, backbone_arch='densenet161_sota')
def dn121_unet(num_classes, num_channels=3, pretrained=True):
    """DenseNet-121 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.densenet_unet(seg_classes=num_classes, backbone_arch='densenet121')
def srx50_unet(num_classes, num_channels=3, pretrained=True):
    """SE-ResNeXt-50 SCSE U-Net (``num_channels``/``pretrained`` not forwarded)."""
    return unet.scse_unet(seg_classes=num_classes, backbone_arch='seresnext50')
def srx50_unet_dropout(num_classes, num_channels=3, pretrained=True):
    """SE-ResNeXt-50 SCSE U-Net with dropout (``num_channels``/``pretrained`` not forwarded)."""
    return unet.scse_unet_dropout(seg_classes=num_classes, backbone_arch='seresnext50')
def sn154_unet(num_classes, num_channels=3, pretrained=True):
    """SENet-154 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.se_unet(seg_classes=num_classes, backbone_arch='senet154')
def pd_rn154_unet(num_classes, num_channels=3, pretrained=True):
    """Places365 ("pd") ResNet-154 U-Net (``num_channels``/``pretrained`` not forwarded) — backbone naming unverified."""
    return unet.resnet_unet(seg_classes=num_classes, backbone_arch='pd_resnet154')
def pd_dn161_unet(num_classes, num_channels=3, pretrained=True):
    """"pd"-pretrained DenseNet-161 U-Net (``num_channels``/``pretrained`` not forwarded)."""
    return unet.densenet_unet(seg_classes=num_classes, backbone_arch='pd_densenet161')
def rn50_unet(num_classes, num_channels=3, pretrained=True):
    """ResNet-50 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.resnet_unet(seg_classes=num_classes, backbone_arch='resnet50')
def convt_rn50_unet(num_classes, num_channels=3, pretrained=True):
    """ResNet-50 U-Net with transposed-conv decoder (``num_channels``/``pretrained`` not forwarded)."""
    return unet.convt_resnet_unet(seg_classes=num_classes, backbone_arch='resnet50')
def convt_rn34_unet_light(num_classes, num_channels=3, pretrained=True):
    """Light ResNet-34 transposed-conv U-Net (``num_channels``/``pretrained`` not forwarded)."""
    return unet.convt_resnet_unet(seg_classes=num_classes, backbone_arch='resnet34_light')
def convt_rn18_unet_light(num_classes, num_channels=3, pretrained=True):
    """Light ResNet-18 transposed-conv U-Net (``num_channels``/``pretrained`` not forwarded)."""
    return unet.convt_resnet_unet(seg_classes=num_classes, backbone_arch='resnet18_light')
def rn34_unet(num_classes, num_channels=3, pretrained=True):
    """ResNet-34 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.resnet_unet(seg_classes=num_classes, backbone_arch='resnet34')
def rn34_unet_dropout(num_classes, num_channels=3, pretrained=True):
    """ResNet-34 U-Net with dropout (``num_channels``/``pretrained`` not forwarded)."""
    return unet.resnet_unet_dropout(seg_classes=num_classes, backbone_arch='resnet34')
def rn18_unet(num_classes, num_channels=3, pretrained=True):
    """ResNet-18 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.resnet_unet(seg_classes=num_classes, backbone_arch='resnet18')
def rx101_unet(num_classes, num_channels=3, pretrained=True):
    """ResNeXt-101 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.resnet_unet(seg_classes=num_classes, backbone_arch='resnext101')
def effnet_b0_unet(num_classes, num_channels=3, pretrained=True):
    """EfficientNet-B0 U-Net (``num_channels``/``pretrained`` not forwarded — TODO confirm)."""
    return unet.effnet_unet(seg_classes=num_classes, backbone_arch='efficientnet_b0')
| 37.175676
| 90
| 0.814976
| 405
| 2,751
| 5.165432
| 0.111111
| 0.172084
| 0.111855
| 0.180688
| 0.889101
| 0.889101
| 0.889101
| 0.854685
| 0.780115
| 0.764818
| 0
| 0.041385
| 0.086514
| 2,751
| 73
| 91
| 37.684932
| 0.791086
| 0
| 0
| 0
| 0
| 0
| 0.074518
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.486486
| false
| 0
| 0.027027
| 0.486486
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
4515bb6c06d9aab085a62b6d98d8a701839cfc02
| 4,844
|
py
|
Python
|
test/test_ridge.py
|
mspronesti/qlkit
|
2bb4dabcf88e63c54f7c57e2e80ad2ca77a04b40
|
[
"Apache-2.0"
] | 5
|
2021-12-26T15:45:00.000Z
|
2022-01-12T10:31:57.000Z
|
test/test_ridge.py
|
mspronesti/qlkit
|
2bb4dabcf88e63c54f7c57e2e80ad2ca77a04b40
|
[
"Apache-2.0"
] | null | null | null |
test/test_ridge.py
|
mspronesti/qlkit
|
2bb4dabcf88e63c54f7c57e2e80ad2ca77a04b40
|
[
"Apache-2.0"
] | 2
|
2022-01-28T22:05:50.000Z
|
2022-02-27T18:50:33.000Z
|
import numpy as np
import pytest
from sklearn.datasets import make_regression
from sklearn.preprocessing import MinMaxScaler
from qlearnkit.algorithms import QRidgeRegressor
from qiskit import Aer
from qiskit.utils import QuantumInstance, algorithm_globals
from qiskit.circuit.library import PauliFeatureMap, ZZFeatureMap
seed = 42
algorithm_globals.random_seed = seed

# Simulator settings shared by both backends: deterministic simulation and
# transpilation seeds plus a light transpiler optimization pass.
_common_run_opts = dict(
    seed_simulator=algorithm_globals.random_seed,
    seed_transpiler=algorithm_globals.random_seed,
    optimization_level=1,
)

# Exact (statevector) simulator — no sampling noise.
sv_quantum_instance = QuantumInstance(
    Aer.get_backend("aer_simulator_statevector"),
    **_common_run_opts,
)

# Shot-based simulator, 100 samples per circuit.
qasm_quantum_instance = QuantumInstance(
    Aer.get_backend("aer_simulator"),
    shots=100,
    **_common_run_opts,
)
def test_ridge_sv(
    quantum_instance=sv_quantum_instance,
    quantum_instance_type='statevector',
    n_samples=40,
    n_features=2,
    n_test_pts=10,
    random_state=0
):
    """Quantum ridge regression on the statevector backend should score >= 0.8."""
    rand_gen = np.random.RandomState(random_state)
    features, labels = make_regression(n_features=n_features,
                                       n_samples=n_samples,
                                       noise=1,
                                       random_state=seed)
    features = MinMaxScaler().fit_transform(features)
    y_target = labels[:n_test_pts]
    model = QRidgeRegressor(
        gamma=1e-3,
        quantum_instance=quantum_instance,
        encoding_map=PauliFeatureMap(n_features),
    )
    model.fit(features, labels)
    # Score on slightly perturbed training points rather than the exact ones.
    epsilon = 1e-6 * (2 * rand_gen.rand(1, n_features) - 1)
    score = model.score(features[:n_test_pts] + epsilon, y_target)
    failure_msg = (f"Test failed with {quantum_instance_type}.\n"
                   f"Expected score >= 80%, but it was {score}")
    np.testing.assert_(score >= 0.8, failure_msg)
def test_ridge_qasm(
    quantum_instance=qasm_quantum_instance,
    quantum_instance_type='qasm',
    n_samples=40,
    n_features=2,
    n_test_pts=10,
    random_state=0
):
    """Quantum ridge regression on the shot-based backend should score >= 0.8."""
    rand_gen = np.random.RandomState(random_state)
    features, labels = make_regression(n_features=n_features,
                                       n_samples=n_samples,
                                       noise=1,
                                       random_state=seed)
    features = MinMaxScaler().fit_transform(features)
    y_target = labels[:n_test_pts]
    model = QRidgeRegressor(
        gamma=2.5,
        quantum_instance=quantum_instance,
        encoding_map=PauliFeatureMap(n_features),
    )
    model.fit(features, labels)
    # Score on slightly perturbed training points rather than the exact ones.
    epsilon = 1e-6 * (2 * rand_gen.rand(1, n_features) - 1)
    score = model.score(features[:n_test_pts] + epsilon, y_target)
    failure_msg = (f"Test failed with {quantum_instance_type}.\n"
                   f"Expected score >= 80%, but it was {score}")
    np.testing.assert_(score >= 0.8, failure_msg)
def test_change_kernel(
    quantum_instance=sv_quantum_instance,
    quantum_instance_type='statevector',
    n_samples=40,
    n_features=2,
    n_test_pts=10,
    random_state=0
):
    """Setting quantum_instance / encoding_map *after* construction still works."""
    rand_gen = np.random.RandomState(random_state)
    features, labels = make_regression(n_features=n_features,
                                       n_samples=n_samples,
                                       noise=1,
                                       random_state=seed)
    features = MinMaxScaler().fit_transform(features)
    y_target = labels[:n_test_pts]
    # Deliberately construct with gamma only, then attach the backend and
    # feature map via attribute assignment.
    model = QRidgeRegressor(
        gamma=1e-3,
    )
    model.quantum_instance = quantum_instance
    model.encoding_map = PauliFeatureMap(n_features)
    model.fit(features, labels)
    # Score on slightly perturbed training points rather than the exact ones.
    epsilon = 1e-6 * (2 * rand_gen.rand(1, n_features) - 1)
    score = model.score(features[:n_test_pts] + epsilon, y_target)
    failure_msg = (f"Test failed with {quantum_instance_type}.\n"
                   f"Expected score >= 80%, but it was {score}")
    np.testing.assert_(score >= 0.8, failure_msg)
def test_change_gamma(
    quantum_instance=sv_quantum_instance,
    quantum_instance_type='statevector',
    n_samples=40,
    n_features=2,
    n_test_pts=10,
    random_state=0
):
    """Overriding gamma *after* construction still yields a good fit."""
    rand_gen = np.random.RandomState(random_state)
    features, labels = make_regression(n_features=n_features,
                                       n_samples=n_samples,
                                       noise=1,
                                       random_state=seed)
    features = MinMaxScaler().fit_transform(features)
    y_target = labels[:n_test_pts]
    # Construct without gamma, then set it via attribute assignment.
    model = QRidgeRegressor(
        quantum_instance=quantum_instance,
        encoding_map=PauliFeatureMap(n_features),
    )
    model.gamma = 10e-3
    model.fit(features, labels)
    # Score on slightly perturbed training points rather than the exact ones.
    epsilon = 1e-6 * (2 * rand_gen.rand(1, n_features) - 1)
    score = model.score(features[:n_test_pts] + epsilon, y_target)
    failure_msg = (f"Test failed with {quantum_instance_type}.\n"
                   f"Expected score >= 80%, but it was {score}")
    np.testing.assert_(score >= 0.8, failure_msg)
| 29.357576
| 83
| 0.640999
| 617
| 4,844
| 4.755267
| 0.142626
| 0.132924
| 0.03272
| 0.0818
| 0.857192
| 0.835378
| 0.835378
| 0.835378
| 0.797887
| 0.778459
| 0
| 0.022459
| 0.264657
| 4,844
| 164
| 84
| 29.536585
| 0.801235
| 0.01796
| 0
| 0.763359
| 0
| 0
| 0.08649
| 0.027146
| 0
| 0
| 0
| 0
| 0.030534
| 1
| 0.030534
| false
| 0
| 0.061069
| 0
| 0.091603
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18ade12dc32951b3256daf18ea4731d8a7aa00d9
| 227,630
|
py
|
Python
|
kubernetes/client/apis/apps_v1beta1_api.py
|
jraby/kubernetes-client-python
|
e6e7b710d0b15fbde686bc9dccf00da5951bef84
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/apis/apps_v1beta1_api.py
|
jraby/kubernetes-client-python
|
e6e7b710d0b15fbde686bc9dccf00da5951bef84
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/apis/apps_v1beta1_api.py
|
jraby/kubernetes-client-python
|
e6e7b710d0b15fbde686bc9dccf00da5951bef84
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AppsV1beta1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the ApiClient used for all requests.

    When no client is supplied, fall back to (and lazily create) the
    shared client held by the global Configuration singleton.
    """
    config = Configuration()
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def create_namespaced_controller_revision(self, namespace, body, **kwargs):
    """
    create a ControllerRevision

    Synchronous by default; pass a ``callback`` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1ControllerRevision body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ControllerRevision, or the request thread when async
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with a callback it yields the request thread, otherwise the data.
    return self.create_namespaced_controller_revision_with_http_info(namespace, body, **kwargs)
def create_namespaced_controller_revision_with_http_info(self, namespace, body, **kwargs):
    """
    create a ControllerRevision
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.create_namespaced_controller_revision_with_http_info(namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1ControllerRevision body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ControllerRevision
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the API's own parameters plus client transport flags.
    all_params = ['namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments plus the 'kwargs' dict; the loop
    # flattens kwargs into it, rejecting any unknown keyword.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_controller_revision" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_controller_revision`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_controller_revision`")

    collection_formats = {}

    # Endpoint template; '{namespace}' is presumably substituted from
    # path_params by the api_client.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    # Hand the fully-assembled request to the shared client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1ControllerRevision',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_namespaced_deployment(self, namespace, body, **kwargs):
    """
    create a Deployment

    Synchronous by default; pass a ``callback`` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Deployment body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when async
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with a callback it yields the request thread, otherwise the data.
    return self.create_namespaced_deployment_with_http_info(namespace, body, **kwargs)
def create_namespaced_deployment_with_http_info(self, namespace, body, **kwargs):
    """
    create a Deployment
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.create_namespaced_deployment_with_http_info(namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Deployment body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the API's own parameters plus client transport flags.
    all_params = ['namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments plus the 'kwargs' dict; the loop
    # flattens kwargs into it, rejecting any unknown keyword.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_deployment" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_deployment`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_deployment`")

    collection_formats = {}

    # Endpoint template; '{namespace}' is presumably substituted from
    # path_params by the api_client.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    # Hand the fully-assembled request to the shared client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AppsV1beta1Deployment',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_namespaced_deployment_rollback_rollback(self, name, namespace, body, **kwargs):
    """
    create rollback of a DeploymentRollback

    Synchronous by default; pass a ``callback`` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param str name: name of the DeploymentRollback (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1DeploymentRollback body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1DeploymentRollback, or the request thread when async
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with a callback it yields the request thread, otherwise the data.
    return self.create_namespaced_deployment_rollback_rollback_with_http_info(name, namespace, body, **kwargs)
def create_namespaced_deployment_rollback_rollback_with_http_info(self, name, namespace, body, **kwargs):
    """
    create rollback of a DeploymentRollback
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.create_namespaced_deployment_rollback_rollback_with_http_info(name, namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str name: name of the DeploymentRollback (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1DeploymentRollback body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1DeploymentRollback
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the API's own parameters plus client transport flags.
    all_params = ['name', 'namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments plus the 'kwargs' dict; the loop
    # flattens kwargs into it, rejecting any unknown keyword.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_deployment_rollback_rollback" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'name' is set
    if ('name' not in params) or (params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `create_namespaced_deployment_rollback_rollback`")
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_deployment_rollback_rollback`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_deployment_rollback_rollback`")

    collection_formats = {}

    # Endpoint template; '{namespace}' and '{name}' are presumably
    # substituted from path_params by the api_client.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/rollback'.replace('{format}', 'json')
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    # Hand the fully-assembled request to the shared client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AppsV1beta1DeploymentRollback',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_namespaced_stateful_set(self, namespace, body, **kwargs):
    """
    create a StatefulSet

    Synchronous by default; pass a ``callback`` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when async
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with a callback it yields the request thread, otherwise the data.
    return self.create_namespaced_stateful_set_with_http_info(namespace, body, **kwargs)
def create_namespaced_stateful_set_with_http_info(self, namespace, body, **kwargs):
    """
    create a StatefulSet
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.create_namespaced_stateful_set_with_http_info(namespace, body, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the API's own parameters plus client transport flags.
    all_params = ['namespace', 'body', 'pretty']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments plus the 'kwargs' dict; the loop
    # flattens kwargs into it, rejecting any unknown keyword.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_namespaced_stateful_set" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `create_namespaced_stateful_set`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `create_namespaced_stateful_set`")

    collection_formats = {}

    # Endpoint template; '{namespace}' is presumably substituted from
    # path_params by the api_client.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    # Hand the fully-assembled request to the shared client.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1beta1StatefulSet',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_collection_namespaced_controller_revision(self, namespace, **kwargs):
    """
    delete collection of ControllerRevision

    Synchronous by default; pass a ``callback`` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: Resource version to start the list/watch from; see the Kubernetes API docs for semantics.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of notifications.
    :return: V1Status, or the request thread when async
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with a callback it yields the request thread, otherwise the data.
    return self.delete_collection_namespaced_controller_revision_with_http_info(namespace, **kwargs)
def delete_collection_namespaced_controller_revision_with_http_info(self, namespace, **kwargs):
    """
    delete collection of ControllerRevision
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.delete_collection_namespaced_controller_revision_with_http_info(namespace, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
    :return: V1Status
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keywords: the API's own parameters plus client transport flags.
    all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # locals() snapshots the named arguments plus the 'kwargs' dict; the loop
    # flattens kwargs into it, rejecting any unknown keyword.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_collection_namespaced_controller_revision" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'namespace' is set
    if ('namespace' not in params) or (params['namespace'] is None):
        raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_controller_revision`")

    collection_formats = {}

    # Endpoint template; '{namespace}' is presumably substituted from
    # path_params by the api_client.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions'.replace('{format}', 'json')
    path_params = {}
    if 'namespace' in params:
        path_params['namespace'] = params['namespace']

    # Map the snake_case keyword arguments to the API's camelCase query keys.
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    if 'field_selector' in params:
        query_params['fieldSelector'] = params['field_selector']
    if 'include_uninitialized' in params:
        query_params['includeUninitialized'] = params['include_uninitialized']
    if 'label_selector' in params:
        query_params['labelSelector'] = params['label_selector']
    if 'resource_version' in params:
        query_params['resourceVersion'] = params['resource_version']
    if 'timeout_seconds' in params:
        query_params['timeoutSeconds'] = params['timeout_seconds']
    if 'watch' in params:
        query_params['watch'] = params['watch']

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE-collection sends no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['*/*'])

    # Authentication setting
    auth_settings = ['BearerToken']

    # Hand the fully-assembled request to the shared client.
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='V1Status',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_collection_namespaced_deployment(self, namespace, **kwargs):
    """
    delete collection of Deployment

    Synchronous by default; pass a ``callback`` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str resource_version: Resource version to start the list/watch from; see the Kubernetes API docs for semantics.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of notifications.
    :return: V1Status, or the request thread when async
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info variant;
    # with a callback it yields the request thread, otherwise the data.
    return self.delete_collection_namespaced_deployment_with_http_info(namespace, **kwargs)
def delete_collection_namespaced_deployment_with_http_info(self, namespace, **kwargs):
"""
delete collection of Deployment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_deployment_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_deployment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_deployment`")
collection_formats = {}
resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collection_namespaced_stateful_set(self, namespace, **kwargs):
"""
delete collection of StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_stateful_set(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_collection_namespaced_stateful_set_with_http_info(namespace, **kwargs)
else:
(data) = self.delete_collection_namespaced_stateful_set_with_http_info(namespace, **kwargs)
return data
def delete_collection_namespaced_stateful_set_with_http_info(self, namespace, **kwargs):
"""
delete collection of StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_collection_namespaced_stateful_set_with_http_info(namespace, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.
:param int timeout_seconds: Timeout for the list/watch call.
:param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['namespace', 'pretty', 'field_selector', 'include_uninitialized', 'label_selector', 'resource_version', 'timeout_seconds', 'watch']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collection_namespaced_stateful_set" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_collection_namespaced_stateful_set`")
collection_formats = {}
resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets'.replace('{format}', 'json')
path_params = {}
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'field_selector' in params:
query_params['fieldSelector'] = params['field_selector']
if 'include_uninitialized' in params:
query_params['includeUninitialized'] = params['include_uninitialized']
if 'label_selector' in params:
query_params['labelSelector'] = params['label_selector']
if 'resource_version' in params:
query_params['resourceVersion'] = params['resource_version']
if 'timeout_seconds' in params:
query_params['timeoutSeconds'] = params['timeout_seconds']
if 'watch' in params:
query_params['watch'] = params['watch']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_controller_revision(self, name, namespace, body, **kwargs):
"""
delete a ControllerRevision
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_controller_revision(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ControllerRevision (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_controller_revision_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_controller_revision_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_controller_revision_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a ControllerRevision
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_controller_revision_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the ControllerRevision (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_controller_revision" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_controller_revision`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_controller_revision`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_controller_revision`")
collection_formats = {}
resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_deployment(self, name, namespace, body, **kwargs):
"""
delete a Deployment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_deployment(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Deployment (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_deployment_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_deployment_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_deployment_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a Deployment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_deployment_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the Deployment (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_deployment" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_deployment`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_deployment`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_deployment`")
collection_formats = {}
resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_namespaced_stateful_set(self, name, namespace, body, **kwargs):
"""
delete a StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_stateful_set(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.delete_namespaced_stateful_set_with_http_info(name, namespace, body, **kwargs)
return data
def delete_namespaced_stateful_set_with_http_info(self, name, namespace, body, **kwargs):
"""
delete a StatefulSet
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_namespaced_stateful_set_with_http_info(name, namespace, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str name: name of the StatefulSet (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param V1DeleteOptions body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy.
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'namespace', 'body', 'pretty', 'grace_period_seconds', 'orphan_dependents', 'propagation_policy']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_namespaced_stateful_set" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params) or (params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_namespaced_stateful_set`")
# verify the required parameter 'namespace' is set
if ('namespace' not in params) or (params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `delete_namespaced_stateful_set`")
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `delete_namespaced_stateful_set`")
collection_formats = {}
resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
path_params = {}
if 'name' in params:
path_params['name'] = params['name']
if 'namespace' in params:
path_params['namespace'] = params['namespace']
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
if 'grace_period_seconds' in params:
query_params['gracePeriodSeconds'] = params['grace_period_seconds']
if 'orphan_dependents' in params:
query_params['orphanDependents'] = params['orphan_dependents']
if 'propagation_policy' in params:
query_params['propagationPolicy'] = params['propagation_policy']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1Status',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
    """
    get available resources

    Builds and issues the actual HTTP request for get_api_resources.
    Synchronous unless a `callback` callable is supplied, in which case
    the request runs on a worker thread and that thread is returned.

    >>> thread = api.get_api_resources_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :return: V1APIResourceList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = frozenset([
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_api_resources" % name
            )
    # This endpoint takes no path or query parameters and sends no body.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/'.replace('{format}', 'json'), 'GET',
        {},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1APIResourceList',
        auth_settings=['BearerToken'],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_controller_revision_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind ControllerRevision

    Synchronous by default; supply a `callback` callable to run the
    request on a worker thread instead, in which case the thread object
    is returned and `callback` receives the response.

    >>> thread = api.list_controller_revision_for_all_namespaces(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str pretty: 'true' pretty-prints the output
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: V1beta1ControllerRevisionList
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async flavours both just delegate; with a callback the
    # delegate hands back the worker thread unchanged.
    return self.list_controller_revision_for_all_namespaces_with_http_info(**kwargs)
def list_controller_revision_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind ControllerRevision

    Builds and issues the actual HTTP request for
    list_controller_revision_for_all_namespaces.  Synchronous unless a
    `callback` callable is supplied, in which case the request runs on
    a worker thread and that thread is returned.

    >>> thread = api.list_controller_revision_for_all_namespaces_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str pretty: 'true' pretty-prints the output
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: V1beta1ControllerRevisionList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = frozenset([
        'field_selector', 'include_uninitialized', 'label_selector',
        'pretty', 'resource_version', 'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_controller_revision_for_all_namespaces" % name
            )
    # Map python-level kwarg names onto wire-level query parameter
    # names, keeping only the ones the caller actually supplied.
    query_params = {}
    for py_name, wire_name in (
            ('field_selector', 'fieldSelector'),
            ('include_uninitialized', 'includeUninitialized'),
            ('label_selector', 'labelSelector'),
            ('pretty', 'pretty'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if py_name in kwargs:
            query_params[wire_name] = kwargs[py_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/controllerrevisions'.replace('{format}', 'json'), 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1ControllerRevisionList',
        auth_settings=['BearerToken'],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_deployment_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind Deployment

    Synchronous by default; supply a `callback` callable to run the
    request on a worker thread instead, in which case the thread object
    is returned and `callback` receives the response.

    >>> thread = api.list_deployment_for_all_namespaces(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str pretty: 'true' pretty-prints the output
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: AppsV1beta1DeploymentList
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async flavours both just delegate; with a callback the
    # delegate hands back the worker thread unchanged.
    return self.list_deployment_for_all_namespaces_with_http_info(**kwargs)
def list_deployment_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind Deployment

    Builds and issues the actual HTTP request for
    list_deployment_for_all_namespaces.  Synchronous unless a
    `callback` callable is supplied, in which case the request runs on
    a worker thread and that thread is returned.

    >>> thread = api.list_deployment_for_all_namespaces_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str pretty: 'true' pretty-prints the output
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: AppsV1beta1DeploymentList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = frozenset([
        'field_selector', 'include_uninitialized', 'label_selector',
        'pretty', 'resource_version', 'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_deployment_for_all_namespaces" % name
            )
    # Map python-level kwarg names onto wire-level query parameter
    # names, keeping only the ones the caller actually supplied.
    query_params = {}
    for py_name, wire_name in (
            ('field_selector', 'fieldSelector'),
            ('include_uninitialized', 'includeUninitialized'),
            ('label_selector', 'labelSelector'),
            ('pretty', 'pretty'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if py_name in kwargs:
            query_params[wire_name] = kwargs[py_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/deployments'.replace('{format}', 'json'), 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AppsV1beta1DeploymentList',
        auth_settings=['BearerToken'],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_namespaced_controller_revision(self, namespace, **kwargs):
    """
    list or watch objects of kind ControllerRevision

    Synchronous by default; supply a `callback` callable to run the
    request on a worker thread instead, in which case the thread object
    is returned and `callback` receives the response.

    >>> thread = api.list_namespaced_controller_revision(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: 'true' pretty-prints the output
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: V1beta1ControllerRevisionList
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async flavours both just delegate; with a callback the
    # delegate hands back the worker thread unchanged.
    return self.list_namespaced_controller_revision_with_http_info(namespace, **kwargs)
def list_namespaced_controller_revision_with_http_info(self, namespace, **kwargs):
    """
    list or watch objects of kind ControllerRevision

    Builds and issues the actual HTTP request for
    list_namespaced_controller_revision.  Synchronous unless a
    `callback` callable is supplied, in which case the request runs on
    a worker thread and that thread is returned.

    >>> thread = api.list_namespaced_controller_revision_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: 'true' pretty-prints the output
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: V1beta1ControllerRevisionList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = frozenset([
        'namespace', 'pretty', 'field_selector', 'include_uninitialized',
        'label_selector', 'resource_version', 'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_namespaced_controller_revision" % name
            )
    # verify the required parameter 'namespace' is set
    if namespace is None:
        raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_controller_revision`")
    # Map python-level kwarg names onto wire-level query parameter
    # names, keeping only the ones the caller actually supplied.
    query_params = {}
    for py_name, wire_name in (
            ('pretty', 'pretty'),
            ('field_selector', 'fieldSelector'),
            ('include_uninitialized', 'includeUninitialized'),
            ('label_selector', 'labelSelector'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if py_name in kwargs:
            query_params[wire_name] = kwargs[py_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions'.replace('{format}', 'json'), 'GET',
        {'namespace': namespace},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1ControllerRevisionList',
        auth_settings=['BearerToken'],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_namespaced_deployment(self, namespace, **kwargs):
    """
    list or watch objects of kind Deployment

    Synchronous by default; supply a `callback` callable to run the
    request on a worker thread instead, in which case the thread object
    is returned and `callback` receives the response.

    >>> thread = api.list_namespaced_deployment(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: 'true' pretty-prints the output
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: AppsV1beta1DeploymentList
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async flavours both just delegate; with a callback the
    # delegate hands back the worker thread unchanged.
    return self.list_namespaced_deployment_with_http_info(namespace, **kwargs)
def list_namespaced_deployment_with_http_info(self, namespace, **kwargs):
    """
    list or watch objects of kind Deployment

    Builds and issues the actual HTTP request for
    list_namespaced_deployment.  Synchronous unless a `callback`
    callable is supplied, in which case the request runs on a worker
    thread and that thread is returned.

    >>> thread = api.list_namespaced_deployment_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: 'true' pretty-prints the output
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: AppsV1beta1DeploymentList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = frozenset([
        'namespace', 'pretty', 'field_selector', 'include_uninitialized',
        'label_selector', 'resource_version', 'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_namespaced_deployment" % name
            )
    # verify the required parameter 'namespace' is set
    if namespace is None:
        raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_deployment`")
    # Map python-level kwarg names onto wire-level query parameter
    # names, keeping only the ones the caller actually supplied.
    query_params = {}
    for py_name, wire_name in (
            ('pretty', 'pretty'),
            ('field_selector', 'fieldSelector'),
            ('include_uninitialized', 'includeUninitialized'),
            ('label_selector', 'labelSelector'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if py_name in kwargs:
            query_params[wire_name] = kwargs[py_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/deployments'.replace('{format}', 'json'), 'GET',
        {'namespace': namespace},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AppsV1beta1DeploymentList',
        auth_settings=['BearerToken'],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_namespaced_stateful_set(self, namespace, **kwargs):
    """
    list or watch objects of kind StatefulSet

    Synchronous by default; supply a `callback` callable to run the
    request on a worker thread instead, in which case the thread object
    is returned and `callback` receives the response.

    >>> thread = api.list_namespaced_stateful_set(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: 'true' pretty-prints the output
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: V1beta1StatefulSetList
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Sync and async flavours both just delegate; with a callback the
    # delegate hands back the worker thread unchanged.
    return self.list_namespaced_stateful_set_with_http_info(namespace, **kwargs)
def list_namespaced_stateful_set_with_http_info(self, namespace, **kwargs):
    """
    list or watch objects of kind StatefulSet

    Builds and issues the actual HTTP request for
    list_namespaced_stateful_set.  Synchronous unless a `callback`
    callable is supplied, in which case the request runs on a worker
    thread and that thread is returned.

    >>> thread = api.list_namespaced_stateful_set_with_http_info(namespace, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: 'true' pretty-prints the output
    :param str field_selector: restrict the returned objects by field selector
    :param bool include_uninitialized: include partially initialized resources
    :param str label_selector: restrict the returned objects by label selector
    :param str resource_version: resource version to start the list/watch from
    :param int timeout_seconds: timeout for the list/watch call
    :param bool watch: stream change notifications instead of a one-shot list
    :return: V1beta1StatefulSetList
        If the method is called asynchronously,
        returns the request thread.
    """
    # Reject any keyword this endpoint does not understand.
    accepted = frozenset([
        'namespace', 'pretty', 'field_selector', 'include_uninitialized',
        'label_selector', 'resource_version', 'timeout_seconds', 'watch',
        'callback', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ])
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_namespaced_stateful_set" % name
            )
    # verify the required parameter 'namespace' is set
    if namespace is None:
        raise ValueError("Missing the required parameter `namespace` when calling `list_namespaced_stateful_set`")
    # Map python-level kwarg names onto wire-level query parameter
    # names, keeping only the ones the caller actually supplied.
    query_params = {}
    for py_name, wire_name in (
            ('pretty', 'pretty'),
            ('field_selector', 'fieldSelector'),
            ('include_uninitialized', 'includeUninitialized'),
            ('label_selector', 'labelSelector'),
            ('resource_version', 'resourceVersion'),
            ('timeout_seconds', 'timeoutSeconds'),
            ('watch', 'watch')):
        if py_name in kwargs:
            query_params[wire_name] = kwargs[py_name]
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets'.replace('{format}', 'json'), 'GET',
        {'namespace': namespace},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1StatefulSetList',
        auth_settings=['BearerToken'],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def list_stateful_set_for_all_namespaces(self, **kwargs):
    """
    list or watch objects of kind StatefulSet

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications.
    :return: V1beta1StatefulSetList, or the request thread when called asynchronously.
    """
    # The public wrapper always wants the bare data, not the (data, status,
    # headers) tuple that the _with_http_info variant can produce.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_stateful_set_for_all_namespaces_with_http_info
    if kwargs.get('callback'):
        # Asynchronous mode: hand back the request thread immediately.
        return delegate(**kwargs)
    response_data = delegate(**kwargs)
    return response_data
def list_stateful_set_for_all_namespaces_with_http_info(self, **kwargs):
    """
    list or watch objects of kind StatefulSet

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource.
    :param int timeout_seconds: Timeout for the list/watch call.
    :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications.
    :return: V1beta1StatefulSetList, or the request thread when called asynchronously.
    """
    recognized = ['field_selector', 'include_uninitialized', 'label_selector',
                  'pretty', 'resource_version', 'timeout_seconds', 'watch',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Validate and collect keyword arguments; anything unrecognized is a bug
    # at the call site, so fail loudly.
    params = {}
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_stateful_set_for_all_namespaces" % key
            )
        params[key] = val

    resource_path = '/apis/apps/v1beta1/statefulsets'.replace('{format}', 'json')
    path_params = {}
    # Map our snake_case keyword names onto the wire-format query keys,
    # keeping only those the caller actually supplied.
    query_name_map = [
        ('field_selector', 'fieldSelector'),
        ('include_uninitialized', 'includeUninitialized'),
        ('label_selector', 'labelSelector'),
        ('pretty', 'pretty'),
        ('resource_version', 'resourceVersion'),
        ('timeout_seconds', 'timeoutSeconds'),
        ('watch', 'watch'),
    ]
    query_params = {wire: params[local]
                    for local, wire in query_name_map if local in params}

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf', 'application/json;stream=watch', 'application/vnd.kubernetes.protobuf;stream=watch']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1StatefulSetList',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_controller_revision(self, name, namespace, body, **kwargs):
    """
    partially update the specified ControllerRevision

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ControllerRevision, or the request thread when called asynchronously.
    """
    # Callers of this wrapper always receive bare data rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.patch_namespaced_controller_revision_with_http_info
    if kwargs.get('callback'):
        return delegate(name, namespace, body, **kwargs)
    response_data = delegate(name, namespace, body, **kwargs)
    return response_data
def patch_namespaced_controller_revision_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified ControllerRevision

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ControllerRevision, or the request thread when called asynchronously.
    """
    recognized = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    # Validate keyword arguments before doing any work.
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_controller_revision" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_controller_revision`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_controller_revision`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_controller_revision`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1ControllerRevision',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_deployment(self, name, namespace, body, **kwargs):
    """
    partially update the specified Deployment

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when called asynchronously.
    """
    # Callers of this wrapper always receive bare data rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.patch_namespaced_deployment_with_http_info
    if kwargs.get('callback'):
        return delegate(name, namespace, body, **kwargs)
    response_data = delegate(name, namespace, body, **kwargs)
    return response_data
def patch_namespaced_deployment_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified Deployment

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when called asynchronously.
    """
    recognized = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    # Validate keyword arguments before doing any work.
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_deployment" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_deployment`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_deployment`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_deployment`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='AppsV1beta1Deployment',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_deployment_status(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified Deployment

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when called asynchronously.
    """
    # Callers of this wrapper always receive bare data rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.patch_namespaced_deployment_status_with_http_info
    if kwargs.get('callback'):
        return delegate(name, namespace, body, **kwargs)
    response_data = delegate(name, namespace, body, **kwargs)
    return response_data
def patch_namespaced_deployment_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified Deployment

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when called asynchronously.
    """
    recognized = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    # Validate keyword arguments before doing any work.
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_deployment_status" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_deployment_status`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_deployment_status`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_deployment_status`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/status'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='AppsV1beta1Deployment',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_scale_scale(self, name, namespace, body, **kwargs):
    """
    partially update scale of the specified Scale

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the Scale (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Scale, or the request thread when called asynchronously.
    """
    # Callers of this wrapper always receive bare data rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.patch_namespaced_scale_scale_with_http_info
    if kwargs.get('callback'):
        return delegate(name, namespace, body, **kwargs)
    response_data = delegate(name, namespace, body, **kwargs)
    return response_data
def patch_namespaced_scale_scale_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update scale of the specified Scale

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the Scale (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Scale, or the request thread when called asynchronously.
    """
    recognized = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    # Validate keyword arguments before doing any work.
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_scale_scale" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_scale_scale`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_scale_scale`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_scale_scale`")

    # NOTE: the scale subresource lives under the deployments collection.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/scale'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='AppsV1beta1Scale',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_stateful_set(self, name, namespace, body, **kwargs):
    """
    partially update the specified StatefulSet

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    # Callers of this wrapper always receive bare data rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.patch_namespaced_stateful_set_with_http_info
    if kwargs.get('callback'):
        return delegate(name, namespace, body, **kwargs)
    response_data = delegate(name, namespace, body, **kwargs)
    return response_data
def patch_namespaced_stateful_set_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update the specified StatefulSet

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    recognized = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    # Validate keyword arguments before doing any work.
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_stateful_set" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_stateful_set`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_stateful_set`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_stateful_set`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1StatefulSet',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def patch_namespaced_stateful_set_status(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified StatefulSet

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    # Callers of this wrapper always receive bare data rather than the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.patch_namespaced_stateful_set_status_with_http_info
    if kwargs.get('callback'):
        return delegate(name, namespace, body, **kwargs)
    response_data = delegate(name, namespace, body, **kwargs)
    return response_data
def patch_namespaced_stateful_set_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    partially update status of the specified StatefulSet

    Synchronous by default; supply a `callback` callable to perform the
    request asynchronously, in which case the request thread is returned.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param object body: the patch document (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called asynchronously.
    """
    recognized = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    # Validate keyword arguments before doing any work.
    for key, val in kwargs.items():
        if key not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_namespaced_stateful_set_status" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `patch_namespaced_stateful_set_status`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `patch_namespaced_stateful_set_status`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `patch_namespaced_stateful_set_status`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}/status'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json']),
    }

    return self.api_client.call_api(
        resource_path, 'PATCH',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1StatefulSet',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def read_namespaced_controller_revision(self, name, namespace, **kwargs):
    """
    Read the specified ControllerRevision.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1ControllerRevision, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns the request thread when
    # a callback is present and the deserialized body otherwise, so both
    # paths reduce to a single delegation.
    return self.read_namespaced_controller_revision_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_controller_revision_with_http_info(self, name, namespace, **kwargs):
    """
    Read the specified ControllerRevision (raw transport variant).

    Same operation as read_namespaced_controller_revision, but the response
    also carries the HTTP status and headers. Synchronous unless a
    `callback` callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1ControllerRevision
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_controller_revision" % key
            )
        params[key] = val
    # Required path parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_controller_revision`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_controller_revision`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty', 'exact', 'export')
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1ControllerRevision',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def read_namespaced_deployment(self, name, namespace, **kwargs):
    """
    Read the specified Deployment.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: AppsV1beta1Deployment, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.read_namespaced_deployment_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_deployment_with_http_info(self, name, namespace, **kwargs):
    """
    Read the specified Deployment (raw transport variant).

    Same operation as read_namespaced_deployment, but the response also
    carries the HTTP status and headers. Synchronous unless a `callback`
    callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: AppsV1beta1Deployment
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_deployment" % key
            )
        params[key] = val
    # Required path parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_deployment`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_deployment`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty', 'exact', 'export')
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AppsV1beta1Deployment',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def read_namespaced_deployment_status(self, name, namespace, **kwargs):
    """
    Read status of the specified Deployment.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.read_namespaced_deployment_status_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_deployment_status_with_http_info(self, name, namespace, **kwargs):
    """
    Read status of the specified Deployment (raw transport variant).

    Same operation as read_namespaced_deployment_status, but the response
    also carries the HTTP status and headers. Synchronous unless a
    `callback` callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_deployment_status" % key
            )
        params[key] = val
    # Required path parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_deployment_status`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_deployment_status`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/status'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty',)
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AppsV1beta1Deployment',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def read_namespaced_scale_scale(self, name, namespace, **kwargs):
    """
    Read scale of the specified Scale.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Scale (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Scale, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.read_namespaced_scale_scale_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_scale_scale_with_http_info(self, name, namespace, **kwargs):
    """
    Read scale of the specified Scale (raw transport variant).

    Same operation as read_namespaced_scale_scale, but the response also
    carries the HTTP status and headers. Synchronous unless a `callback`
    callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Scale (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Scale
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_scale_scale" % key
            )
        params[key] = val
    # Required path parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_scale_scale`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_scale_scale`")

    # The scale subresource lives under the deployment path.
    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/scale'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty',)
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='AppsV1beta1Scale',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def read_namespaced_stateful_set(self, name, namespace, **kwargs):
    """
    Read the specified StatefulSet.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1StatefulSet, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.read_namespaced_stateful_set_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_stateful_set_with_http_info(self, name, namespace, **kwargs):
    """
    Read the specified StatefulSet (raw transport variant).

    Same operation as read_namespaced_stateful_set, but the response also
    carries the HTTP status and headers. Synchronous unless a `callback`
    callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1beta1StatefulSet
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'pretty', 'exact', 'export',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_stateful_set" % key
            )
        params[key] = val
    # Required path parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_stateful_set`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_stateful_set`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty', 'exact', 'export')
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1StatefulSet',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def read_namespaced_stateful_set_status(self, name, namespace, **kwargs):
    """
    Read status of the specified StatefulSet.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.read_namespaced_stateful_set_status_with_http_info(
        name, namespace, **kwargs)
def read_namespaced_stateful_set_status_with_http_info(self, name, namespace, **kwargs):
    """
    Read status of the specified StatefulSet (raw transport variant).

    Same operation as read_namespaced_stateful_set_status, but the response
    also carries the HTTP status and headers. Synchronous unless a
    `callback` callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1StatefulSet
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method read_namespaced_stateful_set_status" % key
            )
        params[key] = val
    # Required path parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `read_namespaced_stateful_set_status`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `read_namespaced_stateful_set_status`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}/status'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty',)
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='V1beta1StatefulSet',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def replace_namespaced_controller_revision(self, name, namespace, body, **kwargs):
    """
    Replace the specified ControllerRevision.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1ControllerRevision body: replacement object (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ControllerRevision, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.replace_namespaced_controller_revision_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_controller_revision_with_http_info(self, name, namespace, body, **kwargs):
    """
    Replace the specified ControllerRevision (raw transport variant).

    Same operation as replace_namespaced_controller_revision, but the
    response also carries the HTTP status and headers. Synchronous unless a
    `callback` callable is supplied.

    :param callback function: invoked with the response (optional)
    :param str name: name of the ControllerRevision (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1ControllerRevision body: replacement object (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1beta1ControllerRevision
             If the method is called asynchronously, returns the request thread.
    """
    # Every keyword this endpoint understands, including the transport
    # tuning options shared by all generated API methods.
    all_params = ['name', 'namespace', 'body', 'pretty',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = {'name': name, 'namespace': namespace, 'body': body}
    for key, val in iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_controller_revision" % key
            )
        params[key] = val
    # Required parameters must not be None.
    if params['name'] is None:
        raise ValueError("Missing the required parameter `name` when calling `replace_namespaced_controller_revision`")
    if params['namespace'] is None:
        raise ValueError("Missing the required parameter `namespace` when calling `replace_namespaced_controller_revision`")
    if params['body'] is None:
        raise ValueError("Missing the required parameter `body` when calling `replace_namespaced_controller_revision`")

    resource_path = '/apis/apps/v1beta1/namespaces/{namespace}/controllerrevisions/{name}'.replace('{format}', 'json')
    path_params = {'name': params['name'], 'namespace': params['namespace']}
    # Optional query-string parameters are forwarded only when supplied.
    query_params = {key: params[key]
                    for key in ('pretty',)
                    if key in params}
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        resource_path, 'PUT',
        path_params,
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1ControllerRevision',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def replace_namespaced_deployment(self, name, namespace, body, **kwargs):
    """
    Replace the specified Deployment.

    The call is synchronous unless a `callback` callable is supplied via
    kwargs, in which case the HTTP request runs asynchronously and the
    request thread is returned instead of the response data.

    :param callback function: invoked with the response (optional)
    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Deployment body: replacement object (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: AppsV1beta1Deployment, or the request thread when called
             asynchronously.
    """
    # Callers of this convenience wrapper want only the payload, not the
    # (data, status, headers) triple produced by the raw variant.
    kwargs['_return_http_data_only'] = True
    # Delegation is identical for the sync and async paths; the raw variant
    # returns the request thread itself when a callback is present.
    return self.replace_namespaced_deployment_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_deployment_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace the specified Deployment

    Low-level variant that also honours the transport keywords
    `_return_http_data_only`, `_preload_content`, `_request_timeout` and the
    asynchronous `callback` function.

    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Deployment body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: AppsV1beta1Deployment
             If the method is called asynchronously, returns the request thread.
    """
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject unknown keyword arguments up front.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_deployment" % key
            )
    params = dict(kwargs, name=name, namespace=namespace, body=body)
    # All three positional arguments are mandatory and may not be None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`replace_namespaced_deployment`" % required)
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}'.replace('{format}', 'json'),
        'PUT',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='AppsV1beta1Deployment',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def replace_namespaced_deployment_status(self, name, namespace, body, **kwargs):
    """
    replace status of the specified Deployment

    Synchronous by default; pass a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Deployment body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: AppsV1beta1Deployment, or the request thread when asynchronous.
    """
    # Callers of this convenience wrapper always want only the payload,
    # never the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether synchronous or asynchronous, the delegate's return value is
    # handed straight back (response data, or the request thread).
    return self.replace_namespaced_deployment_status_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_deployment_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace status of the specified Deployment

    Low-level variant that also honours the transport keywords
    `_return_http_data_only`, `_preload_content`, `_request_timeout` and the
    asynchronous `callback` function.

    :param str name: name of the Deployment (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Deployment body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: AppsV1beta1Deployment
             If the method is called asynchronously, returns the request thread.
    """
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject unknown keyword arguments up front.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_deployment_status" % key
            )
    params = dict(kwargs, name=name, namespace=namespace, body=body)
    # All three positional arguments are mandatory and may not be None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`replace_namespaced_deployment_status`" % required)
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/status'.replace('{format}', 'json'),
        'PUT',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='AppsV1beta1Deployment',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def replace_namespaced_scale_scale(self, name, namespace, body, **kwargs):
    """
    replace scale of the specified Scale

    Synchronous by default; pass a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str name: name of the Scale (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Scale body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: AppsV1beta1Scale, or the request thread when asynchronous.
    """
    # Callers of this convenience wrapper always want only the payload,
    # never the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether synchronous or asynchronous, the delegate's return value is
    # handed straight back (response data, or the request thread).
    return self.replace_namespaced_scale_scale_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_scale_scale_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace scale of the specified Scale

    Low-level variant that also honours the transport keywords
    `_return_http_data_only`, `_preload_content`, `_request_timeout` and the
    asynchronous `callback` function.

    :param str name: name of the Scale (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param AppsV1beta1Scale body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: AppsV1beta1Scale
             If the method is called asynchronously, returns the request thread.
    """
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject unknown keyword arguments up front.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_scale_scale" % key
            )
    params = dict(kwargs, name=name, namespace=namespace, body=body)
    # All three positional arguments are mandatory and may not be None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`replace_namespaced_scale_scale`" % required)
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    # NOTE: the scale subresource lives under the deployments collection.
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/deployments/{name}/scale'.replace('{format}', 'json'),
        'PUT',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='AppsV1beta1Scale',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def replace_namespaced_stateful_set(self, name, namespace, body, **kwargs):
    """
    replace the specified StatefulSet

    Synchronous by default; pass a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: V1beta1StatefulSet, or the request thread when asynchronous.
    """
    # Callers of this convenience wrapper always want only the payload,
    # never the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether synchronous or asynchronous, the delegate's return value is
    # handed straight back (response data, or the request thread).
    return self.replace_namespaced_stateful_set_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_stateful_set_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace the specified StatefulSet

    Low-level variant that also honours the transport keywords
    `_return_http_data_only`, `_preload_content`, `_request_timeout` and the
    asynchronous `callback` function.

    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: V1beta1StatefulSet
             If the method is called asynchronously, returns the request thread.
    """
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject unknown keyword arguments up front.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_stateful_set" % key
            )
    params = dict(kwargs, name=name, namespace=namespace, body=body)
    # All three positional arguments are mandatory and may not be None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`replace_namespaced_stateful_set`" % required)
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}'.replace('{format}', 'json'),
        'PUT',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1StatefulSet',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def replace_namespaced_stateful_set_status(self, name, namespace, body, **kwargs):
    """
    replace status of the specified StatefulSet

    Synchronous by default; pass a `callback` keyword argument to make the
    request asynchronously, in which case the request thread is returned
    instead of the response data.

    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: V1beta1StatefulSet, or the request thread when asynchronous.
    """
    # Callers of this convenience wrapper always want only the payload,
    # never the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether synchronous or asynchronous, the delegate's return value is
    # handed straight back (response data, or the request thread).
    return self.replace_namespaced_stateful_set_status_with_http_info(
        name, namespace, body, **kwargs)
def replace_namespaced_stateful_set_status_with_http_info(self, name, namespace, body, **kwargs):
    """
    replace status of the specified StatefulSet

    Low-level variant that also honours the transport keywords
    `_return_http_data_only`, `_preload_content`, `_request_timeout` and the
    asynchronous `callback` function.

    :param str name: name of the StatefulSet (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1beta1StatefulSet body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param callback function: callback for asynchronous requests (optional)
    :return: V1beta1StatefulSet
             If the method is called asynchronously, returns the request thread.
    """
    accepted = ('name', 'namespace', 'body', 'pretty', 'callback',
                '_return_http_data_only', '_preload_content',
                '_request_timeout')
    # Reject unknown keyword arguments up front.
    for key in kwargs:
        if key not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method replace_namespaced_stateful_set_status" % key
            )
    params = dict(kwargs, name=name, namespace=namespace, body=body)
    # All three positional arguments are mandatory and may not be None.
    for required in ('name', 'namespace', 'body'):
        if params.get(required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`replace_namespaced_stateful_set_status`" % required)
    query_params = {}
    if 'pretty' in params:
        query_params['pretty'] = params['pretty']
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'application/yaml',
             'application/vnd.kubernetes.protobuf']),
        'Content-Type': self.api_client.select_header_content_type(['*/*']),
    }
    return self.api_client.call_api(
        '/apis/apps/v1beta1/namespaces/{namespace}/statefulsets/{name}/status'.replace('{format}', 'json'),
        'PUT',
        {'name': params['name'], 'namespace': params['namespace']},
        query_params,
        header_params,
        body=params['body'],
        post_params=[],
        files={},
        response_type='V1beta1StatefulSet',
        auth_settings=['BearerToken'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 51.887395
| 457
| 0.615552
| 24,421
| 227,630
| 5.556161
| 0.01339
| 0.041271
| 0.021225
| 0.018572
| 0.995718
| 0.994672
| 0.993124
| 0.991444
| 0.989314
| 0.987582
| 0
| 0.002167
| 0.304679
| 227,630
| 4,386
| 458
| 51.899225
| 0.855114
| 0.379559
| 0
| 0.862472
| 1
| 0
| 0.231511
| 0.086832
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03191
| false
| 0
| 0.003146
| 0
| 0.082697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
18d525962c9868b36ec931acfdb39613123861b9
| 12,191
|
py
|
Python
|
apis/nb/clients/task_service_client/AuditApi.py
|
CiscoDevNet/APIC-EM-Generic-Scripts-
|
74211d9488f1e77cf56ef86dba20ec8e8eb49cc1
|
[
"ECL-2.0",
"Apache-2.0"
] | 45
|
2016-06-09T15:41:25.000Z
|
2019-08-06T17:13:11.000Z
|
apis/nb/clients/task_service_client/AuditApi.py
|
CiscoDevNet/APIC-EM-Generic-Scripts
|
74211d9488f1e77cf56ef86dba20ec8e8eb49cc1
|
[
"ECL-2.0",
"Apache-2.0"
] | 36
|
2016-06-12T03:03:56.000Z
|
2017-03-13T18:20:11.000Z
|
apis/nb/clients/task_service_client/AuditApi.py
|
CiscoDevNet/APIC-EM-Generic-Scripts
|
74211d9488f1e77cf56ef86dba20ec8e8eb49cc1
|
[
"ECL-2.0",
"Apache-2.0"
] | 15
|
2016-06-22T03:51:37.000Z
|
2019-07-10T10:06:02.000Z
|
#!/usr/bin/env python
#pylint: skip-file
# This source code is licensed under the Apache license found in the
# LICENSE file in the root directory of this project.
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class AuditApi(object):
    """Client for the audit REST endpoints (/audit, /audit/count, /audit/download).

    All three public methods are thin GET wrappers that share one private
    helper for keyword validation, query-string building and response
    deserialization.
    """

    def __init__(self, apiClient):
        # apiClient supplies toPathValue/callAPI/deserialize.
        self.apiClient = apiClient

    def _getWithQueryFilter(self, resourcePath, allParams, methodName,
                            responseType, kwargs):
        """Validate kwargs against allParams, issue a GET, deserialize.

        Returns None when the transport returns an empty response.
        Raises TypeError for any keyword not listed in allParams.
        """
        for key in kwargs:
            if key not in allParams:
                raise TypeError(
                    "Got an unexpected keyword argument '%s' to method %s"
                    % (key, methodName))
        # Build the query string in the declared parameter order.
        queryParams = {}
        for param in allParams:
            if param in kwargs:
                queryParams[param] = self.apiClient.toPathValue(kwargs[param])
        headerParams = {
            'Accept': 'application/json',
            'Content-Type': 'application/json',
        }
        # No form or body payload on these GET endpoints -> postData is None.
        response = self.apiClient.callAPI(
            resourcePath.replace('{format}', 'json'), 'GET', queryParams,
            None, headerParams, files={})
        if not response:
            return None
        return self.apiClient.deserialize(response, responseType)

    def getAuditWithFilter(self, **kwargs):
        """Retrieve Audit records by flexible search.

        Accepted filters: auditRequestor, limit, offset,
        auditRecordStartTime, auditRecordEndTime, deviceIP, siteName,
        deviceName, applicationName, tag, severity (all strings; the epoch
        start/end times bound the records fetched, limit/offset paginate).

        Returns: ListAuditResourceDTOResponse
        """
        return self._getWithQueryFilter(
            '/audit',
            ['auditRequestor', 'limit', 'offset', 'auditRecordStartTime',
             'auditRecordEndTime', 'deviceIP', 'siteName', 'deviceName',
             'applicationName', 'tag', 'severity'],
            'getAuditWithFilter', 'ListAuditResourceDTOResponse', kwargs)

    def getAuditCountWithFilter(self, **kwargs):
        """Retrieve the count of audit records matching a flexible search.

        Accepted filters: auditRequestor, auditRecordStartTime,
        auditRecordEndTime, deviceIP, siteName, deviceName,
        applicationName, tag, severity (all strings).

        Returns: SuccessResult
        """
        return self._getWithQueryFilter(
            '/audit/count',
            ['auditRequestor', 'auditRecordStartTime', 'auditRecordEndTime',
             'deviceIP', 'siteName', 'deviceName', 'applicationName',
             'tag', 'severity'],
            'getAuditCountWithFilter', 'SuccessResult', kwargs)

    def downloadAuditLogs(self, **kwargs):
        """Download matching audit logs to a file (server-side task).

        Accepted filters: auditRequestor, auditRecordStartTime,
        auditRecordEndTime, deviceIP, siteName, deviceName,
        applicationName, tag, severity (all strings).

        Returns: TaskIdResult
        """
        return self._getWithQueryFilter(
            '/audit/download',
            ['auditRequestor', 'auditRecordStartTime', 'auditRecordEndTime',
             'deviceIP', 'siteName', 'deviceName', 'applicationName',
             'tag', 'severity'],
            'downloadAuditLogs', 'TaskIdResult', kwargs)
| 33.4
| 179
| 0.56919
| 1,072
| 12,191
| 6.469216
| 0.13153
| 0.069358
| 0.037635
| 0.05018
| 0.866186
| 0.866186
| 0.866186
| 0.866186
| 0.866186
| 0.845854
| 0
| 0
| 0.339595
| 12,191
| 364
| 180
| 33.491758
| 0.861384
| 0.253794
| 0
| 0.828571
| 0
| 0
| 0.220056
| 0.006177
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0
| 0.028571
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18e8e5326073ec9a19333017c6807d2ee570d54d
| 40,065
|
py
|
Python
|
src/systems/audio_systems.py
|
shgoren/viewmaker
|
d9a7d4b05ac5126fe348c8c5217877ebcff7e2d7
|
[
"MIT"
] | 29
|
2021-04-09T16:02:21.000Z
|
2022-03-08T13:04:45.000Z
|
src/systems/audio_systems.py
|
shgoren/viewmaker
|
d9a7d4b05ac5126fe348c8c5217877ebcff7e2d7
|
[
"MIT"
] | 2
|
2021-06-07T14:49:17.000Z
|
2021-12-11T17:39:01.000Z
|
src/systems/audio_systems.py
|
shgoren/viewmaker
|
d9a7d4b05ac5126fe348c8c5217877ebcff7e2d7
|
[
"MIT"
] | 9
|
2021-04-19T13:12:45.000Z
|
2022-03-07T20:50:28.000Z
|
"""
Try some simple SimCLR inspired audio adaptations. Audio augmentations
include cropping, noise, pitch, and speed. We should fit this on Librispeech.
"""
import os
import math
import random
import librosa
import numpy as np
from dotmap import DotMap
from itertools import chain
from sklearn.metrics import f1_score
from collections import OrderedDict
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader
import torchvision
from src.datasets.librispeech import LibriSpeech, LibriSpeechTwoViews, LibriSpeechTransfer
from src.datasets.voxceleb1 import VoxCeleb1
from src.datasets.audio_mnist import AudioMNIST
from src.datasets.google_speech import GoogleSpeechCommands
from src.datasets.fluent_speech import FluentSpeechCommands
from src.models.transfer import LogisticRegression
from src.models.resnet import resnet18
from src.models import resnet_small
from src.models.viewmaker import Viewmaker
from src.objectives.memory_bank import MemoryBank
from src.utils.utils import l2_normalize, frozen_params, free_params, load_json, compute_accuracy
from src.systems.image_systems import create_dataloader
from src.objectives.simclr import SimCLRObjective
from src.objectives.adversarial import AdversarialSimCLRLoss, AdversarialNCELoss
from src.objectives.infonce import NoiseConstrastiveEstimation
import pytorch_lightning as pl
class PretrainExpertInstDiscSystem(pl.LightningModule):
    """Instance-discrimination (InstDisc) pretraining on LibriSpeech.

    Trains a ResNet encoder with a noise-contrastive objective against a
    memory bank holding one embedding per training example.  Validation
    accuracy is measured by nearest-neighbor speaker-ID lookup in that bank.
    """

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.batch_size = config.optim_params.batch_size
        # self.device = f'cuda:{config.gpu_device}' if config.cuda else 'cpu'
        self.train_dataset, self.val_dataset = self.create_datasets()
        self.model = self.create_encoder()
        # One embedding slot per training example, refreshed after each step.
        self.memory_bank = MemoryBank(len(self.train_dataset),
                                      self.config.model_params.out_dim)
        # Speaker label per training index; used by nearest-neighbor validation.
        self.train_ordered_labels = self.train_dataset.all_speaker_ids

    def create_datasets(self):
        """Build train/val LibriSpeech datasets from ``config.data_params``.

        Spectral and waveform augmentations are mutually exclusive on the
        train split; the validation split is always un-augmented.
        """
        print('Initializing train dataset.')
        train_dataset = LibriSpeech(
            train=True,
            spectral_transforms=self.config.data_params.spectral_transforms,
            wavform_transforms=not self.config.data_params.spectral_transforms,
            small=self.config.data_params.small,
            input_size=self.config.data_params.input_size,
        )
        print('Initializing validation dataset.')
        val_dataset = LibriSpeech(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            small=self.config.data_params.small,
            test_url=self.config.data_params.test_url,
            input_size=self.config.data_params.input_size,
        )
        return train_dataset, val_dataset

    def create_encoder(self):
        """Build the encoder: a small custom ResNet18 or a torchvision ResNet."""
        if self.config.model_params.resnet_small:
            encoder_model = resnet_small.ResNet18(
                self.config.model_params.out_dim,
                num_channels=1,
                input_size=64,
            )
        else:
            resnet_class = getattr(
                torchvision.models,
                self.config.model_params.resnet_version,
            )
            encoder_model = resnet_class(
                pretrained=False,
                num_classes=self.config.model_params.out_dim,
            )
            # Spectrogram input is single-channel; replace the stock RGB stem.
            encoder_model.conv1 = nn.Conv2d(1, 64, kernel_size=7, stride=2,
                                            padding=3, bias=False)
        if self.config.model_params.projection_head:
            # SimCLR-style 2-layer MLP projection head before the final fc.
            mlp_dim = encoder_model.fc.weight.size(1)
            encoder_model.fc = nn.Sequential(
                nn.Linear(mlp_dim, mlp_dim),
                nn.ReLU(),
                encoder_model.fc,
            )
        return encoder_model

    def configure_optimizers(self):
        optim = torch.optim.SGD(self.model.parameters(),
                                lr=self.config.optim_params.learning_rate,
                                momentum=self.config.optim_params.momentum,
                                weight_decay=self.config.optim_params.weight_decay)
        return [optim], []

    def forward(self, inputs):
        return self.model(inputs)

    def get_losses_for_batch(self, batch):
        """Compute the NCE loss and refresh the memory-bank rows for this batch."""
        indices, inputs, _ = batch
        outputs = self.forward(inputs)
        loss_fn = NoiseConstrastiveEstimation(indices, outputs, self.memory_bank,
                                              k=self.config.loss_params.k,
                                              t=self.config.loss_params.t,
                                              m=self.config.loss_params.m)
        loss = loss_fn.get_loss()
        with torch.no_grad():
            # Update the bank entries for the examples just embedded.
            new_data_memory = loss_fn.updated_new_data_memory()
            self.memory_bank.update(indices, new_data_memory)
        return loss

    def training_step(self, batch, batch_idx):
        loss = self.get_losses_for_batch(batch)
        metrics = {'loss': loss}
        return {'loss': loss, 'log': metrics}

    def get_nearest_neighbor_label(self, embs, labels):
        """
        NOTE: ONLY TO BE USED FOR VALIDATION.

        For each example in validation, find the nearest example in the
        training dataset using the memory bank. Assume its label as
        the predicted label.  Returns (num_correct, batch_size).
        """
        all_dps = self.memory_bank.get_all_dot_products(embs)
        _, neighbor_idxs = torch.topk(all_dps, k=1, sorted=False, dim=1)
        neighbor_idxs = neighbor_idxs.squeeze(1)
        neighbor_idxs = neighbor_idxs.cpu().numpy()
        neighbor_labels = self.train_ordered_labels[neighbor_idxs]
        neighbor_labels = torch.from_numpy(neighbor_labels).long()
        num_correct = torch.sum(neighbor_labels.cpu() == labels.cpu()).item()
        return num_correct, embs.size(0)

    def validation_step(self, batch, batch_idx):
        _, inputs, speaker_ids = batch
        outputs = self.model(inputs)
        num_correct, batch_size = self.get_nearest_neighbor_label(outputs, speaker_ids)
        # Tensorize the counts so they can be aggregated across devices.
        num_correct = torch.tensor(num_correct, dtype=float, device=self.device)
        batch_size = torch.tensor(batch_size, dtype=float, device=self.device)
        return OrderedDict({'val_num_correct': num_correct,
                            'val_num_total': batch_size})

    def validation_epoch_end(self, outputs):
        """Aggregate per-batch counts into epoch-level validation accuracy."""
        metrics = {}
        for key in outputs[0].keys():
            metrics[key] = torch.stack([elem[key] for elem in outputs]).mean()
        num_correct = torch.stack([out['val_num_correct'] for out in outputs]).sum()
        num_total = torch.stack([out['val_num_total'] for out in outputs]).sum()
        val_acc = num_correct / float(num_total)
        metrics['val_acc'] = val_acc
        return {'log': metrics, 'val_acc': val_acc}

    def train_dataloader(self):
        return create_dataloader(self.train_dataset, self.config, self.batch_size)

    def val_dataloader(self):
        return create_dataloader(self.val_dataset, self.config, self.batch_size, shuffle=False)
class PretrainExpertSimCLRSystem(PretrainExpertInstDiscSystem):
    """SimCLR pretraining: contrast two augmented views of each utterance."""

    def create_datasets(self):
        """Return (two-view augmented train set, plain validation set)."""
        data_cfg = self.config.data_params
        use_spectral = data_cfg.spectral_transforms
        train_dataset = LibriSpeechTwoViews(
            train=True,
            spectral_transforms=use_spectral,
            wavform_transforms=not use_spectral,
            small=data_cfg.small,
            input_size=data_cfg.input_size,
        )
        val_dataset = LibriSpeech(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            small=data_cfg.small,
            test_url=data_cfg.test_url,
            input_size=data_cfg.input_size,
        )
        return train_dataset, val_dataset

    def get_losses_for_batch(self, batch):
        """SimCLR loss over the two views; also refreshes the memory bank."""
        indices, view_a, view_b, _ = batch
        emb_a = self.forward(view_a)
        emb_b = self.forward(view_b)
        objective = SimCLRObjective(emb_a, emb_b,
                                    t=self.config.loss_params.t)
        loss = objective.get_loss()
        # The bank only serves nearest-neighbor validation: store the mean of
        # the two L2-normalized view embeddings for each example.
        with torch.no_grad():
            mean_emb = (l2_normalize(emb_a, dim=1) +
                        l2_normalize(emb_b, dim=1)) / 2.
            self.memory_bank.update(indices, mean_emb)
        return loss
class PretrainViewMakerInstDiscSystem(PretrainExpertInstDiscSystem):
    """
    InstDisc + Viewmaker: a learned view-generator network produces the
    augmentations, trained adversarially against the encoder.  Optimizer
    index 0 updates the encoder, index 1 the viewmaker.
    """
    def __init__(self, config):
        super().__init__(config)
        self.view = self.create_viewmaker()

    def create_datasets(self):
        """Datasets with no hand-designed augmentations — views come from
        the viewmaker network instead."""
        train_dataset = LibriSpeech(
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            small=self.config.data_params.small,
            input_size=self.config.data_params.input_size,
        )
        val_dataset = LibriSpeech(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            small=self.config.data_params.small,
            test_url=self.config.data_params.test_url,
            input_size=self.config.data_params.input_size,
        )
        return train_dataset, val_dataset

    def create_viewmaker(self):
        """Build the stochastic view-generator network."""
        view_model = Viewmaker(
            num_channels=1,
            distortion_budget=self.config.model_params.view_bound_magnitude,
            activation=self.config.model_params.generator_activation or 'relu',
            num_res_blocks=self.config.model_params.num_res_blocks,
            clamp=False,
        )
        return view_model

    def configure_optimizers(self):
        """Two optimizers: [encoder SGD, viewmaker Adam-or-SGD]."""
        encoder_optim = torch.optim.SGD(
            self.model.parameters(),
            lr=self.config.optim_params.learning_rate,
            momentum=self.config.optim_params.momentum,
            weight_decay=self.config.optim_params.weight_decay,
        )
        view_optim_name = self.config.optim_params.viewmaker_optim
        view_parameters = self.view.parameters()
        if view_optim_name == 'adam':
            view_optim = torch.optim.Adam(view_parameters)
        elif not view_optim_name or view_optim_name == 'sgd':
            # Falls back to the encoder LR when no viewmaker LR is configured.
            view_optim = torch.optim.SGD(
                view_parameters,
                lr=self.config.optim_params.viewmaker_learning_rate or self.config.optim_params.learning_rate,
                momentum=self.config.optim_params.momentum,
                weight_decay=self.config.optim_params.weight_decay,
            )
        else:
            raise ValueError(f'Optimizer {view_optim_name} not implemented')
        return [encoder_optim, view_optim], []

    def forward(self, batch):
        """Generate a view of the inputs and embed it; returns an emb dict."""
        indices, inputs, _ = batch
        view = self.view(inputs)
        if self.config.model_params.view_clip:
            # Optionally clip views to mean +/- num_std dataset stdevs.
            num_std = self.config.model_params.view_clip_num_std
            tot_std = num_std * self.train_dataset.normalize_stdev
            view_min = self.train_dataset.normalize_mean - tot_std
            view_max = self.train_dataset.normalize_mean + tot_std
            view = torch.clamp(view, view_min, view_max)
        emb_dict = {
            'indices': indices,
            'view_embs': self.model(view),
        }
        return emb_dict

    def get_losses_for_batch(self, emb_dict):
        """Return (encoder_loss, view_maker_loss) from the adversarial NCE
        objective, refreshing the memory bank as a side effect."""
        indices = emb_dict['indices']
        outputs = emb_dict['view_embs']
        loss_fn = AdversarialNCELoss(
            indices, outputs, self.memory_bank,
            k=self.config.loss_params.k,
            t=self.config.loss_params.t,
            m=self.config.loss_params.m,
            view_maker_loss_weight=self.config.loss_params.view_maker_loss_weight,
        )
        encoder_loss, view_maker_loss = loss_fn.get_loss()
        with torch.no_grad():
            new_data_memory = loss_fn.updated_new_data_memory()
            self.memory_bank.update(indices, new_data_memory)
        return encoder_loss, view_maker_loss

    def get_view_bound_magnitude(self):
        # Constant distortion budget for the viewmaker.
        return self.config.model_params.view_bound_magnitude

    def training_step(self, batch, batch_idx, optimizer_idx):
        emb_dict = self.forward(batch)
        # Tensorize so the index survives the dp/ddp gather in training_step_end.
        emb_dict['optimizer_idx'] = torch.tensor(optimizer_idx, device=self.device)
        return emb_dict

    def training_step_end(self, emb_dict):
        """Route the loss to whichever optimizer this step belongs to."""
        encoder_loss, view_maker_loss = self.get_losses_for_batch(emb_dict)
        # Handle Tensor (dp) and int (ddp) cases
        if emb_dict['optimizer_idx'].__class__ == int or emb_dict['optimizer_idx'].dim() == 0:
            optimizer_idx = emb_dict['optimizer_idx']
        else:
            optimizer_idx = emb_dict['optimizer_idx'][0]
        if optimizer_idx == 0:
            metrics = {
                'encoder_loss': encoder_loss,
            }
            return {'loss': encoder_loss, 'log': metrics}
        else:
            # update the bound allowed for view
            self.view.bound_magnitude = self.get_view_bound_magnitude()
            metrics = {
                'view_maker_loss': view_maker_loss,
                # 'view_bound_magnitude': self.view.bound_magnitude,
            }
            return {'loss': view_maker_loss, 'log': metrics}

    def validation_step(self, batch, batch_idx):
        # Validation uses the raw (un-viewed) inputs and k-NN speaker lookup.
        _, inputs, labels = batch
        outputs = self.model(inputs)
        num_correct, batch_size = self.get_nearest_neighbor_label(outputs, labels)
        output = OrderedDict({
            'val_num_correct': torch.tensor(num_correct, dtype=float, device=self.device),
            'val_num_total': torch.tensor(batch_size, dtype=float, device=self.device),
        })
        return output

    def validation_epoch_end(self, outputs):
        """Aggregate per-batch counts into epoch-level validation accuracy."""
        metrics = {}
        for key in outputs[0].keys():
            metrics[key] = torch.stack([elem[key] for elem in outputs]).mean()
        num_correct = torch.stack([out['val_num_correct'] for out in outputs]).sum()
        num_total = torch.stack([out['val_num_total'] for out in outputs]).sum()
        val_acc = num_correct / float(num_total)
        metrics['val_acc'] = val_acc
        progress_bar = {'acc': val_acc}
        return {'log': metrics, 'val_acc': val_acc, 'progress_bar': progress_bar}
class PretrainViewMakerSimCLRSystem(PretrainExpertSimCLRSystem):
    """
    SimCLR + ViewMaker with Linf/L1 constraints.

    Both views of each example are produced by the learned viewmaker
    network (no hand-designed augmentations).  Optimizer index 0 updates
    the encoder, index 1 the viewmaker, adversarially.
    """
    def __init__(self, config):
        super().__init__(config)
        self.view = self.create_viewmaker()

    def create_datasets(self):
        """Two-view train set with no hand-designed augmentations; plain val set."""
        train_dataset = LibriSpeechTwoViews(
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            small=self.config.data_params.small,
            input_size=self.config.data_params.input_size,
        )
        val_dataset = LibriSpeech(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            small=self.config.data_params.small,
            test_url=self.config.data_params.test_url,
            input_size=self.config.data_params.input_size,
        )
        return train_dataset, val_dataset

    def create_viewmaker(self):
        """Build the stochastic view-generator network."""
        # (Removed an unused local that read self.train_dataset.FILTER_SIZE
        # without ever using it.)
        view_model = Viewmaker(
            num_channels=1,
            distortion_budget=self.config.model_params.view_bound_magnitude,
            activation=self.config.model_params.generator_activation or 'relu',
            num_res_blocks=self.config.model_params.num_res_blocks,
            clamp=False,
        )
        return view_model

    def configure_optimizers(self):
        """Two optimizers: [encoder SGD, viewmaker Adam-or-SGD]."""
        encoder_optim = torch.optim.SGD(
            self.model.parameters(),
            lr=self.config.optim_params.learning_rate,
            momentum=self.config.optim_params.momentum,
            weight_decay=self.config.optim_params.weight_decay,
        )
        view_optim_name = self.config.optim_params.viewmaker_optim
        view_parameters = self.view.parameters()
        if view_optim_name == 'adam':
            view_optim = torch.optim.Adam(view_parameters)
        elif not view_optim_name or view_optim_name == 'sgd':
            # Falls back to the encoder LR when no viewmaker LR is configured.
            view_optim = torch.optim.SGD(
                view_parameters,
                lr=self.config.optim_params.viewmaker_learning_rate or self.config.optim_params.learning_rate,
                momentum=self.config.optim_params.momentum,
                weight_decay=self.config.optim_params.weight_decay,
            )
        else:
            raise ValueError(f'Optimizer {view_optim_name} not implemented')
        return [encoder_optim, view_optim], []

    def forward(self, batch):
        """Generate two views of the inputs and embed both; returns an emb dict."""
        indices, inputs, inputs2, _ = batch
        view1 = self.view(inputs)
        view2 = self.view(inputs2)
        if self.config.model_params.view_clip:
            # Optionally clip views to mean +/- num_std dataset stdevs.
            num_std = self.config.model_params.view_clip_num_std
            tot_std = num_std * self.train_dataset.normalize_stdev
            view_min = self.train_dataset.normalize_mean - tot_std
            view_max = self.train_dataset.normalize_mean + tot_std
            view1 = torch.clamp(view1, view_min, view_max)
            view2 = torch.clamp(view2, view_min, view_max)
        emb_dict = {
            'indices': indices,
            'view1_embs': self.model(view1),
            'view2_embs': self.model(view2),
        }
        return emb_dict

    def get_losses_for_batch(self, emb_dict):
        """Return (encoder_loss, view_maker_loss) from the adversarial SimCLR
        objective, refreshing the memory bank as a side effect."""
        loss_function = AdversarialSimCLRLoss(
            embs1=emb_dict['view1_embs'],
            embs2=emb_dict['view2_embs'],
            t=self.config.loss_params.t,
            view_maker_loss_weight=self.config.loss_params.view_maker_loss_weight
        )
        encoder_loss, view_maker_loss = loss_function.get_loss()
        with torch.no_grad():
            # The bank only serves nearest-neighbor validation.
            new_data_memory = l2_normalize(emb_dict['view1_embs'].detach(), dim=1)
            self.memory_bank.update(emb_dict['indices'], new_data_memory)
        return encoder_loss, view_maker_loss

    def get_view_bound_magnitude(self):
        """Distortion budget: either a constant or a linear ramp over training."""
        if self.config.model_params.view_bound_linear_scale:
            batch_size = self.config.optim_params.batch_size
            num_epochs = self.config.num_epochs
            num_steps = int(math.ceil(len(self.train_dataset) / batch_size)) * num_epochs
            view_bound_max = self.config.model_params.view_bound_max
            view_bound_min = self.config.model_params.view_bound_min
            iter_incr = (view_bound_max - view_bound_min) / num_steps
            return view_bound_min + self.global_step * iter_incr
        else:
            return self.config.model_params.view_bound_magnitude  # constant

    def training_step(self, batch, batch_idx, optimizer_idx):
        emb_dict = self.forward(batch)
        # Tensorize so the index survives the dp/ddp gather in training_step_end.
        emb_dict['optimizer_idx'] = torch.tensor(optimizer_idx, device=self.device)
        return emb_dict

    def training_step_end(self, emb_dict):
        """Route the loss to whichever optimizer this step belongs to."""
        encoder_loss, view_maker_loss = self.get_losses_for_batch(emb_dict)
        # Handle Tensor (dp) and int (ddp) cases
        if emb_dict['optimizer_idx'].__class__ == int or emb_dict['optimizer_idx'].dim() == 0:
            optimizer_idx = emb_dict['optimizer_idx']
        else:
            optimizer_idx = emb_dict['optimizer_idx'][0]
        if optimizer_idx == 0:
            metrics = {
                'encoder_loss': encoder_loss,
            }
            return {'loss': encoder_loss, 'log': metrics}
        else:
            # update the bound allowed for view
            self.view.bound_magnitude = self.get_view_bound_magnitude()
            metrics = {
                'view_maker_loss': view_maker_loss,
                # 'view_bound_magnitude': self.view.bound_magnitude,
            }
            return {'loss': view_maker_loss, 'log': metrics}

    def validation_step(self, batch, batch_idx):
        # Validation uses the raw (un-viewed) inputs and k-NN speaker lookup.
        _, inputs, labels = batch
        outputs = self.model(inputs)
        num_correct, batch_size = self.get_nearest_neighbor_label(outputs, labels)
        output = OrderedDict({
            'val_num_correct': torch.tensor(num_correct, dtype=float, device=self.device),
            'val_num_total': torch.tensor(batch_size, dtype=float, device=self.device),
        })
        return output

    def validation_epoch_end(self, outputs):
        """Aggregate per-batch counts into epoch-level validation accuracy."""
        metrics = {}
        for key in outputs[0].keys():
            metrics[key] = torch.stack([elem[key] for elem in outputs]).mean()
        num_correct = torch.stack([out['val_num_correct'] for out in outputs]).sum()
        num_total = torch.stack([out['val_num_total'] for out in outputs]).sum()
        val_acc = num_correct / float(num_total)
        metrics['val_acc'] = val_acc
        progress_bar = {'acc': val_acc}
        return {'log': metrics, 'val_acc': val_acc, 'progress_bar': progress_bar}
class BaseTransferExpertSystem(pl.LightningModule):
    """Base class for linear-evaluation transfer on top of a frozen,
    expert-augmentation pretrained encoder.

    Subclasses must provide ``create_datasets`` and ``create_model``.
    """
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.batch_size = config.optim_params.batch_size
        self.encoder, self.pretrain_config = self.load_pretrained_model()
        resnet = self.pretrain_config.model_params.resnet_version
        # Feature width fed to the linear classifier depends on the backbone
        # and on whether pre-pool feature maps are flattened.
        if resnet == 'resnet18':
            if self.config.model_params.use_prepool:
                if self.pretrain_config.model_params.resnet_small:
                    num_features = 512 * 4 * 4
                else:
                    num_features = 512 * 2 * 2
            else:
                num_features = 512
        elif resnet == 'resnet50':
            if self.config.model_params.use_prepool:
                num_features = 2048 * 4 * 4
            else:
                num_features = 2048
        else:
            raise Exception(f'resnet {resnet} not supported.')
        if not self.pretrain_config.model_params.resnet_small:
            # Chop the classifier head off a torchvision ResNet: -1 drops only
            # the fc; -2 also drops the global pool to expose pre-pool maps.
            if self.config.model_params.use_prepool:
                cut_ix = -2
            else:
                cut_ix = -1
            # keep pooling layer
            self.encoder = nn.Sequential(*list(self.encoder.children())[:cut_ix])
        self.encoder = self.encoder.eval()
        frozen_params(self.encoder)
        self.train_dataset, self.val_dataset = self.create_datasets()
        self.num_features = num_features
        self.model = self.create_model()

    def load_pretrained_model(self):
        """Re-instantiate the pretraining system named in the experiment's
        config.json, load its checkpoint, and return (frozen encoder, config).
        """
        base_dir = self.config.pretrain_model.exp_dir
        checkpoint_name = self.config.pretrain_model.checkpoint_name
        config_path = os.path.join(base_dir, 'config.json')
        config_json = load_json(config_path)
        config = DotMap(config_json)
        # The saved config names one of the system classes in this module.
        SystemClass = globals()[config.system]
        system = SystemClass(config)
        checkpoint_file = os.path.join(base_dir, 'checkpoints', checkpoint_name)
        checkpoint = torch.load(checkpoint_file, map_location=self.device)
        system.load_state_dict(checkpoint['state_dict'])
        encoder = system.model.eval()
        for param in encoder.parameters():
            param.requires_grad = False
        return encoder, config

    def train_dataloader(self):
        return create_dataloader(self.train_dataset, self.config, self.batch_size)

    def val_dataloader(self):
        return create_dataloader(self.val_dataset, self.config, self.batch_size, shuffle=False)
class TransferExpertLibriSpeechSystem(BaseTransferExpertSystem):
    """Logistic-regression transfer on LibriSpeech speaker ID, using the
    frozen expert-pretrained encoder as a feature extractor."""

    def create_datasets(self):
        # Train split may use expert augmentations; val split never does.
        train_dataset = LibriSpeechTransfer(
            train=True,
            spectral_transforms=self.config.data_params.spectral_transforms,
            wavform_transforms=not self.config.data_params.spectral_transforms,
            input_size=self.pretrain_config.data_params.input_size,
        )
        val_dataset = LibriSpeechTransfer(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=self.pretrain_config.data_params.input_size,
        )
        return train_dataset, val_dataset

    def create_model(self):
        """Linear probe over the frozen encoder features."""
        model = LogisticRegression(self.num_features, self.train_dataset.num_labels)
        return model.to(self.device)

    def configure_optimizers(self):
        parameters = self.model.parameters()
        # NOTE(review): this compares the whole optim_params object to the
        # string 'adam', which can never be equal for a DotMap — so the Adam
        # branch looks unreachable and SGD is always used.  Presumably a
        # specific field (e.g. optim_params.optimizer) was intended; confirm.
        if self.config.optim_params == 'adam':
            optim = torch.optim.Adam(parameters)
        else:
            optim = torch.optim.SGD(
                parameters,
                lr=self.config.optim_params.learning_rate,
                momentum=self.config.optim_params.momentum,
                weight_decay=self.config.optim_params.weight_decay,
            )
        return [optim], []

    def forward(self, inputs):
        """Encode inputs with the frozen encoder and classify with the probe."""
        batch_size = inputs.size(0)
        if self.pretrain_config.model_params.resnet_small:
            # The small ResNet takes a layer tap; 5 when pre-pool maps are used.
            layer = 5 if self.config.model_params.use_prepool else 6
            embs = self.encoder(inputs, layer=layer)
            embs = F.avg_pool2d(embs, 2)
        else:
            embs = self.encoder(inputs)
        embs = embs.view(batch_size, -1)
        return self.model(embs)

    def get_losses_for_batch(self, batch):
        _, inputs, label = batch
        logits = self.forward(inputs)
        return F.cross_entropy(logits, label)

    def get_accuracies_for_batch(self, batch):
        """Return (top-1 correct, top-5 correct, batch size) for one batch."""
        _, inputs, label = batch
        logits = self.forward(inputs)
        outputs = F.log_softmax(logits, dim=1)
        num_correct_top1, num_correct_top5 = compute_accuracy(outputs, label, topk=(1,5))
        num_total = inputs.size(0)
        return num_correct_top1, num_correct_top5, num_total

    def training_step(self, batch, batch_idx):
        loss = self.get_losses_for_batch(batch)
        with torch.no_grad():
            num_correct_top1, num_correct_top5, num_total = self.get_accuracies_for_batch(batch)
            metrics = {
                'train_loss': loss,
                'train_num_correct_top1': num_correct_top1,
                'train_num_correct_top5': num_correct_top5,
                'train_num_total': num_total,
                'train_top1': num_correct_top1 / float(num_total),
                'train_top5': num_correct_top5 / float(num_total),
            }
        return {'loss': loss, 'log': metrics}

    def validation_step(self, batch, batch_idx):
        loss = self.get_losses_for_batch(batch)
        num_correct_top1, num_correct_top5, num_total = self.get_accuracies_for_batch(batch)
        return OrderedDict({
            'val_loss': loss,
            'val_num_correct_top1': num_correct_top1,
            'val_num_correct_top5': num_correct_top5,
            'val_num_total': num_total,
            'val_top1': num_correct_top1 / float(num_total),
            'val_top5': num_correct_top5 / float(num_total),
        })

    def validation_epoch_end(self, outputs):
        """Aggregate batch counts into exact epoch-level top-1/top-5 accuracy."""
        metrics = {}
        for key in outputs[0].keys():
            metrics[key] = torch.tensor([elem[key] for elem in outputs]).float().mean()
        num_correct_top1 = sum([out['val_num_correct_top1'] for out in outputs])
        num_correct_top5 = sum([out['val_num_correct_top5'] for out in outputs])
        num_total = sum([out['val_num_total'] for out in outputs])
        val_top1 = num_correct_top1 / float(num_total)
        val_top5 = num_correct_top5 / float(num_total)
        metrics['val_top1'] = val_top1
        metrics['val_top5'] = val_top5
        return {'val_loss': metrics['val_loss'], 'log': metrics,
                'val_top1': val_top1,'val_top5': val_top5}
class TransferExpertVoxCeleb1System(TransferExpertLibriSpeechSystem):
    """Expert-transfer evaluation on the VoxCeleb1 dataset."""

    def create_datasets(self):
        """Return (augmented train set, un-augmented val set)."""
        use_spectral = self.config.data_params.spectral_transforms
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = VoxCeleb1(
            train=True,
            spectral_transforms=use_spectral,
            wavform_transforms=not use_spectral,
            input_size=input_size,
        )
        val_dataset = VoxCeleb1(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class TransferExpertAudioMNISTSystem(TransferExpertLibriSpeechSystem):
    """Expert-transfer evaluation on the AudioMNIST dataset."""

    def create_datasets(self):
        """Return (augmented train set, un-augmented val set)."""
        use_spectral = self.config.data_params.spectral_transforms
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = AudioMNIST(
            train=True,
            spectral_transforms=use_spectral,
            wavform_transforms=not use_spectral,
            input_size=input_size,
        )
        val_dataset = AudioMNIST(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class TransferExpertGoogleSpeechCommandsSystem(TransferExpertLibriSpeechSystem):
    """Expert-transfer evaluation on the Google Speech Commands dataset."""

    def create_datasets(self):
        """Return (augmented train set, un-augmented val set)."""
        use_spectral = self.config.data_params.spectral_transforms
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = GoogleSpeechCommands(
            train=True,
            spectral_transforms=use_spectral,
            wavform_transforms=not use_spectral,
            input_size=input_size,
        )
        val_dataset = GoogleSpeechCommands(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class TransferExpertFluentSpeechCommandsSystem(TransferExpertLibriSpeechSystem):
    """Expert-transfer evaluation on Fluent Speech Commands (intent slots)."""

    def create_datasets(self):
        """Return (augmented train set, un-augmented val set) for the
        configured caller intent."""
        intent = self.config.data_params.caller_intent
        use_spectral = self.config.data_params.spectral_transforms
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = FluentSpeechCommands(
            intent,
            train=True,
            spectral_transforms=use_spectral,
            wavform_transforms=not use_spectral,
            input_size=input_size,
        )
        val_dataset = FluentSpeechCommands(
            intent,
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class BaseTransferViewMakerSystem(pl.LightningModule):
    """Base class for linear-evaluation transfer where the pretrained
    checkpoint contains a viewmaker; encoder AND viewmaker are loaded frozen.

    Subclasses must provide ``create_datasets`` and ``create_model``.
    """
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.batch_size = config.optim_params.batch_size
        self.encoder, self.viewmaker, self.system, self.pretrain_config = self.load_pretrained_model()
        resnet = self.pretrain_config.model_params.resnet_version
        # Feature width fed to the linear classifier depends on the backbone
        # and on whether pre-pool feature maps are flattened.
        if resnet == 'resnet18':
            if self.config.model_params.use_prepool:
                if self.pretrain_config.model_params.resnet_small:
                    num_features = 512 * 4 * 4
                else:
                    num_features = 512 * 2 * 2
            else:
                num_features = 512
        elif resnet == 'resnet50':
            if self.config.model_params.use_prepool:
                num_features = 2048 * 4 * 4
            else:
                num_features = 2048
        else:
            raise Exception(f'resnet {resnet} not supported.')
        if not self.pretrain_config.model_params.resnet_small:
            # Chop the classifier head off a torchvision ResNet: -1 drops only
            # the fc; -2 also drops the global pool to expose pre-pool maps.
            if self.config.model_params.use_prepool:
                cut_ix = -2
            else:
                cut_ix = -1
            self.encoder = nn.Sequential(*list(self.encoder.children())[:cut_ix])
        self.encoder = self.encoder.eval()
        frozen_params(self.encoder)
        frozen_params(self.viewmaker)
        self.num_features = num_features
        self.train_dataset, self.val_dataset = self.create_datasets()
        self.model = self.create_model()

    def load_pretrained_model(self):
        """Re-instantiate the pretraining system named in the experiment's
        config.json and load its checkpoint.

        Returns (encoder, viewmaker, system, pretrain_config) with encoder
        and viewmaker in eval mode and all their grads disabled.
        """
        base_dir = self.config.pretrain_model.exp_dir
        checkpoint_name = self.config.pretrain_model.checkpoint_name
        config_path = os.path.join(base_dir, 'config.json')
        config_json = load_json(config_path)
        config = DotMap(config_json)
        # The saved config names one of the system classes in this module.
        SystemClass = globals()[config.system]
        system = SystemClass(config)
        checkpoint_file = os.path.join(base_dir, 'checkpoints', checkpoint_name)
        checkpoint = torch.load(checkpoint_file, map_location=self.device)
        system.load_state_dict(checkpoint['state_dict'])
        encoder = system.model.eval()
        viewmaker = system.view.eval()
        for param in encoder.parameters():
            param.requires_grad = False
        for param in viewmaker.parameters():
            param.requires_grad = False
        return encoder, viewmaker, system, system.config

    def train_dataloader(self):
        return create_dataloader(self.train_dataset, self.config, self.batch_size)

    def val_dataloader(self):
        return create_dataloader(self.val_dataset, self.config, self.batch_size, shuffle=False)
class TransferViewMakerLibriSpeechSystem(BaseTransferViewMakerSystem):
    """Logistic-regression transfer on LibriSpeech speaker ID, with the
    frozen viewmaker used as the (train-time only) augmentation."""

    def create_datasets(self):
        # No hand-designed augmentations: views come from the viewmaker.
        train_dataset = LibriSpeechTransfer(
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=self.pretrain_config.data_params.input_size,
        )
        val_dataset = LibriSpeechTransfer(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=self.pretrain_config.data_params.input_size,
        )
        return train_dataset, val_dataset

    def create_model(self):
        """Linear probe over the frozen encoder features."""
        model = LogisticRegression(self.num_features, self.train_dataset.num_labels)
        return model.to(self.device)

    def configure_optimizers(self):
        parameters = self.model.parameters()
        # NOTE(review): this compares the whole optim_params object to the
        # string 'adam', which can never be equal for a DotMap — so the Adam
        # branch looks unreachable and SGD is always used.  Presumably a
        # specific field (e.g. optim_params.optimizer) was intended; confirm.
        if self.config.optim_params == 'adam':
            optim = torch.optim.Adam(parameters)
        else:
            optim = torch.optim.SGD(
                parameters,
                lr=self.config.optim_params.learning_rate,
                momentum=self.config.optim_params.momentum,
                weight_decay=self.config.optim_params.weight_decay,
            )
        return [optim], []

    def forward(self, inputs, train=True):
        """Optionally viewmaker-augment (train only), encode, and classify."""
        batch_size = inputs.size(0)
        if train:
            inputs = self.viewmaker(inputs)
            if self.pretrain_config.model_params.view_clip:
                # Clip views to mean +/- num_std dataset stdevs, as in pretraining.
                num_std = self.pretrain_config.model_params.view_clip_num_std
                tot_std = num_std * self.train_dataset.normalize_stdev
                view_min = self.train_dataset.normalize_mean - tot_std
                view_max = self.train_dataset.normalize_mean + tot_std
                inputs = torch.clamp(inputs, view_min, view_max)
        if self.pretrain_config.model_params.resnet_small:
            # The small ResNet takes a layer tap; 5 when pre-pool maps are used.
            layer = 5 if self.config.model_params.use_prepool else 6
            embs = self.encoder(inputs, layer=layer)
            embs = F.avg_pool2d(embs, 2)
        else:
            embs = self.encoder(inputs)
        embs = embs.view(batch_size, -1)
        return self.model(embs)

    def get_losses_for_batch(self, batch, train=True):
        _, inputs, label = batch
        logits = self.forward(inputs, train=train)
        return F.cross_entropy(logits, label)

    def get_accuracies_for_batch(self, batch, train=True):
        """Return (top-1 correct, top-5 correct, batch size) for one batch."""
        _, inputs, label = batch
        logits = self.forward(inputs, train=train)
        outputs = F.log_softmax(logits, dim=1)
        num_correct_top1, num_correct_top5 = compute_accuracy(outputs, label, topk=(1,5))
        num_total = inputs.size(0)
        return num_correct_top1, num_correct_top5, num_total

    def training_step(self, batch, batch_idx):
        loss = self.get_losses_for_batch(batch, train=True)
        with torch.no_grad():
            num_correct_top1, num_correct_top5, num_total = self.get_accuracies_for_batch(batch, train=True)
            metrics = {
                'train_loss': loss,
                'train_num_correct_top1': num_correct_top1,
                'train_num_correct_top5': num_correct_top5,
                'train_num_total': num_total,
                'train_top1': num_correct_top1 / float(num_total),
                'train_top5': num_correct_top5 / float(num_total),
            }
        return {'loss': loss, 'log': metrics}

    def validation_step(self, batch, batch_idx):
        # train=False: validate on the raw, un-augmented inputs.
        loss = self.get_losses_for_batch(batch, train=False)
        num_correct_top1, num_correct_top5, num_total = self.get_accuracies_for_batch(batch, train=False)
        return OrderedDict({
            'val_loss': loss,
            'val_num_correct_top1': num_correct_top1,
            'val_num_correct_top5': num_correct_top5,
            'val_num_total': num_total,
            'val_top1': num_correct_top1 / float(num_total),
            'val_top5': num_correct_top5 / float(num_total),
        })

    def validation_epoch_end(self, outputs):
        """Aggregate batch counts into exact epoch-level top-1/top-5 accuracy."""
        metrics = {}
        for key in outputs[0].keys():
            metrics[key] = torch.tensor([elem[key] for elem in outputs]).float().mean()
        num_correct_top1 = sum([out['val_num_correct_top1'] for out in outputs])
        num_correct_top5 = sum([out['val_num_correct_top5'] for out in outputs])
        num_total = sum([out['val_num_total'] for out in outputs])
        val_top1 = num_correct_top1 / float(num_total)
        val_top5 = num_correct_top5 / float(num_total)
        metrics['val_top1'] = val_top1
        metrics['val_top5'] = val_top5
        return {'val_loss': metrics['val_loss'], 'log': metrics,
                'val_top1': val_top1,'val_top5': val_top5}
class TransferViewMakerVoxCeleb1System(TransferViewMakerLibriSpeechSystem):
    """Viewmaker-transfer evaluation on the VoxCeleb1 dataset."""

    def create_datasets(self):
        """Both splits un-augmented; the viewmaker supplies train-time views."""
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = VoxCeleb1(
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        val_dataset = VoxCeleb1(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class TransferViewMakerAudioMNISTSystem(TransferViewMakerLibriSpeechSystem):
    """Viewmaker-transfer evaluation on the AudioMNIST dataset."""

    def create_datasets(self):
        """Both splits un-augmented; the viewmaker supplies train-time views."""
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = AudioMNIST(
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        val_dataset = AudioMNIST(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class TransferViewMakerGoogleSpeechCommandsSystem(TransferViewMakerLibriSpeechSystem):
    """Viewmaker-transfer evaluation on the Google Speech Commands dataset."""

    def create_datasets(self):
        """Both splits un-augmented; the viewmaker supplies train-time views."""
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = GoogleSpeechCommands(
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        val_dataset = GoogleSpeechCommands(
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
class TransferViewMakerFluentSpeechCommandsSystem(TransferViewMakerLibriSpeechSystem):
    """Viewmaker-transfer evaluation on Fluent Speech Commands (intent slots)."""

    def create_datasets(self):
        """Both splits un-augmented for the configured caller intent; the
        viewmaker supplies train-time views."""
        intent = self.config.data_params.caller_intent
        input_size = self.pretrain_config.data_params.input_size
        train_dataset = FluentSpeechCommands(
            intent,
            train=True,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        val_dataset = FluentSpeechCommands(
            intent,
            train=False,
            spectral_transforms=False,
            wavform_transforms=False,
            input_size=input_size,
        )
        return train_dataset, val_dataset
| 39.511834
| 110
| 0.642606
| 4,611
| 40,065
| 5.281718
| 0.076556
| 0.050916
| 0.038105
| 0.031206
| 0.833415
| 0.82274
| 0.806233
| 0.79178
| 0.78057
| 0.77556
| 0
| 0.008083
| 0.271234
| 40,065
| 1,013
| 111
| 39.550839
| 0.826015
| 0.019094
| 0
| 0.755662
| 0
| 0
| 0.037068
| 0.002245
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083433
| false
| 0
| 0.035757
| 0.009535
| 0.219309
| 0.002384
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18f14181b324051e9dd51e24de62128954fe8657
| 2,469
|
py
|
Python
|
tests/kdump_test.py
|
sg893052/sonic-utilities
|
fdb79b8d65b8ca22232f4e6b140f593dd01613d5
|
[
"Apache-2.0"
] | 91
|
2016-03-23T14:24:41.000Z
|
2022-03-18T20:25:37.000Z
|
tests/kdump_test.py
|
sg893052/sonic-utilities
|
fdb79b8d65b8ca22232f4e6b140f593dd01613d5
|
[
"Apache-2.0"
] | 1,495
|
2017-02-15T10:49:10.000Z
|
2022-03-31T18:49:56.000Z
|
tests/kdump_test.py
|
sg893052/sonic-utilities
|
fdb79b8d65b8ca22232f4e6b140f593dd01613d5
|
[
"Apache-2.0"
] | 466
|
2016-04-25T09:31:23.000Z
|
2022-03-31T06:54:17.000Z
|
import importlib
from click.testing import CliRunner
from utilities_common.db import Db
class TestKdump(object):
    """CLI tests for ``config kdump *``.

    Each subcommand must succeed (exit code 0) while the KDUMP config
    table exists, and fail (exit code 1) once the table is deleted.
    """

    @classmethod
    def setup_class(cls):
        print("SETUP")

    def _assert_kdump_cmd(self, config, name, args=None):
        """Invoke ``config kdump <name> [args]`` twice: against a fresh Db
        (expect success) and after dropping the KDUMP table (expect failure).

        Factors out the sequence that was previously copy-pasted in all
        four test methods.
        """
        db = Db()
        runner = CliRunner()
        cmd = config.config.commands["kdump"].commands[name]
        result = runner.invoke(cmd, args, obj=db)
        print(result.exit_code)
        assert result.exit_code == 0
        # Delete the 'KDUMP' table; the command must now fail.
        db.cfgdb.delete_table("KDUMP")
        result = runner.invoke(cmd, args, obj=db)
        print(result.exit_code)
        assert result.exit_code == 1

    def test_config_kdump_disable(self, get_cmd_module):
        (config, _show) = get_cmd_module
        self._assert_kdump_cmd(config, "disable")

    def test_config_kdump_enable(self, get_cmd_module):
        (config, _show) = get_cmd_module
        self._assert_kdump_cmd(config, "enable")

    def test_config_kdump_memory(self, get_cmd_module):
        (config, _show) = get_cmd_module
        self._assert_kdump_cmd(config, "memory", ["256MB"])

    def test_config_kdump_num_dumps(self, get_cmd_module):
        (config, _show) = get_cmd_module
        self._assert_kdump_cmd(config, "num_dumps", ["10"])

    @classmethod
    def teardown_class(cls):
        print("TEARDOWN")
| 32.92
| 101
| 0.631835
| 305
| 2,469
| 4.937705
| 0.154098
| 0.106242
| 0.148738
| 0.12749
| 0.853918
| 0.853918
| 0.853918
| 0.853918
| 0.853918
| 0.853918
| 0
| 0.009569
| 0.238153
| 2,469
| 74
| 102
| 33.364865
| 0.791069
| 0.041717
| 0
| 0.777778
| 0
| 0
| 0.060568
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.111111
| false
| 0
| 0.055556
| 0
| 0.185185
| 0.185185
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e18cad35bbce8b2d736e9dad8db4452c7baff9f5
| 15,873
|
py
|
Python
|
workalendar/tests/test_canada.py
|
elaav/workalendar
|
61a120296b5992f6a3e86741a0773b42c1d1b4fa
|
[
"MIT"
] | null | null | null |
workalendar/tests/test_canada.py
|
elaav/workalendar
|
61a120296b5992f6a3e86741a0773b42c1d1b4fa
|
[
"MIT"
] | null | null | null |
workalendar/tests/test_canada.py
|
elaav/workalendar
|
61a120296b5992f6a3e86741a0773b42c1d1b4fa
|
[
"MIT"
] | null | null | null |
from datetime import date
from . import GenericCalendarTest
from ..core import MON
from ..america.canada import (
Canada, Ontario, Quebec, BritishColumbia, Alberta, Saskatchewan, Manitoba,
NewBrunswick, NovaScotia, PrinceEdwardIsland, Newfoundland, Yukon,
NorthwestTerritories, Nunavut
)
class CanadaTest(GenericCalendarTest):
    """Nation-wide Canadian holidays."""
    cal_class = Canada

    def test_holidays_2011(self):
        holidays = self.cal.holidays_set(2011)
        for month, day in ((1, 3), (7, 1), (9, 5), (12, 26)):
            self.assertIn(date(2011, month, day), holidays)

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        # New Year shift, Canada Day shift, Labour Day, Christmas.
        for month, day in ((1, 2), (7, 2), (9, 3), (12, 25)):
            self.assertIn(date(2012, month, day), holidays)

    def test_holidays_2013(self):
        holidays = self.cal.holidays_set(2013)
        for month, day in ((1, 1), (7, 1), (9, 2), (12, 25)):
            self.assertIn(date(2013, month, day), holidays)
        # Good Friday / Easter Monday are province-specific, not federal.
        for month, day in ((3, 29), (4, 1)):
            self.assertNotIn(date(2013, month, day), holidays)

    def test_holidays_2017(self):
        # New Year's Day shifted to Monday, January 2nd.
        self.assertIn(date(2017, 1, 2), self.cal.holidays_set(2017))
class OntarioTest(GenericCalendarTest):
    """Ontario provincial holidays."""
    cal_class = Ontario

    def test_holidays_2010(self):
        holidays = self.cal.holidays_set(2010)
        # Christmas (Dec 27) and Boxing Day (Dec 28) weekend shifts.
        for month, day in ((12, 27), (12, 28)):
            self.assertIn(date(2010, month, day), holidays)

    def test_holidays_2011(self):
        holidays = self.cal.holidays_set(2011)
        expected = (
            (1, 3),    # New Year shift
            (2, 21),   # Family Day Ontario
            (4, 22),   # Good Friday
            (5, 23),   # Victoria Day
            (7, 1),    # Canada Day
            (8, 1),    # Civic holiday
            (9, 5),    # Labour Day
            (10, 10),  # Canadian Thanksgiving
            (12, 26),
            (12, 27),  # Boxing day shift
        )
        for month, day in expected:
            self.assertIn(date(2011, month, day), holidays)
        self.assertNotIn(date(2011, 4, 25), holidays)  # Easter Monday

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (2, 20),   # Family Day Ontario
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (8, 6),    # Civic Holiday
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (12, 25),  # Christmas day
            (12, 26),  # Boxing day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        self.assertNotIn(date(2012, 4, 9), holidays)  # Easter Monday
class QuebecTest(GenericCalendarTest):
    """Quebec provincial holidays."""
    cal_class = Quebec

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 9),    # Easter Monday
            (5, 21),   # Victoria Day
            (6, 24),   # St Jean Baptiste
            (7, 1),    # Canada Day
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        self.assertNotIn(date(2012, 4, 6), holidays)  # Good Friday
class BritishColumbiaTest(GenericCalendarTest):
    """British Columbia provincial holidays."""
    cal_class = BritishColumbia

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (8, 6),    # BC Day
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (11, 11),  # Remembrance Day
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Family Day BC was not yet set in 2012; Easter Monday never applies.
        for month, day in ((2, 13), (4, 9)):
            self.assertNotIn(date(2012, month, day), holidays)

    def test_family_day(self):
        # From 2013 to 2018 Family Day fell on the 2nd MON of February;
        # as of 2019 it falls on the 3rd MON.
        schedule = [(year, 2) for year in range(2013, 2019)] + \
                   [(year, 3) for year in (2019, 2020, 2021)]
        for year, nth in schedule:
            holidays = dict(self.cal.holidays(year))
            day = self.cal.get_nth_weekday_in_month(year, 2, MON, nth)
            self.assertIn(day, holidays)
            self.assertEqual(holidays[day], "Family Day")
class AlbertaTest(GenericCalendarTest):
    """Alberta provincial holidays."""
    cal_class = Alberta

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (2, 20),   # Family Day
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (11, 11),  # Remembrance Day
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Easter Monday and the Civic Holiday are not Alberta holidays.
        for month, day in ((4, 9), (8, 6)):
            self.assertNotIn(date(2012, month, day), holidays)
class SaskatchewanTest(GenericCalendarTest):
    """Saskatchewan provincial holidays."""
    cal_class = Saskatchewan

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (2, 20),   # Family Day
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (8, 6),    # Civic Holiday
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (11, 11),  # Remembrance Day
            (11, 12),  # Remembrance Day (shift)
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        self.assertNotIn(date(2012, 4, 9), holidays)  # Easter Monday
class ManitobaTest(GenericCalendarTest):
    """Manitoba provincial holidays."""
    cal_class = Manitoba

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (2, 20),   # Louis Riel Day
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (8, 6),    # Civic Holiday
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Easter Monday, Remembrance Day (+shift) and Boxing Day are out.
        for month, day in ((4, 9), (11, 11), (11, 12), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)
class NewBrunswickTest(GenericCalendarTest):
    """New Brunswick provincial holidays."""
    cal_class = NewBrunswick

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 6),    # Good Friday
            (7, 1),    # Canada Day
            (8, 6),    # Civic Holiday
            (9, 3),    # Labour Day
            (11, 11),  # Remembrance Day
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Family Day, Easter Monday, Victoria Day, Thanksgiving,
        # Remembrance-Day shift and Boxing Day are not observed.
        for month, day in ((2, 20), (4, 9), (5, 21), (10, 8),
                           (11, 12), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)
class NovaScotiaTest(GenericCalendarTest):
    """Nova Scotia provincial holidays."""
    cal_class = NovaScotia

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 6),    # Good Friday
            (7, 1),    # Canada Day
            (9, 3),    # Labour Day
            (11, 11),  # Remembrance Day
            (11, 12),  # Remembrance Day shift
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Family Day, Easter Monday, Victoria Day, Civic Holiday,
        # Thanksgiving and Boxing Day are not observed.
        for month, day in ((2, 20), (4, 9), (5, 21), (8, 6),
                           (10, 8), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)

    def test_holidays_2015(self):
        # Viola Desmond day.
        self.assertIn(date(2015, 2, 16), self.cal.holidays_set(2015))
class PrinceEdwardIslandTest(GenericCalendarTest):
    """Prince Edward Island provincial holidays."""
    cal_class = PrinceEdwardIsland

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (2, 20),   # Islander Day
            (4, 6),    # Good Friday
            (7, 1),    # Canada Day
            (9, 3),    # Labour Day
            (11, 11),  # Remembrance Day
            (11, 12),  # Remembrance Day shift
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Easter Monday, Victoria Day, Civic Holiday, Thanksgiving and
        # Boxing Day are not observed.
        for month, day in ((4, 9), (5, 21), (8, 6), (10, 8), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)
class NewfoundlandTest(GenericCalendarTest):
    """Newfoundland and Labrador provincial holidays."""
    cal_class = Newfoundland

    def test_holidays_2013(self):
        holidays = self.cal.holidays_set(2013)
        # Includes Good Friday (Mar 29) but not Easter Monday (Apr 1).
        for month, day in ((1, 1), (3, 29), (7, 1), (9, 2), (12, 25)):
            self.assertIn(date(2013, month, day), holidays)
        self.assertNotIn(date(2013, 4, 1), holidays)
class YukonTest(GenericCalendarTest):
    """Yukon territorial holidays."""
    cal_class = Yukon

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (8, 20),   # Discovery Day
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (11, 11),  # Remembrance Day
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Family Day, Easter Monday, Civic Holiday, Remembrance-Day shift
        # and Boxing Day are not observed.
        for month, day in ((2, 20), (4, 9), (8, 6), (11, 12), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)
class NorthwestTerritoriesTest(GenericCalendarTest):
    """Northwest Territories holidays."""
    cal_class = NorthwestTerritories

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (6, 21),   # National Aboriginal Day
            (7, 1),    # Canada Day
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (11, 11),  # Remembrance Day
            (11, 12),  # Remembrance Day shift
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Family Day, Easter Monday, Civic Holiday and Boxing Day are out.
        for month, day in ((2, 20), (4, 9), (8, 6), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)
class NunavutTests(GenericCalendarTest):
    """Nunavut territorial holidays."""
    cal_class = Nunavut

    def test_holidays_2012(self):
        holidays = self.cal.holidays_set(2012)
        expected = (
            (1, 2),    # New Year shift
            (4, 6),    # Good Friday
            (5, 21),   # Victoria Day
            (7, 1),    # Canada Day
            (7, 9),    # Nunavut Day
            (9, 3),    # Labour Day
            (10, 8),   # Canadian Thanksgiving
            (11, 11),  # Remembrance Day
            (11, 12),  # Remembrance Day shift
            (12, 25),  # Christmas day
        )
        for month, day in expected:
            self.assertIn(date(2012, month, day), holidays)
        # Family Day, Easter Monday, National Aboriginal Day, Civic
        # Holiday and Boxing Day are not observed.
        for month, day in ((2, 20), (4, 9), (6, 21), (8, 6), (12, 26)):
            self.assertNotIn(date(2012, month, day), holidays)
| 48.690184
| 79
| 0.649405
| 2,059
| 15,873
| 4.965517
| 0.064594
| 0.119718
| 0.215962
| 0.217136
| 0.859937
| 0.84419
| 0.808881
| 0.779245
| 0.768975
| 0.760172
| 0
| 0.118079
| 0.228501
| 15,873
| 325
| 80
| 48.84
| 0.716805
| 0.150255
| 0
| 0.738007
| 0
| 0
| 0.0015
| 0
| 0
| 0
| 0
| 0
| 0.693727
| 1
| 0.077491
| false
| 0
| 0.01476
| 0
| 0.195572
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bee2c36e6fbffdfd5f8b69562d41508750cea95c
| 46
|
py
|
Python
|
pronotepy/ent/__init__.py
|
sosordinet/pronotepy
|
4fc4dd5920af7f6c1b9121d31c87e6c681bdd4b9
|
[
"MIT"
] | null | null | null |
pronotepy/ent/__init__.py
|
sosordinet/pronotepy
|
4fc4dd5920af7f6c1b9121d31c87e6c681bdd4b9
|
[
"MIT"
] | null | null | null |
pronotepy/ent/__init__.py
|
sosordinet/pronotepy
|
4fc4dd5920af7f6c1b9121d31c87e6c681bdd4b9
|
[
"MIT"
] | null | null | null |
from .ent import *
from .complex_ent import *
| 15.333333
| 26
| 0.73913
| 7
| 46
| 4.714286
| 0.571429
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 27
| 23
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8310403ade3db5515080420ef81f0bcb421b266b
| 15,874
|
py
|
Python
|
tests/ut/python/dataset/test_minddataset_exception.py
|
PowerOlive/mindspore
|
bda20724a94113cedd12c3ed9083141012da1f15
|
[
"Apache-2.0"
] | 1
|
2022-03-05T02:59:21.000Z
|
2022-03-05T02:59:21.000Z
|
tests/ut/python/dataset/test_minddataset_exception.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/python/dataset/test_minddataset_exception.py
|
zimo-geek/mindspore
|
665ec683d4af85c71b2a1f0d6829356f2bc0e1ff
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright 2019-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
import pytest
import mindspore.dataset as ds
from mindspore.mindrecord import FileWriter
def create_cv_mindrecord(file_name, files_num):
    """Write a one-sample CV mindrecord file (schema key 'file_name')."""
    # Drop stale record/meta files from a previous run.
    for stale in (file_name, "{}.db".format(file_name)):
        if os.path.exists(stale):
            os.remove(stale)
    writer = FileWriter(file_name, files_num)
    schema = {
        "file_name": {"type": "string"},
        "label": {"type": "int32"},
        "data": {"type": "bytes"},
    }
    sample = {
        "file_name": "001.jpg",
        "label": 43,
        "data": bytes('0xffsafdafda', encoding='utf-8'),
    }
    writer.add_schema(schema, "img_schema")
    writer.add_index(["file_name", "label"])
    writer.write_raw_data([sample])
    writer.commit()
def create_diff_schema_cv_mindrecord(file_name, files_num):
    """Write a one-sample mindrecord whose schema deliberately differs
    (field 'file_name_1' instead of 'file_name')."""
    for stale in (file_name, "{}.db".format(file_name)):
        if os.path.exists(stale):
            os.remove(stale)
    writer = FileWriter(file_name, files_num)
    schema = {
        "file_name_1": {"type": "string"},
        "label": {"type": "int32"},
        "data": {"type": "bytes"},
    }
    sample = {
        "file_name_1": "001.jpg",
        "label": 43,
        "data": bytes('0xffsafdafda', encoding='utf-8'),
    }
    writer.add_schema(schema, "img_schema")
    writer.add_index(["file_name_1", "label"])
    writer.write_raw_data([sample])
    writer.commit()
def create_diff_page_size_cv_mindrecord(file_name, files_num):
    """Write a one-sample CV mindrecord using a non-default 64MB page size."""
    for stale in (file_name, "{}.db".format(file_name)):
        if os.path.exists(stale):
            os.remove(stale)
    writer = FileWriter(file_name, files_num)
    writer.set_page_size(1 << 26)  # 64MB
    schema = {
        "file_name": {"type": "string"},
        "label": {"type": "int32"},
        "data": {"type": "bytes"},
    }
    sample = {
        "file_name": "001.jpg",
        "label": 43,
        "data": bytes('0xffsafdafda', encoding='utf-8'),
    }
    writer.add_schema(schema, "img_schema")
    writer.add_index(["file_name", "label"])
    writer.write_raw_data([sample])
    writer.commit()
def test_cv_lack_json():
    """MindDataset must raise when given a non-existent schema json."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    with pytest.raises(Exception):
        ds.MindDataset(file_name, "no_exist.json",
                       ["data", "file_name", "label"], 4)
    os.remove(file_name)
    os.remove("{}.db".format(file_name))
def test_cv_lack_mindrecord():
    """MindDataset must fail for a missing mindrecord file."""
    with pytest.raises(Exception, match="does not exist or permission denied"):
        _ = ds.MindDataset("no_exist.mindrecord",
                           ["data", "file_name", "label"], 4)
def test_invalid_mindrecord():
    """A plain-text file must be rejected as an invalid mindrecord file."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    # Write junk so the header-size validation trips.
    with open(file_name, 'w') as handle:
        handle.write('just for test')
    message = ("Unexpected error. Invalid file, the size of mindrecord file header "
               "is larger than the upper limit.")
    with pytest.raises(RuntimeError, match=message):
        data_set = ds.MindDataset(file_name, ["data", "file_name", "label"], 4)
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
            pass
    os.remove(file_name)
def test_minddataset_lack_db():
    """MindDataset must fail when the .db meta file is missing."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    # Remove only the meta file to trigger the verification error.
    os.remove("{}.db".format(file_name))
    message = ("Invalid file, failed to open mindrecord meta files "
               "while verifying meta file. Please check the meta file:")
    with pytest.raises(RuntimeError, match=message):
        data_set = ds.MindDataset(file_name, ["data", "file_name", "label"], 4)
        count = 0
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
            count += 1
    os.remove(file_name)
def test_cv_minddataset_pk_sample_error_class_column():
    """PKSampler with an unknown class_column must raise RuntimeError."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    sampler = ds.PKSampler(5, None, True, 'no_exist_column')
    message = ("Invalid data, 'class_column': no_exist_column can not found "
               "in fields of mindrecord files. Please check 'class_column' in PKSampler")
    with pytest.raises(RuntimeError, match=message):
        data_set = ds.MindDataset(
            file_name, ["data", "file_name", "label"], 4, sampler=sampler)
        count = 0
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
            count += 1
    os.remove(file_name)
    os.remove("{}.db".format(file_name))
def test_cv_minddataset_pk_sample_exclusive_shuffle():
    """Providing both a sampler and an explicit shuffle flag must fail."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    with pytest.raises(Exception, match="sampler and shuffle cannot be specified at the same time."):
        data_set = ds.MindDataset(file_name, ["data", "file_name", "label"], 4,
                                  sampler=ds.PKSampler(2), shuffle=False)
        count = 0
        for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
            count += 1
    os.remove(file_name)
    os.remove("{}.db".format(file_name))
def test_cv_minddataset_reader_different_schema():
    """Mixing mindrecord files with different schemas must fail."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    file_name_1 = file_name + '_1'
    create_cv_mindrecord(file_name, 1)
    create_diff_schema_cv_mindrecord(file_name_1, 1)
    message = ("Invalid file, the metadata of mindrecord file: "
               "test_cv_minddataset_reader_different_schema_1 is different from others, "
               "please make sure all the mindrecord files generated by the same script.")
    with pytest.raises(RuntimeError, match=message):
        data_set = ds.MindDataset([file_name, file_name_1], ["data", "label"], 4)
        count = 0
        for _ in data_set.create_dict_iterator(num_epochs=1):
            count += 1
    for name in (file_name, file_name_1):
        os.remove(name)
        os.remove("{}.db".format(name))
def test_cv_minddataset_reader_different_page_size():
    """Mixing mindrecord files with different page sizes must fail."""
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    file_name_1 = file_name + '_1'
    create_cv_mindrecord(file_name, 1)
    create_diff_page_size_cv_mindrecord(file_name_1, 1)
    message = ("Invalid file, the metadata of mindrecord file: "
               "test_cv_minddataset_reader_different_page_size_1 is different "
               "from others, please make sure all "
               "the mindrecord files generated by the same script.")
    with pytest.raises(RuntimeError, match=message):
        data_set = ds.MindDataset([file_name, file_name_1], ["data", "label"], 4)
        count = 0
        for _ in data_set.create_dict_iterator(num_epochs=1):
            count += 1
    for name in (file_name, file_name_1):
        os.remove(name)
        os.remove("{}.db".format(name))
def test_minddataset_invalidate_num_shards():
    """shard_id (2) >= num_shards (1) must be rejected with a range error.

    The original duplicated the two os.remove() calls in both the
    ``except`` and ``else`` branches; ``try/finally`` runs the cleanup on
    every path with a single copy.
    """
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    columns_list = ["data", "label"]
    num_readers = 4
    try:
        with pytest.raises(Exception) as error_info:
            data_set = ds.MindDataset(
                file_name, columns_list, num_readers, True, 1, 2)
            num_iter = 0
            for _ in data_set.create_dict_iterator(num_epochs=1):
                num_iter += 1
        assert 'Input shard_id is not within the required interval of [0, 0].' in str(
            error_info.value)
    finally:
        os.remove(file_name)
        os.remove("{}.db".format(file_name))
def test_minddataset_invalidate_shard_id():
    """A negative shard_id must be rejected with a range error.

    Cleanup moved to ``try/finally`` instead of duplicating the
    os.remove() pair in ``except`` and ``else`` branches.
    """
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    columns_list = ["data", "label"]
    num_readers = 4
    try:
        with pytest.raises(Exception) as error_info:
            data_set = ds.MindDataset(
                file_name, columns_list, num_readers, True, 1, -1)
            num_iter = 0
            for _ in data_set.create_dict_iterator(num_epochs=1):
                num_iter += 1
        assert 'Input shard_id is not within the required interval of [0, 0].' in str(
            error_info.value)
    finally:
        os.remove(file_name)
        os.remove("{}.db".format(file_name))
def test_minddataset_shard_id_bigger_than_num_shard():
    """shard_id values (2, 5) outside [0, num_shards) must be rejected.

    Both invalid shard_id values are exercised in a loop; cleanup happens
    once in ``finally`` instead of the original three duplicated
    ``except``/``else`` removal blocks.
    """
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    columns_list = ["data", "label"]
    num_readers = 4
    try:
        for bad_shard_id in (2, 5):
            with pytest.raises(Exception) as error_info:
                data_set = ds.MindDataset(
                    file_name, columns_list, num_readers, True, 2, bad_shard_id)
                num_iter = 0
                for _ in data_set.create_dict_iterator(num_epochs=1):
                    num_iter += 1
            assert 'Input shard_id is not within the required interval of [0, 1].' in str(
                error_info.value)
    finally:
        os.remove(file_name)
        os.remove("{}.db".format(file_name))
def test_cv_minddataset_partition_num_samples_equals_0():
    """num_samples=-1 must raise a ValueError naming the valid range.

    Cleanup consolidated into ``try/finally`` (the original duplicated
    the os.remove() pair in ``except`` and ``else``).
    """
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    columns_list = ["data", "label"]
    num_readers = 4

    def partitions(num_shards):
        # num_samples=-1 is invalid and must raise before rows are read.
        for partition_id in range(num_shards):
            data_set = ds.MindDataset(file_name, columns_list, num_readers,
                                      num_shards=num_shards,
                                      shard_id=partition_id, num_samples=-1)
            num_iter = 0
            for _ in data_set.create_dict_iterator(num_epochs=1):
                num_iter += 1

    try:
        with pytest.raises(ValueError) as error_info:
            partitions(5)
        assert 'num_samples exceeds the boundary between 0 and 9223372036854775807(INT64_MAX)' in str(
            error_info.value)
    finally:
        os.remove(file_name)
        os.remove("{}.db".format(file_name))
def test_mindrecord_exception():
    """A map() op that raises must surface a RuntimeError for any input column.

    The original repeated the same raise-check verbatim for "data",
    "file_name" and "label"; a loop over the columns removes the
    triplication without changing what is exercised.
    """
    def exception_func(item):
        raise Exception("Error occur!")
    file_name = os.environ.get('PYTEST_CURRENT_TEST').split(':')[-1].split(' ')[0]
    create_cv_mindrecord(file_name, 1)
    columns_list = ["data", "file_name", "label"]
    for column in columns_list:
        with pytest.raises(RuntimeError, match="The corresponding data files"):
            data_set = ds.MindDataset(file_name, columns_list, shuffle=False)
            data_set = data_set.map(operations=exception_func,
                                    input_columns=[column],
                                    num_parallel_workers=1)
            num_iter = 0
            for _ in data_set.create_dict_iterator(num_epochs=1, output_numpy=True):
                num_iter += 1
    os.remove(file_name)
    os.remove("{}.db".format(file_name))
def test_shuffle_with_num_samples_exception():
    """
    Feature: shuffle files or shuffle samples of each file
    Description: set Shuffle.FILES or Shuffle.INFILE and num_samples
    Expectation: exception occurred
    """
    MIND_DIR = "../data/mindrecord/testMindDataSet/testImageNetData/imagenet.mindrecord0"
    # Both file-level and in-file shuffle are incompatible with num_samples;
    # the two cases only differed in the shuffle mode, so loop over them.
    for shuffle_mode in (ds.Shuffle.FILES, ds.Shuffle.INFILE):
        with pytest.raises(ValueError, match="'Shuffle.FILES' or 'Shuffle.INFILE' and 'num_samples' "
                                             "cannot be specified at the same time."):
            _ = ds.MindDataset(MIND_DIR, shuffle=shuffle_mode, num_samples=5)
if __name__ == '__main__':
    # Direct-run entry point: execute every test in this file without pytest.
    test_cv_lack_json()
    test_cv_lack_mindrecord()
    test_invalid_mindrecord()
    test_minddataset_lack_db()
    test_cv_minddataset_pk_sample_error_class_column()
    test_cv_minddataset_pk_sample_exclusive_shuffle()
    test_cv_minddataset_reader_different_schema()
    test_cv_minddataset_reader_different_page_size()
    test_minddataset_invalidate_num_shards()
    test_minddataset_invalidate_shard_id()
    test_minddataset_shard_id_bigger_than_num_shard()
    test_cv_minddataset_partition_num_samples_equals_0()
    test_mindrecord_exception()
    # Registered for consistency: this test existed in the file but was
    # missing from the direct-run list.
    test_shuffle_with_num_samples_exception()
| 42.106101
| 118
| 0.643694
| 2,096
| 15,874
| 4.583015
| 0.117844
| 0.099105
| 0.036436
| 0.039975
| 0.804601
| 0.787425
| 0.772434
| 0.753383
| 0.723923
| 0.721424
| 0
| 0.016415
| 0.232456
| 15,874
| 376
| 119
| 42.218085
| 0.771996
| 0.068603
| 0
| 0.71987
| 0
| 0
| 0.167562
| 0.013255
| 0
| 0
| 0
| 0
| 0.016287
| 1
| 0.061889
| false
| 0.003257
| 0.013029
| 0
| 0.074919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8356052bca8d4987212fbd86dae4120a961fb232
| 756
|
py
|
Python
|
regressions/python/ac1.py
|
muchang/z3test
|
e3e7739f98b7aa85427fcb8a39a4c675132a896e
|
[
"MIT"
] | 23
|
2015-04-20T08:51:00.000Z
|
2021-11-15T12:20:59.000Z
|
regressions/python/ac1.py
|
muchang/z3test
|
e3e7739f98b7aa85427fcb8a39a4c675132a896e
|
[
"MIT"
] | 18
|
2016-03-02T15:17:42.000Z
|
2021-12-16T22:10:05.000Z
|
regressions/python/ac1.py
|
muchang/z3test
|
e3e7739f98b7aa85427fcb8a39a4c675132a896e
|
[
"MIT"
] | 30
|
2015-05-30T15:29:17.000Z
|
2022-02-25T15:58:58.000Z
|
# Copyright (c) 2015 Microsoft Corporation
"""
Testing AC
>>> from z3 import *
>>> x, y = Reals('x y')
>>> 2 - (x - y)
2 - (x - y)
>>> 2 + (x - y)
2 + x - y
>>> 2 - (x + y)
2 - (x + y)
>>> 2 + (x + (y + y))
2 + x + y + y
>>> 2 - (x - (y - y))
2 - (x - (y - y))
>>> 2 + (x - (y + y))
2 + x - (y + y)
>>> x * (x * x)
x*x*x
>>> x/(y/y)
x/(y/y)
>>> x + -x
x + -x
>>> -(x + y)
-(x + y)
>>> x, y = BitVecs('x y', 16)
>>> 2 - (x - y)
2 - (x - y)
>>> 2 + (x - y)
2 + x - y
>>> 2 - (x + y)
2 - (x + y)
>>> 2 + (x + (y + y))
2 + x + y + y
>>> 2 - (x - (y - y))
2 - (x - (y - y))
>>> 2 + (x - (y + y))
2 + x - (y + y)
>>> x * (x * x)
x*x*x
>>> x/(y/y)
x/(y/y)
"""
if __name__ == "__main__":
    import doctest
    import sys

    # sys.exit is the supported API; the bare exit() builtin is injected by
    # the `site` module and is not guaranteed in every run mode (e.g. -S).
    # Exit non-zero when any doctest fails so CI registers the regression.
    if doctest.testmod().failed:
        sys.exit(1)
| 14.823529
| 42
| 0.332011
| 148
| 756
| 1.641892
| 0.162162
| 0.279835
| 0.296296
| 0.378601
| 0.477366
| 0.477366
| 0.473251
| 0.473251
| 0.452675
| 0.452675
| 0
| 0.063366
| 0.332011
| 756
| 50
| 43
| 15.12
| 0.417822
| 0.851852
| 0
| 0
| 0
| 0
| 0.07767
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83671206cd2a0c095de9d776dfceaf90fae0aa7b
| 49,206
|
py
|
Python
|
Webportal/webportal.py
|
RisjioMaujio/Portal-Of-Programs
|
d618328021d7e6aef98c47d40aad3e073a16ad45
|
[
"MIT"
] | 1
|
2021-02-06T15:40:26.000Z
|
2021-02-06T15:40:26.000Z
|
Webportal/webportal.py
|
RisjioMaujio/Portal-Of-Programs
|
d618328021d7e6aef98c47d40aad3e073a16ad45
|
[
"MIT"
] | null | null | null |
Webportal/webportal.py
|
RisjioMaujio/Portal-Of-Programs
|
d618328021d7e6aef98c47d40aad3e073a16ad45
|
[
"MIT"
] | null | null | null |
import webbrowser
import datetime
import os
from tabulate import *
import csv
import pandas as pd
import sys
import os
# Path separator used to build asset-file paths: backslash on Windows
# ("nt"), forward slash everywhere else. Referenced by every menu function.
navigator_symbol = "/"
if os.name == "nt":
    navigator_symbol = "\\"

# Print the banner once at import/startup. `with` guarantees the handle is
# closed even if read()/print() raises (the original left it open on error).
with open(r"assets" + navigator_symbol + "website.txt", "r") as display:
    print(display.read())
def def_main():
    """Top-level menu: show the banner, ask for a website category, print
    that category's menu file and dispatch to its handler function.

    Handlers run their own input loops and come back here by calling
    def_main() again, so this function effectively never returns.
    """
    with open(r"assets" + navigator_symbol + "website.txt", "r") as banner:
        print(banner.read())

    # category name (capitalized) -> (menu text file under assets/, handler).
    # Replaces an 8-way copy-pasted if/elif chain.
    categories = {
        "Search": ("search.txt", search),
        "Social": ("social.txt", social),
        "Gservice": ("Gservices.txt", gservices),
        "Mservice": ("Mservices.txt", mservices),
        "Entertainment": ("entertainment.txt", entertainment),
        "Shooping": ("shooping.txt", shooping),
        "Fooding": ("fooding.txt", fooding),
        "Travelling": ("travelling.txt", travelling),
    }
    while True:
        with open(r"assets" + navigator_symbol + "category.txt", "r") as menu:
            print(menu.read())
        end_option = str(input("\tPlease Type The Category of Website Which You Want to Visit : ")).capitalize()
        print("\n" * 3)
        entry = categories.get(end_option)
        if entry is None:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(end_option) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
            continue
        menu_file, handler = entry
        with open(r"assets" + navigator_symbol + menu_file, "r") as submenu:
            print(submenu.read())
        handler()
        # Bug fix: the original broke out of the loop for five categories but
        # not for Shooping/Fooding/Travelling; unified to always break
        # (handlers never return normally — they loop or recurse into def_main).
        break
def search():
    """Prompt for a search engine, then open either its home page or a
    user-supplied query in the default browser via webbrowser.open."""
    # engine name -> (home-page URL, query URL prefix). Replaces four
    # near-identical copy-pasted branches that differed only in these URLs.
    engines = {
        "Google": ('https://www.google.com/?#q=', 'https://www.google.com/?#q='),
        "Bing": ('https://www.bing.com/search?q=', 'https://www.bing.com/search?q='),
        "Yahoo": ('https://in.yahoo.com/', 'https://in.search.yahoo.com/search?p='),
        "Ask": ('https://www.ask.com/', 'https://www.ask.com/web?o=0&l=dir&qo=homepageSearchBox&q='),
    }
    while True:
        option = str(input("\t Please Type Your Desired Search Engine Name : ")).capitalize()
        print("\n" * 4)
        urls = engines.get(option)
        if urls is None:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
            continue
        home_url, query_url = urls
        print("\tOk You Have Selected " + str(option) + " as Your Search Engine")
        print("\n" * 4)
        print("\t Now Select The Function to be Performed on " + str(option) + " ")
        print("\n" * 4)
        with open(r"assets" + navigator_symbol + "option.txt", "r") as menu:
            print(menu.read())
        fun = str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
        if fun == "Home":
            print("\n" * 4)
            print("\t \t \t Home Page of " + str(option) + " is Opening ......")
            webbrowser.open(home_url)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        elif fun == "Query":
            print("\n" * 4)
            # NOTE(review): prompt typos ("You Desired", "Seacrh") kept
            # verbatim — they are user-visible runtime strings.
            commmand = str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
            print("\n" * 4)
            print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
            webbrowser.open(query_url + commmand)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        else:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
def social():
    """Prompt for a social website, then open its home page or search it
    for a user-supplied query in the default browser."""
    # site name -> (home URL, query URL prefix, query URL suffix).
    # Replaces five near-identical copy-pasted branches.
    sites = {
        # Bug fix: home URL was misspelled 'instgram.com'.
        "Facebook": ('https://www.facebook.com/',
                     'https://www.bing.com/search?q=', '%20site:facebook.com&FORM=QBDCRD'),
        "Instagram": ('https://www.instagram.com/',
                      'https://www.bing.com/search?q=', '%20site:instagram.com&FORM=QBDCRD'),
        "Twitter": ('https://www.twitter.com/',
                    'https://twitter.com/search?q=', '&src=typed_query'),
        "Blog": ('https://www.blogger.com/about/',
                 'https://www.searchblogspot.com/search?q=', ''),
        "Pinterest": ('https://www.pinterest.com/',
                      'https://www.pinterest.com/', ''),
    }
    while True:
        option = str(input("\t Please Type Your Desired Social Website Name : ")).capitalize()
        print("\n" * 4)
        urls = sites.get(option)
        if urls is None:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
            continue
        home_url, query_prefix, query_suffix = urls
        print("\tOk You Have Selected " + str(option) + " as Your Social Website")
        print("\n" * 4)
        print("\t Now Select The Function to be Performed on " + str(option) + " ")
        print("\n" * 4)
        with open(r"assets" + navigator_symbol + "option.txt", "r") as menu:
            print(menu.read())
        fun = str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
        if fun == "Home":
            print("\n" * 4)
            print("\t \t \t Home Page of " + str(option) + " is Opening ......")
            webbrowser.open(home_url)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        elif fun == "Query":
            print("\n" * 4)
            commmand = str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
            print("\n" * 4)
            print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
            webbrowser.open(query_prefix + commmand + query_suffix)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        else:
            # Consistency fix: two branches misspelled this as
            # "Inapprropriate"; unified to the majority spelling.
            print("\n" * 8 + "\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
def shooping():
    """Prompt for a shopping website, then open its home page or search it
    for a user-supplied query in the default browser."""
    # site name -> (home URL, query URL prefix, query URL suffix).
    # Replaces four near-identical copy-pasted branches.
    sites = {
        "Amazon": ('https://www.amazon.in/',
                   'https://www.amazon.in/s?k=', '&ref=nb_sb_noss_2'),
        # Bug fix: home URL had a stray trailing '=' ('flipkart.com/=').
        "Flipkart": ('https://www.flipkart.com/',
                     'https://www.flipkart.com/search?q=', ''),
        "Ebay": ('https://pages.ebay.in/cod/cod_buyer.html',
                 'https://www.ebay.com/sch/i.html?_from=R40&_trksid=m570.l1313&_nkw=', ''),
        "Snapdeal": ('https://www.snapdeal.com/',
                     'https://www.snapdeal.com/search?keyword=', ''),
    }
    while True:
        option = str(input("\t Please Type Your Desired Shooping Website Name : ")).capitalize()
        print("\n" * 4)
        urls = sites.get(option)
        if urls is None:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
            continue
        home_url, query_prefix, query_suffix = urls
        print("\tOk You Have Selected " + str(option) + " as Your Shooping Website")
        print("\n" * 4)
        print("\t Now Select The Function to be Performed on " + str(option) + " ")
        print("\n" * 4)
        with open(r"assets" + navigator_symbol + "option.txt", "r") as menu:
            print(menu.read())
        fun = str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
        if fun == "Home":
            print("\n" * 4)
            print("\t \t \t Home Page of " + str(option) + " is Opening ......")
            webbrowser.open(home_url)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        elif fun == "Query":
            print("\n" * 4)
            commmand = str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
            print("\n" * 4)
            print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
            webbrowser.open(query_prefix + commmand + query_suffix)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        else:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
def entertainment():
    """Prompt for an entertainment website, then open its home page or
    search it for a user-supplied query in the default browser."""
    # site name -> (home URL, query URL prefix).
    # Replaces five near-identical copy-pasted branches.
    sites = {
        "Youtube": ('https://www.youtube.com/', 'https://www.youtube.com/results?search_query='),
        "Hotstar": ('https://www.hotstar.com/', 'https://www.hotstar.com/in/search?q='),
        "Ganna": ('https://gaana.com/', 'https://gaana.com/search/'),
        "Savaan": ('https://www.jiosaavn.com/', 'https://www.jiosaavn.com/search/'),
        "Prime": ('https://www.primevideo.com/',
                  'https://www.primevideo.com/ref=atv_sr_sug_5?_encoding=UTF8&phrase='),
    }
    while True:
        # NOTE(review): "Entertainmnet" typo kept verbatim — it is a
        # user-visible runtime string used consistently in the original.
        option = str(input("\t Please Type Your Desired Entertainmnet Website Name : ")).capitalize()
        print("\n" * 4)
        urls = sites.get(option)
        if urls is None:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
            continue
        home_url, query_prefix = urls
        print("\tOk You Have Selected " + str(option) + " as Your Entertainmnet Website")
        print("\n" * 4)
        print("\t Now Select The Function to be Performed on " + str(option) + " ")
        print("\n" * 4)
        with open(r"assets" + navigator_symbol + "option.txt", "r") as menu:
            print(menu.read())
        fun = str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
        if fun == "Home":
            print("\n" * 4)
            print("\t \t \t Home Page of " + str(option) + " is Opening ......")
            webbrowser.open(home_url)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        elif fun == "Query":
            print("\n" * 4)
            commmand = str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
            print("\n" * 4)
            print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
            webbrowser.open(query_prefix + commmand)
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        else:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
def travelling():
    """Prompt for a travel website, then open its home page or a
    site-specific query/redirect in the default browser.

    The four sites behave differently on "Query": Railyatri and Irctc ask
    for a train number; Maketrip and Cleartrip only redirect to the site
    (their originals referenced an undefined variable — fixed here).
    """
    homes = {
        "Railyatri": 'https://www.railyatri.in/',
        "Maketrip": 'https://www.makemytrip.com/',
        "Cleartrip": 'https://www.cleartrip.com/',
        "Irctc": 'https://www.irctc.co.in/',
    }
    while True:
        option = str(input("\t Please Type Your Desired Travelling Website Name : ")).capitalize()
        print("\n" * 4)
        if option not in homes:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
            continue
        print("\tOk You Have Selected " + str(option) + " as Your Travelling Website")
        print("\n" * 4)
        print("\t Now Select The Function to be Performed on " + str(option) + " ")
        print("\n" * 4)
        with open(r"assets" + navigator_symbol + "option.txt", "r") as menu:
            print(menu.read())
        fun = str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
        if fun == "Home":
            print("\n" * 4)
            print("\t \t \t Home Page of " + str(option) + " is Opening ......")
            webbrowser.open(homes[option])
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        elif fun == "Query":
            print("\n" * 4)
            if option == "Railyatri":
                commmand = str(input("\t Please Type The Correct Train Number To Seacrh On " + str(option) + " : ")).capitalize()
                print("\n" * 4)
                print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
                webbrowser.open('https://www.railyatri.in/time-table/' + commmand)
            elif option == "Irctc":
                commmand = str(input("\t Please Type The Correct Train Number To Seacrh On " + str(option) + " : ")).capitalize()
                print("\n" * 4)
                print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
                print("\n" * 4)
                print("\t\t\t\tOn " + str(option) + " Real Time Search Works So May Query You Given Not Responds So Redirecting.......")
                webbrowser.open('https://www.irctc.co.in/nget/' + commmand)
            else:
                # Maketrip / Cleartrip: no query input is taken; the site
                # offers on-site search, so just redirect to the home page.
                # Bug fix: the originals appended the undefined name
                # `commmand` here, raising NameError.
                print("\t You Had Selected " + str(option) + " Which Is Offering on Site Search ")
                print("\n" * 4)
                print("\t\t\t\t" + " So " + str(option) + " is Now Redirecting.......")
                webbrowser.open(homes[option])
            print("\n" * 4)
            print("\t\t\tReturning To Category of Website Selection ")
            def_main()
        else:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
def fooding():
while True:
option=str(input("\t Please Type Your Desired Fooding Website Name : ")).capitalize()
print("\n" * 4)
if(option=="Zomato"):
print("\tOk You Have Selected " + str(option) + " as Your Fooding Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.zomato.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t You Can Give Only 'Restuarnts' Query To Seacrh On As It is Real Time Search Website " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.zomato.com/' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Swiggy"):
print("\tOk You Have Selected " + str(option) + " as Your Fooding Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.hotstar.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t You Can Give Only 'Restuarnts' Query To Seacrh On As It is Real Time Search Website " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.swiggy.com/' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
def gservices():
while True:
option=str(input("\t Please Type Your Desired Google Service Name : ")).capitalize()
print("\n" * 4)
if(option=="Account"):
print("\tOk You Have Selected " + str(option) + " as Your Google Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('http://accounts.google.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://support.google.com/accounts/search?q=' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Drive"):
print("\tOk You Have Selected " + str(option) + " as Your Google Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://drive.google.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://drive.google.com/drive/search?q=' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Gmail"):
print("\tOk You Have Selected " + str(option) + " as Your Google Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('http://mail.google.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://mail.google.com/mail/u/0/#search/' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Maps"):
print("\tOk You Have Selected " + str(option) + " as Your Google Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://maps.google.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.google.com/maps/search/' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Youtube"):
print("\tOk You Have Selected " + str(option) + " as Your Google Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.youtube.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.youtube.com/results?search_query=' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
def mservices():
while True:
option=str(input("\t Please Type Your Desired Microsoft Service Name : ")).capitalize()
print("\n" * 4)
if(option=="Account"):
print("\tOk You Have Selected " + str(option) + " as Your Microsoft Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://accounts.microsoft.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://docs.microsoft.com/en-us/search/?terms=' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Onedrive"):
print("\tOk You Have Selected " + str(option) + " as Your Microsoft Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://onedrive.live.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://onedrive.live.com/?id=root&cid=C1094D34D4A160D6&qt=search&q=' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Outlook"):
print("\tOk You Have Selected " + str(option) + " as Your Microsoft Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://outlook.live.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://outlook.live.com/mail/0/search/' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Maps"):
print("\tOk You Have Selected " + str(option) + " as Your Microsoft Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.bing.com/maps/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.bing.com/search?q=' + commmand+'location')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Office"):
print("\tOk You Have Selected " + str(option) + " as Your Microsoft Service Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.office.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
print("\t You Had Selected " + str(option) + " Which Is Offering on Site Search ")
print("\n" * 4)
print("\t\t\t\t" + " So " + str(option) + " is Now Redirecting.......")
webbrowser.open('https://www.office.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(option) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
def_main()
| 34.12344
| 151
| 0.565703
| 6,721
| 49,206
| 4.120518
| 0.033328
| 0.063046
| 0.062432
| 0.075395
| 0.946848
| 0.945331
| 0.927457
| 0.915433
| 0.907814
| 0.903517
| 0
| 0.009691
| 0.236638
| 49,206
| 1,441
| 152
| 34.14712
| 0.727597
| 0.007174
| 0
| 0.85906
| 0
| 0.002876
| 0.431157
| 0.001331
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008629
| false
| 0
| 0.00767
| 0
| 0.016299
| 0.521572
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
836902aba1a3bf832b49d30e30b054869d8df712
| 21,903
|
py
|
Python
|
pygs/test/integration_test/test_graph.py
|
ninowalker/graphserver
|
dc08070bc6e295986633cf510ca46a2f8d451b92
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-01-02T22:09:07.000Z
|
2016-05-09T04:48:13.000Z
|
pygs/test/integration_test/test_graph.py
|
ninowalker/graphserver
|
dc08070bc6e295986633cf510ca46a2f8d451b92
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
pygs/test/integration_test/test_graph.py
|
ninowalker/graphserver
|
dc08070bc6e295986633cf510ca46a2f8d451b92
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
# as of 2010-03-21, this file is out of date and in need of a lot of love
import csv
import unittest
from graphserver.core import Graph, Street, State, WalkOptions, Link, \
ServiceCalendar, Timezone, TimezonePeriod, \
TripBoard, Crossing, TripAlight
import time
class TestGraph(unittest.TestCase):
def test_get_route(self):
"Check it finds the route we expect"
g = Graph()
reader = csv.reader(open("../performance_test/map.csv"))
for wayid, fromv, tov, length in reader:
g.add_vertex( fromv )
g.add_vertex( tov )
g.add_edge( fromv, tov, Street( wayid, float(length) ) )
v85thStreet = "53184534"
vBeaconAve = "53072051"
idealVertices = ['53184534', '53193013', '69374666', '53193014', '69474340', '53185600', '53077802', '69474361', '53090673', '53193015', '53193016', '53193017', '53193018', '53189027', '53193019', '53193020', '53112767', '53193021', '69516594', '53132048', '69516588', '53095152', '53132049', '53239899', '53147269', '53138815', '69516553', '53138764', '53194375', '53185509', '53194376', '53144840', '53178633', '53178635', '53194364', '53125622', '53045160', '53194365', '53194366', '53194367', '53194368', '53185796', '53194369', '53086028', '90251330', '90251121', '30789993', '30789998', '31394282', '31393878', '29977892', '124205994', '31428350', '29545469', '94008501', '29545421', '29545417', '29545423', '29484769', '29484785', '29545373', '29979589', '30078988', '30079048', '244420183', '29979596', '29979598', '30230262', '30230264', '30279409', '30279408', '30230266', '30230273', '30230277', '30230281', '30230300', '30230506', '30231231', '30230962', '60878121', '53224639', '53210038', '53081902', '53052413', '53210039', '53224626', '53168444', '53224629', '53224632', '53208783', '53083017', '53083040', '53208784', '53187334', '53187337', '53089335', '53066732', '53208785', '53178012', '53208786', '53152490', '53183929', '53146692', '53146065', '53083086', '53083102', '53113957', '53113944', '53190685', '53203056', '53167007', '53129046', '53098715', '53208787', '53208788', '53180738', '53072051']
idealEdges = ['9112003-8', '6438432-0', '6438432-1', '6438432-2', '6438432-3', '6438432-4', '6438432-5', '6438432-6', '6438432-7', '6438432-8', '6438432-9', '6438432-10', '6438432-11', '6438432-12', '6438432-13', '6438432-14', '6438432-15', '6438432-16', '6438432-17', '6386686-0', '6386686-1', '6386686-2', '6497278-2', '6497278-3', '6497278-4', '6497278-5', '6497278-6', '6514850-51', '6439614-0', '6439614-1', '6439614-2', '6439614-3', '15255537-1', '6439607-0', '6439607-1', '6439607-2', '6439607-3', '6439607-4', '6439607-5', '6439607-6', '6439607-7', '6439607-8', '6439607-9', '6439607-10', '10497741-3', '10497743-3', '4709507-4', '4709507-5', '4709507-6', '4709507-7', '4709507-8', '4869151-0', '4869146-0', '4869146-1', '4869146-2', '4869146-3', '4869146-4', '4644156-0', '4722460-0', '4722460-1', '4722460-2', '4722460-3', '4722460-4', '4722460-5', '4722460-6', '14017470-0', '14017470-1', '5130429-0', '13866257-0', '13866256-0', '4748963-0', '4748962-0', '4748962-1', '15257844-0', '15257848-0', '15257848-1', '4743936-0', '4743934-0', '4743897-3', '4743897-4', '8116116-0', '6457969-20', '6457969-21', '6457969-22', '6476943-0', '6476943-1', '6476943-2', '6476943-3', '6476943-4', '6456455-20', '6456455-21', '6456455-22', '6456455-23', '6456455-24', '6456455-25', '6456455-26', '6456455-27', '6456455-28', '6456455-29', '6456455-30', '6456455-31', '6456455-32', '6456455-33', '6456455-34', '6456455-35', '6456455-36', '6456455-37', '6456455-38', '6456455-39', '6456455-40', '6456455-41', '6456455-42', '6456455-43', '6456455-44', '6456455-45', '6456455-46']
t0 = time.time()
spt = g.shortest_path_tree( v85thStreet, vBeaconAve, State(g.numagencies,0), WalkOptions() )
t1 = time.time()
print "time:", (t1-t0)*1000
vertices, edges = spt.path( vBeaconAve )
assert spt.get_vertex("53072051").payload.time == 31439
assert spt.get_vertex("53072051").payload.weight == 17311963
assert spt.get_vertex("53072051").payload.dist_walked == 26774.100248
assert( False not in [l==r for l,r in zip( [v.label for v in vertices], idealVertices )] )
assert( False not in [l==r for l,r in zip( [e.payload.name for e in edges], idealEdges )] )
vBallardAve = "53115442"
vLakeCityWay = "124175598"
idealVertices = ['53115442', '53115445', '53115446', '53227448', '53158020', '53105937', '53148458', '53077817', '53077819', '53077821', '53077823', '53077825', '60413953', '53097655', '60413955', '53196479', '53248412', '53245437', '53153886', '53181632', '53246786', '53078069', '53247761', '53129527', '53203543', '53248413', '53182343', '53156127', '53227471', '53240242', '53109739', '53248420', '53234775', '53170822', '53115167', '53209384', '53134650', '53142180', '53087702', '53184534', '53193013', '69374666', '53193014', '69474340', '53185600', '53077802', '69474361', '53090673', '53193015', '53193016', '53193017', '53193018', '53189027', '53193019', '53193020', '53112767', '53193021', '53183554', '53213063', '53197105', '53213061', '53090659', '53213059', '53157290', '53062869', '53213057', '53213055', '53213054', '53184527', '67507140', '67507145', '67507034', '67507151', '67507040', '67507158', '67507048', '67507166', '67507051', '67507176', '67507057', '67507126', '53233319', '53147253', '53233320', '53233321', '60002786', '60002787', '88468933', '53125662', '53195800', '88486410', '53228492', '88486425', '53215121', '88486457', '53199820', '53185765', '53233322', '53227223', '88486676', '53086030', '53086045', '53204778', '88486720', '53204762', '88486429', '53139133', '53139142', '88486453', '53072465', '30790081', '30790104', '53072467', '124181376', '30759113', '53072469', '53072472', '53072473', '53072475', '53072476', '53072477', '53072478', '124175598']
idealEdges = ['6372784-0', '6372784-1', '6480699-3', '6517019-4', '6517019-5', '6517019-6', '6517019-7', '6346366-0', '6346366-1', '6346366-2', '6346366-3', '10425981-2', '8072147-2', '8072147-3', '6441828-10', '22758990-0', '6511156-0', '6511156-1', '6511156-2', '6511156-3', '6511156-4', '6511156-5', '6511156-6', '6511156-7', '6511156-8', '6511156-9', '6511156-10', '6511156-11', '6511156-12', '6511156-13', '6511156-14', '9112003-0', '9112003-1', '9112003-2', '9112003-3', '9112003-4', '9112003-5', '9112003-6', '9112003-7', '9112003-8', '6438432-0', '6438432-1', '6438432-2', '6438432-3', '6438432-4', '6438432-5', '6438432-6', '6438432-7', '6438432-8', '6438432-9', '6438432-10', '6438432-11', '6438432-12', '6438432-13', '6438432-14', '6438432-15', '10425996-0', '10425996-1', '10425996-2', '10425996-3', '10425996-4', '10425996-5', '10425996-6', '10425996-7', '10425996-8', '10425996-9', '10425996-10', '10425996-11', '10425996-12', '9116336-2', '9116336-3', '9116346-1', '9116346-2', '9116346-3', '9116346-4', '9116346-5', '9116346-6', '9116346-7', '9116346-8', '9116346-9', '6488959-1', '6488959-2', '6488959-3', '6488959-4', '6488959-5', '6488959-6', '6488959-7', '6488959-8', '6488959-9', '6488959-10', '6488959-11', '6488959-12', '6488959-13', '6488959-14', '6488959-15', '6488959-16', '6488959-17', '6488959-18', '6488959-19', '6488959-20', '6488959-21', '6488959-22', '6488959-23', '6488959-24', '6488959-25', '6488959-26', '6488959-27', '6488959-28', '6488959-29', '6344932-0', '6344932-1', '6344932-2', '13514591-0', '13514602-0', '13514602-1', '13514602-2', '8591344-0', '8591344-1', '8591344-2', '8591344-3', '8591344-4', '8591344-5']
t0 = time.time()
spt = g.shortest_path_tree( vBallardAve, vLakeCityWay, State(g.numagencies,0), WalkOptions() )
t1 = time.time()
print "time: ", (t1-t0)*1000
vertices, edges = spt.path( vLakeCityWay )
assert spt.get_vertex("124175598").payload.time == 13684
assert spt.get_vertex("124175598").payload.weight == 190321
assert( False not in [l==r for l,r in zip( [v.label for v in vertices], idealVertices )] )
assert( False not in [l==r for l,r in zip( [e.payload.name for e in edges], idealEdges )] )
#one last time
vSandPointWay = "32096172"
vAirportWay = "60147448"
idealVertices = ['32096172', '60411560', '32096173', '32096176', '53110403', '32096177', '32096180', '53208261', '32096181', '60411559', '32096184', '53164136', '32096185', '32096190', '32096191', '32096194', '53123806', '32096196', '32096204', '53199337', '32096205', '32096208', '60411513', '32096209', '53040444', '32096212', '60411512', '53208255', '32096216', '53079385', '53079384', '32096219', '31192107', '31430499', '59948312', '31430457', '31430658', '29973173', '31430639', '29977895', '30012801', '31430516', '30012733', '29464742', '32271244', '31430321', '29464754', '31430318', '29973106', '31429815', '29464758', '31429758', '32103448', '60701659', '29464594', '29463661', '59677238', '59677231', '29463657', '29463479', '29449421', '29449412', '29545007', '29545373', '29979589', '30078988', '30079048', '244420183', '29979596', '29979598', '30230262', '30230264', '30279409', '30279408', '30230266', '30230273', '30230277', '30230281', '30230300', '30230506', '30231566', '30231379', '30230524', '30887745', '30887637', '30887631', '30887106', '60147424', '53131178', '53128410', '53131179', '53027159', '60147448']
idealEdges = ['4910430-0', '4910430-1', '4910417-0', '4910416-0', '4910416-1', '4910414-0', '4910413-0', '4910413-1', '4910412-0', '4910412-1', '4910410-0', '4910410-1', '4910408-0', '4910405-0', '4910405-1', '4910405-2', '4910405-3', '4910402-0', '4910399-0', '4910399-1', '4910397-0', '4910394-0', '4910394-1', '4910392-0', '4910392-1', '4910385-0', '4910385-1', '4910385-2', '4910385-3', '4910385-4', '4910385-5', '4910384-0', '4910384-1', '4869358-0', '4869358-1', '4869358-2', '4869358-3', '4869357-0', '4869357-1', '4869357-2', '4869357-3', '4869357-4', '4869357-5', '4636137-0', '4636137-1', '4636137-2', '4636137-3', '4636137-4', '4636137-5', '4636137-6', '4708973-0', '4708973-1', '4708973-2', '4708973-3', '4636201-0', '4708972-0', '4708972-1', '4708972-2', '4636105-0', '4636093-0', '4729956-0', '4644053-0', '4644064-0', '4722460-2', '4722460-3', '4722460-4', '4722460-5', '4722460-6', '14017470-0', '14017470-1', '5130429-0', '13866257-0', '13866256-0', '4748963-0', '4748962-0', '4748962-1', '15257844-0', '15257848-0', '15257848-1', '15257848-2', '15257848-3', '15257848-4', '4810339-0', '4810342-0', '4810342-1', '4810337-0', '4810290-0', '8044406-0', '15240328-7', '15240328-8', '15240328-9', '15240328-10']
spt = g.shortest_path_tree( vSandPointWay, vAirportWay, State(g.numagencies,0), WalkOptions() )
vertices, edges = spt.path( vAirportWay )
assert spt.get_vertex("60147448").payload.time == 21082
print spt.get_vertex("60147448").payload.weight
assert spt.get_vertex("60147448").payload.weight == 4079909
assert( False not in [l==r for l,r in zip( [v.label for v in vertices], idealVertices )] )
assert( False not in [l==r for l,r in zip( [e.payload.name for e in edges], idealEdges )] )
def test_get_route_retro(self):
"Check it finds the route we expect, in reverse"
g = Graph()
reader = csv.reader(open("../performance_test/map.csv"))
for wayid, fromv, tov, length in reader:
g.add_vertex( fromv )
g.add_vertex( tov )
g.add_edge( fromv, tov, Street( wayid, float(length) ) )
v85thStreet = "53184534"
vBeaconAve = "53072051"
idealVertices = ['53184534', '53193013', '69374666', '53193014', '69474340', '53185600', '53077802', '69474361', '53090673', '53193015', '53193016', '53193017', '53193018', '53189027', '53193019', '53193020', '53112767', '53193021', '69516594', '53132048', '69516588', '53095152', '53132049', '53239899', '53147269', '53138815', '69516553', '53138764', '53194375', '53185509', '53194376', '53144840', '53178633', '53178635', '53194364', '53125622', '53045160', '53194365', '53194366', '53194367', '53194368', '53185796', '53194369', '53086028', '90251330', '90251121', '30789993', '30789998', '31394282', '31393878', '29977892', '124205994', '31428350', '29545469', '29545479', '29545426', '29545421', '29545417', '29545423', '29484769', '29484785', '29545373', '29979589', '30078988', '30079048', '244420183', '29979596', '29979598', '30230262', '30230264', '30279409', '30279408', '30230266', '30230273', '30230277', '30230281', '30230300', '30230506', '30231231', '30230962', '60878121', '53224639', '53210038', '53081902', '53052413', '53210039', '53224626', '53168444', '53224629', '53224632', '53208783', '53083017', '53083040', '53208784', '53187334', '53187337', '53089335', '53066732', '53208785', '53178012', '53208786', '53152490', '53183929', '53146692', '53146065', '53083086', '53083102', '53113957', '53113944', '53190685', '53203056', '53167007', '53129046', '53098715', '53208787', '53208788', '53180738', '53072051']
idealEdges = ['9112003-8', '6438432-0', '6438432-1', '6438432-2', '6438432-3', '6438432-4', '6438432-5', '6438432-6', '6438432-7', '6438432-8', '6438432-9', '6438432-10', '6438432-11', '6438432-12', '6438432-13', '6438432-14', '6438432-15', '6438432-16', '6438432-17', '6386686-0', '6386686-1', '6386686-2', '6497278-2', '6497278-3', '6497278-4', '6497278-5', '6497278-6', '6514850-51', '6439614-0', '6439614-1', '6439614-2', '6439614-3', '15255537-1', '6439607-0', '6439607-1', '6439607-2', '6439607-3', '6439607-4', '6439607-5', '6439607-6', '6439607-7', '6439607-8', '6439607-9', '6439607-10', '10497741-3', '10497743-3', '4709507-4', '4709507-5', '4709507-6', '4709507-7', '4709507-8', '4869151-0', '4869146-0', '4644189-0', '4644192-0', '4644159-0', '4869146-3', '4869146-4', '4644156-0', '4722460-0', '4722460-1', '4722460-2', '4722460-3', '4722460-4', '4722460-5', '4722460-6', '14017470-0', '14017470-1', '5130429-0', '13866257-0', '13866256-0', '4748963-0', '4748962-0', '4748962-1', '15257844-0', '15257848-0', '15257848-1', '4743936-0', '4743934-0', '4743897-3', '4743897-4', '8116116-0', '6457969-20', '6457969-21', '6457969-22', '6476943-0', '6476943-1', '6476943-2', '6476943-3', '6476943-4', '6456455-20', '6456455-21', '6456455-22', '6456455-23', '6456455-24', '6456455-25', '6456455-26', '6456455-27', '6456455-28', '6456455-29', '6456455-30', '6456455-31', '6456455-32', '6456455-33', '6456455-34', '6456455-35', '6456455-36', '6456455-37', '6456455-38', '6456455-39', '6456455-40', '6456455-41', '6456455-42', '6456455-43', '6456455-44', '6456455-45', '6456455-46']
spt = g.shortest_path_tree_retro( v85thStreet, vBeaconAve, State(g.numagencies,31505), WalkOptions() )
vertices, edges = spt.path_retro( v85thStreet )
assert spt.get_vertex(v85thStreet).payload.time == 63
assert spt.get_vertex(v85thStreet).payload.weight == 17022003
assert [v.label for v in vertices] == idealVertices
assert [e.payload.name for e in edges] == idealEdges
vBallardAve = "53115442"
vLakeCityWay = "124175598"
idealVertices = ['53115442', '53115445', '53115446', '53227448', '53158020', '53105937', '53148458', '53077817', '53077819', '53077821', '53077823', '53077825', '53077826', '53077828', '53077830', '53077832', '53077833', '53153886', '53181632', '53246786', '53078069', '53247761', '53129527', '53203543', '53248413', '53182343', '53156127', '53227471', '53240242', '53109739', '53248420', '53234775', '53170822', '53115167', '53209384', '53134650', '53142180', '53087702', '53184534', '53193013', '69374666', '53193014', '69474340', '53185600', '53077802', '69474361', '53090673', '53193015', '53193016', '53193017', '53193018', '53189027', '53193019', '53193020', '53112767', '53193021', '53183554', '53213063', '53197105', '53213061', '53090659', '53213059', '53157290', '53062869', '53213057', '53213055', '53213054', '53184527', '67507140', '67507145', '67507034', '67507151', '67507040', '67507158', '53210973', '53147258', '53210974', '53210975', '60002793', '60002790', '60002789', '60002786', '60002787', '88468933', '53125662', '53195800', '88486410', '53228492', '88486425', '53215121', '88486457', '53199820', '53185765', '53233322', '53227223', '88486676', '53086030', '53086045', '53204778', '88486720', '53204762', '88486429', '53139133', '53139142', '88486453', '53072465', '30790081', '30790104', '53072467', '124181376', '30759113', '53072469', '53072472', '53072473', '53072475', '53072476', '53072477', '53072478', '124175598']
idealEdges = ['6372784-0', '6372784-1', '6480699-3', '6517019-4', '6517019-5', '6517019-6', '6517019-7', '6346366-0', '6346366-1', '6346366-2', '6346366-3', '6346366-4', '6346366-5', '6346366-6', '6346366-7', '6346366-8', '10379527-1', '6511156-2', '6511156-3', '6511156-4', '6511156-5', '6511156-6', '6511156-7', '6511156-8', '6511156-9', '6511156-10', '6511156-11', '6511156-12', '6511156-13', '6511156-14', '9112003-0', '9112003-1', '9112003-2', '9112003-3', '9112003-4', '9112003-5', '9112003-6', '9112003-7', '9112003-8', '6438432-0', '6438432-1', '6438432-2', '6438432-3', '6438432-4', '6438432-5', '6438432-6', '6438432-7', '6438432-8', '6438432-9', '6438432-10', '6438432-11', '6438432-12', '6438432-13', '6438432-14', '6438432-15', '10425996-0', '10425996-1', '10425996-2', '10425996-3', '10425996-4', '10425996-5', '10425996-6', '10425996-7', '10425996-8', '10425996-9', '10425996-10', '10425996-11', '10425996-12', '9116336-2', '9116336-3', '9116346-1', '9116346-2', '9116346-3', '6459254-1', '6459254-2', '6459254-3', '6459254-4', '6459254-5', '4794350-10', '4794350-11', '4794350-12', '6488959-6', '6488959-7', '6488959-8', '6488959-9', '6488959-10', '6488959-11', '6488959-12', '6488959-13', '6488959-14', '6488959-15', '6488959-16', '6488959-17', '6488959-18', '6488959-19', '6488959-20', '6488959-21', '6488959-22', '6488959-23', '6488959-24', '6488959-25', '6488959-26', '6488959-27', '6488959-28', '6488959-29', '6344932-0', '6344932-1', '6344932-2', '13514591-0', '13514602-0', '13514602-1', '13514602-2', '8591344-0', '8591344-1', '8591344-2', '8591344-3', '8591344-4', '8591344-5']
spt = g.shortest_path_tree_retro( vBallardAve, vLakeCityWay, State(g.numagencies,13684) )
vertices, edges = spt.path_retro( vBallardAve )
assert spt.get_vertex(vBallardAve).payload.time == -8
assert spt.get_vertex(vBallardAve).payload.weight == 196300
assert [v.label for v in vertices] == idealVertices
assert [e.payload.name for e in edges] == idealEdges
def test_hello_world(self):
    """End-to-end walkthrough: build a two-city street graph, attach bus
    stops via Link edges, add a scheduled trip (board/cross/alight), and
    verify shortest-path trees at each stage.

    NOTE(review): relies on explicit spt.destroy()/g.destroy() calls,
    presumably because Graph/SPT wrap native (C) resources — confirm
    against the graphserver bindings.
    """
    g = Graph()
    g.add_vertex( "Seattle" )
    g.add_vertex( "Portland" )
    g.add_edge( "Seattle", "Portland", Street("I-5 south", 5000) )
    g.add_edge( "Portland", "Seattle", Street("I-5 north", 5500) )
    spt = g.shortest_path_tree( "Seattle", "Portland", State(g.numagencies,0), WalkOptions() )
    # With only street edges, the SPT out of Seattle starts on I-5 south.
    assert spt.get_vertex("Seattle").outgoing[0].payload.name == "I-5 south"
    g.add_vertex( "Portland-busstop" )
    g.add_vertex( "Seattle-busstop" )
    g.add_edge( "Seattle", "Seattle-busstop", Link() )
    g.add_edge( "Seattle-busstop", "Seattle", Link() )
    g.add_edge( "Portland", "Portland-busstop", Link() )
    g.add_edge( "Portland-busstop", "Portland", Link() )
    spt = g.shortest_path_tree( "Seattle", "Seattle-busstop", State(g.numagencies,0), WalkOptions() )
    assert spt.get_vertex("Seattle-busstop").incoming[0].payload.__class__ == Link
    spt.destroy()
    spt = g.shortest_path_tree( "Seattle-busstop", "Portland", State(g.numagencies,0), WalkOptions() )
    assert spt.get_vertex("Portland").incoming[0].payload.__class__ == Street
    spt.destroy()
    # One service period covering the whole day, in a zero-offset timezone.
    sc = ServiceCalendar()
    sc.add_period( 0, 86400, ["WKDY","SAT"] )
    tz = Timezone()
    tz.add_period( TimezonePeriod( 0, 86400, 0 ) )
    g.add_vertex( "Portland-busstop-onbus" )
    g.add_vertex( "Seattle-busstop-onbus" )
    # Three trips A/B/C boarding at t=10, 15, 400; each ride takes 20s.
    tb = TripBoard("WKDY", sc, tz, 0)
    tb.add_boarding( "A", 10, 0 )
    tb.add_boarding( "B", 15, 0 )
    tb.add_boarding( "C", 400, 0 )
    cr = Crossing()
    al = TripAlight("WKDY", sc, tz, 0)
    al.add_alighting( "A", 10+20, 0 )
    al.add_alighting( "B", 15+20, 0 )
    al.add_alighting( "C", 400+20, 0 )
    g.add_edge( "Seattle-busstop", "Seattle-busstop-onbus", tb )
    g.add_edge( "Seattle-busstop-onbus", "Portland-busstop-onbus", cr )
    g.add_edge( "Portland-busstop-onbus", "Portland-busstop", al )
    spt = g.shortest_path_tree( "Seattle", "Portland", State(g.numagencies,0), WalkOptions() )
    # Walking the incoming chain five hops back from Portland lands at Seattle.
    assert spt.get_vertex( "Portland" ).incoming[0].from_v.incoming[0].from_v.incoming[0].from_v.incoming[0].from_v.incoming[0].from_v.label == "Seattle"
    spt = g.shortest_path_tree( "Seattle", "Portland", State(g.numagencies,0), WalkOptions() )
    vertices, edges = spt.path( "Portland" )
    assert [v.label for v in vertices] == ['Seattle', 'Seattle-busstop', "Seattle-busstop-onbus", "Portland-busstop-onbus", 'Portland-busstop', 'Portland']
    assert [e.payload.__class__ for e in edges] == [Link, TripBoard, Crossing, TripAlight, Link]
    spt.destroy()
    g.destroy()
# Run the test suite when this file is executed directly.
if __name__=="__main__":
    unittest.main()
| 117.128342
| 1,661
| 0.623796
| 2,580
| 21,903
| 5.255426
| 0.227132
| 0.006195
| 0.01416
| 0.019913
| 0.779114
| 0.742828
| 0.694446
| 0.692529
| 0.683384
| 0.678295
| 0
| 0.518835
| 0.151623
| 21,903
| 186
| 1,662
| 117.758065
| 0.210849
| 0.003835
| 0
| 0.333333
| 0
| 0
| 0.494454
| 0.010359
| 0
| 0
| 0
| 0
| 0.209302
| 0
| null | null | 0
| 0.031008
| null | null | 0.023256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55f0459607c51ee658cb3529827f958bde1eaa5e
| 5,030
|
py
|
Python
|
core_engine/utils/gcp/automl_train.py
|
arpitkjain7/synapse
|
cb4cf28351bde94f4ad7ecc5df0714cfe5d616c6
|
[
"Apache-2.0"
] | 2
|
2021-08-02T07:56:38.000Z
|
2022-02-23T04:27:31.000Z
|
core_engine/utils/gcp/automl_train.py
|
arpitkjain7/synapse
|
cb4cf28351bde94f4ad7ecc5df0714cfe5d616c6
|
[
"Apache-2.0"
] | null | null | null |
core_engine/utils/gcp/automl_train.py
|
arpitkjain7/synapse
|
cb4cf28351bde94f4ad7ecc5df0714cfe5d616c6
|
[
"Apache-2.0"
] | null | null | null |
"""Thin wrappers around the Google Cloud AutoML client for starting training jobs."""
from google.cloud import automl

# Sample variable values
# project_id = "us-gcp-ame-con-be2-npd-1"
# dataset_id = "TCN8344915572575698944"
# display_name = "decision_caller_api_model_v1"

# Shared module-level client; constructed at import time, so importing this
# module requires valid Google Cloud credentials in the environment.
client = automl.AutoMlClient()
def train_text_classification_model(
    project_id: str, dataset_id: str, model_display_name: str, region: str
):
    """Kick off training of an AutoML text-classification model.

    create_model is asynchronous; only the long-running operation name is
    returned, echoed alongside the request parameters.
    """
    parent = f"projects/{project_id}/locations/{region}"
    model_spec = automl.Model(
        display_name=model_display_name,
        dataset_id=dataset_id,
        text_classification_model_metadata=automl.TextClassificationModelMetadata(),
    )
    operation = client.create_model(parent=parent, model=model_spec)
    result = {
        "operation_id": operation.operation.name,
        "dataset_id": dataset_id,
        "status": "Training Started",
        "project_id": project_id,
        "region": region,
    }
    return result
def train_ner_model(
    project_id: str, dataset_id: str, model_display_name: str, region: str
):
    """Kick off training of an AutoML text-extraction (NER) model.

    Leaves the metadata empty so Google's default base model is used;
    training runs asynchronously and only the operation name is returned.
    """
    # Fully qualified Google Cloud Platform location of the project.
    parent = f"projects/{project_id}/locations/{region}"
    extraction_metadata = automl.TextExtractionModelMetadata()
    model_spec = automl.Model(
        display_name=model_display_name,
        dataset_id=dataset_id,
        text_extraction_model_metadata=extraction_metadata,
    )
    # Submit the model-creation request in the chosen region.
    operation = client.create_model(parent=parent, model=model_spec)
    return dict(
        operation_id=operation.operation.name,
        dataset_id=dataset_id,
        status="Training Started",
        project_id=project_id,
        region=region,
    )
def train_image_classification_model(
    project_id: str, dataset_id: str, model_display_name: str, region: str
):
    """Kick off training of a cloud-hosted AutoML image-classification model.

    Uses a fixed training budget of 24000 milli-node-hours (24 node hours).
    Returns the long-running operation name plus the request parameters.
    """
    parent = f"projects/{project_id}/locations/{region}"
    classification_metadata = automl.ImageClassificationModelMetadata(
        train_budget_milli_node_hours=24000
    )
    model_spec = automl.Model(
        display_name=model_display_name,
        dataset_id=dataset_id,
        image_classification_model_metadata=classification_metadata,
    )
    # Submit the asynchronous model-creation request.
    operation = client.create_model(parent=parent, model=model_spec)
    return {
        "operation_id": operation.operation.name,
        "dataset_id": dataset_id,
        "status": "Training Started",
        "project_id": project_id,
        "region": region,
    }
def train_image_classification_edge_model(
    project_id: str,
    dataset_id: str,
    model_display_name: str,
    region: str,
    model_type: str = "mobile-versatile-1",
):
    """Kick off training of an exportable (edge) AutoML image-classification model.

    `model_type` selects the edge variant (default "mobile-versatile-1");
    the training budget is fixed at 24000 milli-node-hours.
    """
    parent = f"projects/{project_id}/locations/{region}"
    edge_metadata = automl.ImageClassificationModelMetadata(
        train_budget_milli_node_hours=24000, model_type=model_type
    )
    model_spec = automl.Model(
        display_name=model_display_name,
        dataset_id=dataset_id,
        image_classification_model_metadata=edge_metadata,
    )
    # Submit the asynchronous model-creation request in the chosen region.
    operation = client.create_model(parent=parent, model=model_spec)
    return dict(
        operation_id=operation.operation.name,
        dataset_id=dataset_id,
        status="Training Started",
        project_id=project_id,
        region=region,
    )
def train_object_detection_model(
    project_id: str, dataset_id: str, model_display_name: str, region: str
):
    """Start training a cloud-hosted AutoML image object-detection model.

    create_model is asynchronous; the long-running operation name is
    returned along with the request parameters.
    """
    project_location = f"projects/{project_id}/locations/{region}"
    # BUG FIX: this function previously built ImageClassificationModelMetadata
    # and set image_classification_model_metadata, which trains a
    # *classification* model. Object detection requires
    # ImageObjectDetectionModelMetadata and the matching Model field.
    metadata = automl.ImageObjectDetectionModelMetadata(
        train_budget_milli_node_hours=24000
    )
    model = automl.Model(
        display_name=model_display_name,
        dataset_id=dataset_id,
        image_object_detection_model_metadata=metadata,
    )
    # Create a model with the model metadata in the region.
    response = client.create_model(parent=project_location, model=model)
    return {
        "operation_id": response.operation.name,
        "dataset_id": dataset_id,
        "status": "Training Started",
        "project_id": project_id,
        "region": region,
    }
def train_object_detection_edge_model(
    project_id: str,
    dataset_id: str,
    model_display_name: str,
    region: str,
    model_type: str = "mobile-versatile-1",
):
    """Start training an exportable (edge) AutoML image object-detection model.

    `model_type` selects the edge variant; "mobile-versatile-1" is a valid
    object-detection edge type. Training runs asynchronously.
    """
    project_location = f"projects/{project_id}/locations/{region}"
    # BUG FIX: previously used ImageClassificationModelMetadata /
    # image_classification_model_metadata, which would train a classification
    # model instead of an object-detection model.
    metadata = automl.ImageObjectDetectionModelMetadata(
        train_budget_milli_node_hours=24000, model_type=model_type
    )
    model = automl.Model(
        display_name=model_display_name,
        dataset_id=dataset_id,
        image_object_detection_model_metadata=metadata,
    )
    # Create a model with the model metadata in the region.
    response = client.create_model(parent=project_location, model=model)
    return {
        "operation_id": response.operation.name,
        "dataset_id": dataset_id,
        "status": "Training Started",
        "project_id": project_id,
        "region": region,
    }
| 31.835443
| 74
| 0.694433
| 583
| 5,030
| 5.692967
| 0.137221
| 0.084061
| 0.086773
| 0.072311
| 0.87918
| 0.87918
| 0.87918
| 0.87918
| 0.864718
| 0.864718
| 0
| 0.011122
| 0.213519
| 5,030
| 157
| 75
| 32.038217
| 0.827856
| 0.107753
| 0
| 0.777778
| 0
| 0
| 0.142091
| 0.053619
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.007937
| 0
| 0.103175
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36116bb3794848d039d3cf6419c8f8f09cb87318
| 2,320
|
py
|
Python
|
defach.py
|
IyXerXez72/ScDefachXerXez
|
ba820610f837c61901cc23b9d6e1f65b8e43da59
|
[
"Apache-2.0"
] | null | null | null |
defach.py
|
IyXerXez72/ScDefachXerXez
|
ba820610f837c61901cc23b9d6e1f65b8e43da59
|
[
"Apache-2.0"
] | null | null | null |
defach.py
|
IyXerXez72/ScDefachXerXez
|
ba820610f837c61901cc23b9d6e1f65b8e43da59
|
[
"Apache-2.0"
] | null | null | null |
#NORECODE OKEH
#MAU BISA DI DENCRIPT JUGA JAN DI RECODE DONG
#^-^ MAKASIH
import marshal,zlib,base64
exec(marshal.loads(zlib.decompress(base64.b32decode("PCOJ2VL3OPNEMEC7QGGWH7FW4PMESOU5DPNI5SKMGDDCNNULQGNNQLRGCDJMCZUSFCJXIZG5QHSOQQLVI6YNHZFP6QJ7KY7UJO2XWJ3QEB2KMD4EJ355XPL5NK2WOQX7C7C7WEDP72DQMQABORODLUBVUAI5AI2AUOQEPAKOACOQIZYC6SEQCPIJ7LSIBDUBDZJXITGS5YKOQU5A2EU7QDON3UBDMAMX2NAG7QJHBW2D5GAOAEFNCRI42YKIUA2DW4ZQV2JT2CL45DSI45BOT3CAHJ7SHJIL2KTD4B3MD3UCFUBF7CCBSLAALUDH2EOYBTUEWQCXIBPQM6Q36QK2BK5A34DDMC72FLIDXIG7AG5AN6Q2WA45BV4BV2BX4F6YAJON4A5WDBDBXAVFMGYDYDCWQ3A62YNQHIGO4DEDWVQ3APQM53PJICGTX3NU6M2E65DNB7J6IK27X4AJWDZJG4EW7RFL5U2JLTXL7WWZOH4ZJIRSA45XWLU77VGKGMHDB23GKXP3J76NLE6YBEQCJEZ3RR2UYCJDPBH6A7JEXFSVZLQO4GBWVFNV4PLVT6FIQEALRVHAGIBB6NAVPEPUOVKIAJP7XJDMFHWKOVJJ7VSQNHTDW6OSLJORHKLS5YXNWAR4TIMNJUEMTGEMIK3CPP7GIIBC5KG7CUVLAXTABOTKRFWT4WLJAVUE5WZIMNEQSTAQUTLJZDKUYKVOLS4BJUNWRCV6J7C6E6YD3QIVXIS6LWUSW2UM22OKWLUTAI7AGNJGNEILYNR4CJVXHBAXYVVJSVHGXJJ2PFNDU22W6H42I6O4VSI6HEY7JND2PZDJSPH6DJRHLXNHLCFQQXBXFBD7HNIKWDVJTWLI5KRHY7SPDS7UPGUXTJBIUV5GVGDPDESRPV4EUDVNO75HLKV5JK5EKYVDPSM32QN2KVFTXW32IZVUXNFETPXDC4WHRQO655WVVQTVKQQXU7AF266MJRN4YUGFVTFL7CFPY56JJ6DSB32IJNLMAWUFBVFYQDWPUXJS2H6LWTM7N7JWC2SWH42N2N35GJXHH62AZV3WVYFWPXIOBNOBNGAG6XETHD3NSCADTBINF2XB3RRNY3CEMMPPGJQRBT3QLEQ7DTTGXWHB7CAEGHITB42EEHI3HKNQBKKL3M2NSZJOBILLPO3NW5KT3Q2BYVWIHRCCQ3GDDHQEJELPI5SNQKUTPBPDSK6GF56HUTL6RVPWADO3QHP7K2GK6QOT7SD5SMY6K5IQ74MLOMVYEHSPMLDMUSB3RTBPCXTJEIY3MPM7IN3Q7M64X3LPOWXKJEKNDG5GCY23UAY5OCG2W2LRE7RHW667LHIUDM3S4ASGI4ORZ6O5YBTMDMGR5VD2DUKDS5I5WIG4BQMPHCHBKGJHJDNLWAPDTMV3QW7B7IDBIKEMUYL2EQE5F775MMZCGG4V76WEUSXSH37JOKEMP4K4GTN64N7DGO77ZPXUR3NHQ6FJ7FOFD6MMXJ73FUIXOGPHI2N2CYN3EOZDYJN3LX3ESMLPMLZ3CAF3Q7NO7CHJDCOBOERXYYJ4RNZSYVLYIP4WOPUTLNS2AZMUNEKY4ELWUGK4JYDEV4TJAMCROQKRBLYYX3BT2JUFKPX2LZ4FUOVWLMAX4BBENT4MIVUCLDGG4W2CSGHO65YKBEIXIWMC6DACZYD2EBOSPFGOQUPVAFVIRWDIHU5L2V2ZZMCYYV6DUNBLXWU4Z4FJXZZ35DH2CXXSLPUD7Y6CAZOD4N6BLWKVTHUFWHAXLP5Y75XXO537O57XWIMV4VHT4ZSZHFCP5JS5EPBOTWJOUCNJ2TH44S5VDI427VGL4VJA2B2Q5GOJJHUUZBYQL74NSOR6V6NTI2CVJG4YXU5HZVGEPGTNNZ2CR3OUMQ4BY4FPRPBBIFBQ4WRVWQNXF7UGMMPMOPLTYU2JFAOR7PM7MOX5UGX
4NE62CKLF6TL5A4TZ4IX4BYLFTGTO7FBM4Q63XPDJZLYY73PWPRRY3NRZZHJRJZ3IZ43IRSZTXRWZDPR6ZD3RXYL7GX2GXDKYQDSOLXJ7COP3PRTI2HC4WJFHTV6HUOCRKJML76CEWTALV52W2VS3RRGMPZXQBCN4PAZMXRT2GC6RLACK6B5RBFOGHCSGJLXPVYUKZRV7F6QLSQYHBVQ="))))
| 386.666667
| 2,218
| 0.983621
| 24
| 2,320
| 95.083333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177951
| 0.006897
| 2,320
| 5
| 2,219
| 464
| 0.8125
| 0.02931
| 0
| 0
| 0
| 0
| 0.960427
| 0.960427
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
36663c71c58a196be7c3dd22f4f4404b5cdc1f50
| 1,294
|
py
|
Python
|
test/test_cleaners.py
|
zo-edv/py_win_unc
|
610b7c9ce4ea17554d04342126169b488c8ccfae
|
[
"MIT"
] | 10
|
2015-08-14T06:34:28.000Z
|
2020-10-03T17:48:09.000Z
|
test/test_cleaners.py
|
zo-edv/py_win_unc
|
610b7c9ce4ea17554d04342126169b488c8ccfae
|
[
"MIT"
] | 11
|
2017-01-12T23:43:56.000Z
|
2020-06-19T18:32:56.000Z
|
test/test_cleaners.py
|
zo-edv/py_win_unc
|
610b7c9ce4ea17554d04342126169b488c8ccfae
|
[
"MIT"
] | 8
|
2015-09-25T20:44:33.000Z
|
2018-10-04T03:19:42.000Z
|
from unittest import TestCase
from win_unc import cleaners as C
class TestCleaners(TestCase):
    """Exercise the win_unc cleaner helpers against representative inputs."""

    def test_clean_drive_letter(self):
        # (raw, expected): colon/backslash suffixes stripped, letter upper-cased.
        for raw, expected in [
            ('A', 'A'),
            ('A:', 'A'),
            ('A:\\', 'A'),
            ('a', 'A'),
            ('a:\\', 'A'),
        ]:
            self.assertEqual(C.clean_drive_letter(raw), expected)

    def test_clean_username(self):
        # Surrounding whitespace is stripped; case is preserved.
        for raw, expected in [
            ('username', 'username'),
            ('userNAME', 'userNAME'),
            (' user', 'user'),
            ('user ', 'user'),
            (' user ', 'user'),
        ]:
            self.assertEqual(C.clean_username(raw), expected)

    def test_clean_unc_path(self):
        # Trailing backslashes and surrounding whitespace are removed;
        # share suffixes (including IPC$) are preserved.
        for raw, expected in [
            (r'\\path', r'\\path'),
            (r'\\path\B', r'\\path\B'),
            (r'\\path\IPC$', r'\\path\IPC$'),
            (r'\\path\\', r'\\path'),
            (r' \\path', r'\\path'),
            (r'\\path ', r'\\path'),
            (r' \\path ', r'\\path'),
        ]:
            self.assertEqual(C.clean_unc_path(raw), expected)
| 44.62069
| 74
| 0.645286
| 180
| 1,294
| 4.427778
| 0.138889
| 0.31995
| 0.34128
| 0.44793
| 0.801757
| 0.7867
| 0.7867
| 0.7867
| 0.745295
| 0.745295
| 0
| 0
| 0.167697
| 1,294
| 28
| 75
| 46.214286
| 0.740019
| 0
| 0
| 0.304348
| 0
| 0
| 0.142968
| 0
| 0
| 0
| 0
| 0
| 0.73913
| 1
| 0.130435
| false
| 0
| 0.086957
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e9f90f402d62511d3d3637845498973eda72a640
| 97
|
py
|
Python
|
source/mq/test2.py
|
PYH-torder/robot-test
|
381df1e8911d8ca43c2a57613a7a75e674fea7b6
|
[
"MIT"
] | null | null | null |
source/mq/test2.py
|
PYH-torder/robot-test
|
381df1e8911d8ca43c2a57613a7a75e674fea7b6
|
[
"MIT"
] | null | null | null |
source/mq/test2.py
|
PYH-torder/robot-test
|
381df1e8911d8ca43c2a57613a7a75e674fea7b6
|
[
"MIT"
] | null | null | null |
import time  # NOTE(review): unused in this snippet — confirm before removing
import dyccon

# dyccon.order("spacle")
# Presumably submits/triggers the order identified by "ade4" — dyccon is a
# project-local module, so verify its semantics against its source.
dyccon.order("ade4")
# dyccon.order("spacle")
| 16.166667
| 24
| 0.731959
| 13
| 97
| 5.461538
| 0.461538
| 0.464789
| 0.478873
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.092784
| 97
| 6
| 25
| 16.166667
| 0.795455
| 0.463918
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
18250de3d102a272cb5342e088886076ede45dc1
| 13,051
|
py
|
Python
|
py3canvas/tests/files.py
|
tylerclair/py3canvas
|
7485d458606b65200f0ffa5bbe597a9d0bee189f
|
[
"MIT"
] | null | null | null |
py3canvas/tests/files.py
|
tylerclair/py3canvas
|
7485d458606b65200f0ffa5bbe597a9d0bee189f
|
[
"MIT"
] | null | null | null |
py3canvas/tests/files.py
|
tylerclair/py3canvas
|
7485d458606b65200f0ffa5bbe597a9d0bee189f
|
[
"MIT"
] | null | null | null |
"""Files API Tests for Version 1.0.
This is a testing template for the generated FilesAPI Class.
"""
import unittest
import requests
import secrets
from py3canvas.apis.files import FilesAPI
from py3canvas.apis.files import File
from py3canvas.apis.files import Folder
from py3canvas.apis.files import Usagerights
from py3canvas.apis.files import License
class TestFilesAPI(unittest.TestCase):
    """Tests for the FilesAPI.

    NOTE(review): generated integration-test template. Every id is a `None`
    placeholder ("Change me!!"), so these tests hit a live Canvas instance
    and will fail until real ids and credentials (via `secrets`) are filled
    in. Mutating endpoints (PUT/POST) are deliberately left as `pass`.
    """

    def setUp(self):
        # Live client; instance address and token come from the secrets module.
        self.client = FilesAPI(secrets.instance_address, secrets.access_token)

    def test_get_quota_information_courses(self):
        """Integration test for the FilesAPI.get_quota_information_courses method."""
        course_id = None # Change me!!
        r = self.client.get_quota_information_courses(course_id)

    def test_get_quota_information_groups(self):
        """Integration test for the FilesAPI.get_quota_information_groups method."""
        group_id = None # Change me!!
        r = self.client.get_quota_information_groups(group_id)

    def test_get_quota_information_users(self):
        """Integration test for the FilesAPI.get_quota_information_users method."""
        user_id = None # Change me!!
        r = self.client.get_quota_information_users(user_id)

    def test_list_files_courses(self):
        """Integration test for the FilesAPI.list_files_courses method."""
        course_id = None # Change me!!
        r = self.client.list_files_courses(
            course_id,
            content_types=None,
            exclude_content_types=None,
            include=None,
            only=None,
            order=None,
            search_term=None,
            sort=None,
        )

    def test_list_files_users(self):
        """Integration test for the FilesAPI.list_files_users method."""
        user_id = None # Change me!!
        r = self.client.list_files_users(
            user_id,
            content_types=None,
            exclude_content_types=None,
            include=None,
            only=None,
            order=None,
            search_term=None,
            sort=None,
        )

    def test_list_files_groups(self):
        """Integration test for the FilesAPI.list_files_groups method."""
        group_id = None # Change me!!
        r = self.client.list_files_groups(
            group_id,
            content_types=None,
            exclude_content_types=None,
            include=None,
            only=None,
            order=None,
            search_term=None,
            sort=None,
        )

    def test_list_files_folders(self):
        """Integration test for the FilesAPI.list_files_folders method."""
        id = None # Change me!!
        r = self.client.list_files_folders(
            id,
            content_types=None,
            exclude_content_types=None,
            include=None,
            only=None,
            order=None,
            search_term=None,
            sort=None,
        )

    def test_get_public_inline_preview_url(self):
        """Integration test for the FilesAPI.get_public_inline_preview_url method."""
        id = None # Change me!!
        r = self.client.get_public_inline_preview_url(id, submission_id=None)

    def test_get_file_files(self):
        """Integration test for the FilesAPI.get_file_files method."""
        id = None # Change me!!
        r = self.client.get_file_files(id, include=None)

    def test_get_file_courses(self):
        """Integration test for the FilesAPI.get_file_courses method."""
        course_id = None # Change me!!
        id = None # Change me!!
        r = self.client.get_file_courses(course_id, id, include=None)

    def test_get_file_groups(self):
        """Integration test for the FilesAPI.get_file_groups method."""
        group_id = None # Change me!!
        id = None # Change me!!
        r = self.client.get_file_groups(group_id, id, include=None)

    def test_get_file_users(self):
        """Integration test for the FilesAPI.get_file_users method."""
        user_id = None # Change me!!
        id = None # Change me!!
        # NOTE(review): argument order is (id, user_id) here, unlike the
        # (context_id, id) order used by the courses/groups variants —
        # verify against the generated FilesAPI signature.
        r = self.client.get_file_users(id, user_id, include=None)

    def test_update_file(self):
        """Integration test for the FilesAPI.update_file method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_delete_file(self):
        """Integration test for the FilesAPI.delete_file method."""
        id = None # Change me!!
        r = self.client.delete_file(id, replace=None)

    def test_reset_link_verifier(self):
        """Integration test for the FilesAPI.reset_link_verifier method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_list_folders(self):
        """Integration test for the FilesAPI.list_folders method."""
        id = None # Change me!!
        r = self.client.list_folders(id)

    def test_list_all_folders_courses(self):
        """Integration test for the FilesAPI.list_all_folders_courses method."""
        course_id = None # Change me!!
        r = self.client.list_all_folders_courses(course_id)

    def test_list_all_folders_users(self):
        """Integration test for the FilesAPI.list_all_folders_users method."""
        user_id = None # Change me!!
        r = self.client.list_all_folders_users(user_id)

    def test_list_all_folders_groups(self):
        """Integration test for the FilesAPI.list_all_folders_groups method."""
        group_id = None # Change me!!
        r = self.client.list_all_folders_groups(group_id)

    def test_resolve_path_courses_full_path(self):
        """Integration test for the FilesAPI.resolve_path_courses_full_path method."""
        course_id = None # Change me!!
        r = self.client.resolve_path_courses_full_path(course_id)

    def test_resolve_path_courses(self):
        """Integration test for the FilesAPI.resolve_path_courses method."""
        course_id = None # Change me!!
        r = self.client.resolve_path_courses(course_id)

    def test_resolve_path_users_full_path(self):
        """Integration test for the FilesAPI.resolve_path_users_full_path method."""
        user_id = None # Change me!!
        r = self.client.resolve_path_users_full_path(user_id)

    def test_resolve_path_users(self):
        """Integration test for the FilesAPI.resolve_path_users method."""
        user_id = None # Change me!!
        r = self.client.resolve_path_users(user_id)

    def test_resolve_path_groups_full_path(self):
        """Integration test for the FilesAPI.resolve_path_groups_full_path method."""
        group_id = None # Change me!!
        r = self.client.resolve_path_groups_full_path(group_id)

    def test_resolve_path_groups(self):
        """Integration test for the FilesAPI.resolve_path_groups method."""
        group_id = None # Change me!!
        r = self.client.resolve_path_groups(group_id)

    def test_get_folder_courses(self):
        """Integration test for the FilesAPI.get_folder_courses method."""
        course_id = None # Change me!!
        id = None # Change me!!
        r = self.client.get_folder_courses(course_id, id)

    def test_get_folder_users(self):
        """Integration test for the FilesAPI.get_folder_users method."""
        user_id = None # Change me!!
        id = None # Change me!!
        # NOTE(review): (id, user_id) order again differs from the
        # courses/groups variants — verify against the FilesAPI signature.
        r = self.client.get_folder_users(id, user_id)

    def test_get_folder_groups(self):
        """Integration test for the FilesAPI.get_folder_groups method."""
        group_id = None # Change me!!
        id = None # Change me!!
        r = self.client.get_folder_groups(group_id, id)

    def test_get_folder_folders(self):
        """Integration test for the FilesAPI.get_folder_folders method."""
        id = None # Change me!!
        r = self.client.get_folder_folders(id)

    def test_update_folder(self):
        """Integration test for the FilesAPI.update_folder method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_create_folder_courses(self):
        """Integration test for the FilesAPI.create_folder_courses method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_create_folder_users(self):
        """Integration test for the FilesAPI.create_folder_users method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_create_folder_groups(self):
        """Integration test for the FilesAPI.create_folder_groups method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_create_folder_folders(self):
        """Integration test for the FilesAPI.create_folder_folders method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_delete_folder(self):
        """Integration test for the FilesAPI.delete_folder method."""
        id = None # Change me!!
        r = self.client.delete_folder(id, force=None)

    def test_upload_file(self):
        """Integration test for the FilesAPI.upload_file method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_copy_file(self):
        """Integration test for the FilesAPI.copy_file method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_copy_folder(self):
        """Integration test for the FilesAPI.copy_folder method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_get_uploaded_media_folder_for_user_courses(self):
        """Integration test for the FilesAPI.get_uploaded_media_folder_for_user_courses method."""
        course_id = None # Change me!!
        r = self.client.get_uploaded_media_folder_for_user_courses(course_id)

    def test_get_uploaded_media_folder_for_user_groups(self):
        """Integration test for the FilesAPI.get_uploaded_media_folder_for_user_groups method."""
        group_id = None # Change me!!
        r = self.client.get_uploaded_media_folder_for_user_groups(group_id)

    def test_set_usage_rights_courses(self):
        """Integration test for the FilesAPI.set_usage_rights_courses method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_set_usage_rights_groups(self):
        """Integration test for the FilesAPI.set_usage_rights_groups method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_set_usage_rights_users(self):
        """Integration test for the FilesAPI.set_usage_rights_users method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_remove_usage_rights_courses(self):
        """Integration test for the FilesAPI.remove_usage_rights_courses method."""
        course_id = None # Change me!!
        file_ids = None # Change me!!
        r = self.client.remove_usage_rights_courses(
            course_id, file_ids, folder_ids=None
        )

    def test_remove_usage_rights_groups(self):
        """Integration test for the FilesAPI.remove_usage_rights_groups method."""
        group_id = None # Change me!!
        file_ids = None # Change me!!
        # NOTE(review): file_ids precedes group_id here, unlike the courses
        # variant — verify against the FilesAPI signature.
        r = self.client.remove_usage_rights_groups(file_ids, group_id, folder_ids=None)

    def test_remove_usage_rights_users(self):
        """Integration test for the FilesAPI.remove_usage_rights_users method."""
        user_id = None # Change me!!
        file_ids = None # Change me!!
        r = self.client.remove_usage_rights_users(file_ids, user_id, folder_ids=None)

    def test_list_licenses_courses(self):
        """Integration test for the FilesAPI.list_licenses_courses method."""
        course_id = None # Change me!!
        r = self.client.list_licenses_courses(course_id)

    def test_list_licenses_groups(self):
        """Integration test for the FilesAPI.list_licenses_groups method."""
        group_id = None # Change me!!
        r = self.client.list_licenses_groups(group_id)

    def test_list_licenses_users(self):
        """Integration test for the FilesAPI.list_licenses_users method."""
        user_id = None # Change me!!
        r = self.client.list_licenses_users(user_id)
| 37.502874
| 126
| 0.675197
| 1,718
| 13,051
| 4.858556
| 0.061118
| 0.03666
| 0.083862
| 0.129148
| 0.922487
| 0.874326
| 0.808554
| 0.755601
| 0.610159
| 0.483767
| 0
| 0.000709
| 0.243047
| 13,051
| 347
| 127
| 37.610951
| 0.844215
| 0.405026
| 0
| 0.447917
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.260417
| false
| 0.067708
| 0.041667
| 0
| 0.307292
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
a107b4984ea5f5f40052e1b8943b1d8d73349e1b
| 11,013
|
py
|
Python
|
reddit-scraper.py
|
lambda-labs-13-stock-price-2/reddit-scraper
|
999b0f8aaad3661658911e5212d0cf6d76ed3a47
|
[
"MIT"
] | null | null | null |
reddit-scraper.py
|
lambda-labs-13-stock-price-2/reddit-scraper
|
999b0f8aaad3661658911e5212d0cf6d76ed3a47
|
[
"MIT"
] | null | null | null |
reddit-scraper.py
|
lambda-labs-13-stock-price-2/reddit-scraper
|
999b0f8aaad3661658911e5212d0cf6d76ed3a47
|
[
"MIT"
] | null | null | null |
[?1049h[?1h=[1;55r[?12;25h[?12l[?25h[27m[23m[m[H[2J[?25l[2;1H[1m[34m~ [3;1H~ [4;1H~ [5;1H~ [6;1H~ [7;1H~ [8;1H~ [9;1H~ [10;1H~ [11;1H~ [12;1H~ [13;1H~ [14;1H~ [15;1H~ [16;1H~ [17;1H~ [18;1H~ [19;1H~ [20;1H~ [21;1H~ [22;1H~ [23;1H~ [24;1H~ [25;1H~ [26;1H~ [27;1H~ [28;1H~ [29;1H~ [30;1H~ [31;1H~ [32;1H~ [33;1H~ [34;1H~ [35;1H~ [36;1H~ [37;1H~ [38;1H~ [39;1H~ [40;1H~ [41;1H~ [42;1H~ [43;1H~ [44;1H~ [45;1H~ [46;1H~ [47;1H~ [48;1H~ [49;1H~ [50;1H~ [51;1H~ [52;1H~ [53;1H~ [54;1H~ [m[55;172H0,0-1[9CAll[22;87HVIM - Vi IMproved[24;87Hversion 7.4.1689[25;83Hby Bram Moolenaar et al.[26;68HModified by pkg-vim-maintainers@lists.alioth.debian.org[27;74HVim is open source and freely distributable[29;81HHelp poor children in Uganda![30;72Htype :help iccf[34m<Enter>[m for information [32;72Htype :q[34m<Enter>[m to exit [33;72Htype :help[34m<Enter>[m or [34m<F1>[m for on-line help[34;72Htype :help version7[34m<Enter>[m for version info[1;1H[?12l[?25h[55;1H
[?1l>[?1049l
| 5,506.5
| 10,997
| 0.055752
| 314
| 11,013
| 2.267516
| 0.38535
| 0.235955
| 0.050562
| 0.05618
| 0.095506
| 0.039326
| 0
| 0
| 0
| 0
| 0
| 0.265979
| 0.911922
| 11,013
| 2
| 10,998
| 5,506.5
| 0.36701
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a158a72bde6eaf1436d7ebef2d73332de3d17ec3
| 50
|
py
|
Python
|
instance/config.py
|
VirginiaNdungu1/Taarifa-App
|
0a04bd0ddffd43a59cb92a136645cd9c8d4a1768
|
[
"MIT"
] | null | null | null |
instance/config.py
|
VirginiaNdungu1/Taarifa-App
|
0a04bd0ddffd43a59cb92a136645cd9c8d4a1768
|
[
"MIT"
] | null | null | null |
instance/config.py
|
VirginiaNdungu1/Taarifa-App
|
0a04bd0ddffd43a59cb92a136645cd9c8d4a1768
|
[
"MIT"
] | null | null | null |
# NOTE(review): hard-coded API key committed to source control — rotate this
# key and load it from an environment variable or secret store instead.
NEWS_API_KEY = "aea4c50137034ada8a03fa5b0dc38047"
| 25
| 49
| 0.88
| 4
| 50
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.404255
| 0.06
| 50
| 1
| 50
| 50
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0.64
| 0.64
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a182ef98a8de49f2c145a2fdb18e438214842cb0
| 286
|
py
|
Python
|
Python/Floor, Ceil and Rint.py
|
HarshitRuwali/HackerRank-Solutions
|
29c3ebd87723e1237866a551783bf62cf470d919
|
[
"MIT"
] | 8
|
2020-07-16T12:17:16.000Z
|
2022-01-11T04:24:03.000Z
|
Python/Floor, Ceil and Rint.py
|
HarshitRuwali/HackerRank-Solutions
|
29c3ebd87723e1237866a551783bf62cf470d919
|
[
"MIT"
] | null | null | null |
Python/Floor, Ceil and Rint.py
|
HarshitRuwali/HackerRank-Solutions
|
29c3ebd87723e1237866a551783bf62cf470d919
|
[
"MIT"
] | 5
|
2020-11-30T17:40:15.000Z
|
2022-02-28T11:26:59.000Z
|
import numpy as np
# Pad non-negative numbers with a leading space so the printed arrays align
# (matches the judge's expected output format).
np.set_printoptions(sign=' ')
# One line of space-separated numbers from stdin -> float array.
arr = np.array(input().split(),float)
print(np.floor(arr), np.ceil(arr), np.rint(arr), sep = '\n')
# The bare string below is a no-op expression serving as a visual divider
# between the two alternative solutions.
'''
OR
'''
# NOTE(review): this second variant also executes when the file is run, so it
# consumes a second input line; it omits set_printoptions, so its output
# formatting differs from the first variant.
import numpy as np
arr = np.array(input().split(), float)
print(np.floor(arr), np.ceil(arr), np.rint(arr), sep = '\n')
| 15.052632
| 60
| 0.622378
| 49
| 286
| 3.612245
| 0.387755
| 0.169492
| 0.146893
| 0.169492
| 0.700565
| 0.700565
| 0.700565
| 0.700565
| 0.700565
| 0.700565
| 0
| 0
| 0.13986
| 286
| 18
| 61
| 15.888889
| 0.719512
| 0
| 0
| 0.857143
| 0
| 0
| 0.018116
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0.428571
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
a190ddb88376b1056c91323b28e02c74348d9549
| 24
|
py
|
Python
|
03_Day_Operators/19.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
03_Day_Operators/19.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
03_Day_Operators/19.py
|
diegofregolente/30-Days-Of-Python
|
e0cad31f6d5ab1384ad6fa5a5d24a84771d6c267
|
[
"Apache-2.0"
] | null | null | null |
# Exercise 19: comparing str '10' to int 10 — Python never coerces across
# these types for ==, so this always prints False.
print('10' == 10) # 19
| 12
| 23
| 0.458333
| 4
| 24
| 2.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.25
| 24
| 1
| 24
| 24
| 0.277778
| 0.083333
| 0
| 0
| 0
| 0
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a1f63938406cb863ba16bac0c1ee0742f5a072f0
| 6,593
|
py
|
Python
|
stellarpop/bak25sep12/Sampler.py
|
Annarien/GravitationalLensesResources
|
55d653f95bfd1e19c66a64079b5af39ea1e000a5
|
[
"MIT"
] | null | null | null |
stellarpop/bak25sep12/Sampler.py
|
Annarien/GravitationalLensesResources
|
55d653f95bfd1e19c66a64079b5af39ea1e000a5
|
[
"MIT"
] | null | null | null |
stellarpop/bak25sep12/Sampler.py
|
Annarien/GravitationalLensesResources
|
55d653f95bfd1e19c66a64079b5af39ea1e000a5
|
[
"MIT"
] | null | null | null |
import numpy
def SimpleSample(pars,costs,deterministics,niter,cov=None,jump=None):
    """Metropolis MCMC sampler over `pars`; returns (logps, trace, dets).

    pars           -- stochastic objects exposing .value, .logp and .__name__
    costs          -- extra likelihood terms exposing .logp
    deterministics -- objects whose .value is recorded at every step
    niter          -- number of iterations (coerced to int)
    cov            -- 1-D per-parameter step widths, or a full covariance
                      matrix; defaults to widths keyed on the parameter name
    jump           -- (stretch, offset) for the log-uniform step scale
                      z = 10**(U*stretch - offset); default (3.3, 3.)

    NOTE: Python 2 module (uses xrange).
    """
    if jump is None:
        stretch,offset = 3.3,3.
    else:
        stretch,offset = jump
    nvars = len(pars)
    niter = int(niter)
    trace = numpy.empty((niter,nvars))
    logps = numpy.zeros(niter)
    dets = []
    if cov is None:
        # Default proposal widths keyed on the prefix of the parameter name.
        widths = {'x':0.05,'y':0.05,'reff':0.1,'q':0.03,'pa':1.,'eta':0.03,'nu':0.03}
        cov = numpy.empty(nvars)
        for varIndx in xrange(nvars):
            name = pars[varIndx].__name__
            cov[varIndx] = widths[name.split('_')[0]]
    else:
        cov = numpy.asarray(cov)
    blank = numpy.zeros(nvars)
    # Record the starting state in slot 0.
    for varIndx in xrange(nvars):
        trace[0,varIndx] = pars[varIndx].value
        logps[0] += pars[varIndx].logp
    for cost in costs:
        logps[0] += cost.logp
    dets.append([d.value for d in deterministics])
    for i in xrange(1,niter):
        # Log-uniform scale factor applied to the proposal step.
        z = 10**(numpy.random.random(nvars)*stretch-offset)
        if cov.ndim==1:
            W = numpy.random.randn(cov.size)*cov*z
        else:
            W = numpy.random.multivariate_normal(blank,cov)*z
        logp = 0.
        updates = trace[i-1].copy()+W
        bad = False
        for varIndx in xrange(nvars):
            pars[varIndx].value = updates[varIndx]
        for varIndx in xrange(nvars):
            try:
                logp += pars[varIndx].logp
            except Exception:
                # Proposal outside the prior support: penalize and reject below.
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                logp += -1e200
                bad = True
                break
        if bad:
            logps[i] = logps[i-1]
            trace[i] = trace[i-1].copy()
            dets.append(dets[-1])
            continue
        for cost in costs:
            logp += cost.logp
        if logp>logps[i-1]:
            # Uphill move: always accept.
            logps[i] = logp
            trace[i] = updates
            dets.append([d.value for d in deterministics])
            continue
        if logp-logps[i-1]>numpy.log(numpy.random.random()):
            # Metropolis acceptance of a downhill move.
            logps[i] = logp
            trace[i] = updates
            dets.append([d.value for d in deterministics])
        else:
            logps[i] = logps[i-1]
            trace[i] = trace[i-1].copy()
            dets.append(dets[-1])
    # Leave the parameter objects at the final sampled state.
    for varIndx in xrange(nvars):
        pars[varIndx].value = trace[-1][varIndx]
    return logps,trace,dets
def Optimizer(pars,costs,deterministics,niter,cov=None):
from scipy.optimize import leastsq
from numpy import exp
nvars = len(pars)
fake = numpy.ones(nvars*10)
def optFunc(p):
post = 0.
for i in range(p.size):
try:
pars[i].value = p[i]
post += pars[i].logp
except:
print 'blah'
return fake*1e10
for cost in costs:
post += cost.logp
return exp(post*-1)*fake
inpar = numpy.empty(nvars)
for i in range(nvars):
inpar[i] = pars[i].value
outpar,ier = leastsq(optFunc,inpar,epsfcn=1e-4)
return outpar
def MCMCOpt(inpars,costs,deterministics,niter,cov=None,jump=None):
    """Greedy MCMC optimizer: like SimpleSample, but accepts only uphill moves.

    inpars         -- candidate parameters; any whose .logp cannot currently
                      be evaluated are silently dropped from the sampled set
    costs          -- extra likelihood terms exposing .logp
    deterministics -- objects whose .value is recorded at every step
    niter          -- number of iterations
    cov            -- 1-D step widths or covariance matrix (default name-based)
    jump           -- (lo, hi) bounds for the step scale, converted to
                      log10 stretch/offset (NOTE: the proposal below actually
                      uses a fixed log-normal scale, so stretch/offset are
                      currently unused)

    Returns (logps, trace, dets).

    Fixes vs. original: removed an unreachable duplicate sampling loop that
    followed the return statement; user-supplied `cov` is now passed through
    numpy.asarray (consistent with SimpleSample, where a plain list would
    otherwise break on cov.ndim); bare excepts narrowed.
    """
    if jump is None:
        stretch,offset = 3.3,3
    else:
        from math import log10
        lo,hi = jump
        lo,hi = log10(lo),log10(hi)
        stretch,offset = hi+lo,lo
    # Keep only parameters whose logp is evaluable right now.
    pars = []
    for par in inpars:
        try:
            par.logp
            pars.append(par)
        except Exception:
            pass
    nvars = len(pars)
    trace = numpy.empty((niter,nvars))
    logps = numpy.zeros(niter)
    dets = []
    if cov is None:
        widths = {'x':0.05,'y':0.05,'re':0.1,'q':0.03,'pa':1.,'eta':0.03,'nu':0.03}
        cov = numpy.empty(nvars)
        for varIndx in xrange(nvars):
            name = pars[varIndx].__name__
            cov[varIndx] = widths[name.split('_')[0]]
    else:
        cov = numpy.asarray(cov)
    blank = numpy.zeros(nvars)
    # Record the starting state in slot 0.
    for varIndx in xrange(nvars):
        trace[0,varIndx] = pars[varIndx].value
        logps[0] += pars[varIndx].logp
    for cost in costs:
        logps[0] += cost.logp
    dets.append([d.value for d in deterministics])
    for i in xrange(1,niter):
        # z = 10**(numpy.random.random(nvars)*stretch-offset)
        # Fixed log-normal scale factor on the proposal step.
        z = 10**(numpy.random.randn(nvars)*0.3)
        if cov.ndim==1:
            W = numpy.random.randn(cov.size)*cov*z
        else:
            W = numpy.random.multivariate_normal(blank,cov)*z
        logp = 0.
        updates = trace[i-1].copy()+W
        bad = False
        for varIndx in xrange(nvars):
            pars[varIndx].value = updates[varIndx]
        for varIndx in xrange(nvars):
            try:
                logp += pars[varIndx].logp
            except Exception:
                # Out-of-support proposal: mark bad and reject below.
                logp = -1e300
                bad = True
                break
        if bad:
            logps[i] = logps[i-1]
            trace[i] = trace[i-1].copy()
            dets.append(dets[-1])
            continue
        for cost in costs:
            logp += cost.logp
        if logp>logps[i-1]:
            # Greedy: accept only improvements (no Metropolis downhill moves).
            logps[i] = logp
            trace[i] = updates
            dets.append([d.value for d in deterministics])
            continue
        else:
            logps[i] = logps[i-1]
            trace[i] = trace[i-1].copy()
            dets.append(dets[-1])
    # Leave the parameter objects at the final sampled state.
    for varIndx in xrange(nvars):
        pars[varIndx].value = trace[-1][varIndx]
    return logps,trace,dets
| 30.243119
| 85
| 0.511452
| 855
| 6,593
| 3.928655
| 0.121637
| 0.039297
| 0.04287
| 0.064305
| 0.821375
| 0.812444
| 0.79994
| 0.79994
| 0.79994
| 0.79994
| 0
| 0.030167
| 0.356439
| 6,593
| 217
| 86
| 30.382488
| 0.76149
| 0.007735
| 0
| 0.792929
| 0
| 0
| 0.004899
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.005051
| 0.020202
| null | null | 0.005051
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b81468321b433ee1143d1fd4f186ba69e53d1a07
| 4,802
|
py
|
Python
|
User/tests/test_view_dynamic_pages.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | 1
|
2022-02-15T13:36:29.000Z
|
2022-02-15T13:36:29.000Z
|
User/tests/test_view_dynamic_pages.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | null | null | null |
User/tests/test_view_dynamic_pages.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | null | null | null |
from django.urls import reverse
def test_view_signup_view(client):
    """Signup page renders with 200 and shows the 'Zarejestruj' label."""
    resp = client.get(reverse('User:signup'))
    assert resp.status_code == 200
    assert 'Zarejestruj' in resp.content.decode('UTF-8')
def test_view_profile_creation_user_logged(user_A, client):
    """Logged-in user can open the additional-profile creation page."""
    client.force_login(user_A)
    resp = client.get(reverse('User:profile_create_additional'))
    assert resp.status_code == 200
    assert 'Zapisz' in resp.content.decode('UTF-8')
def test_view_profile_creation_user_anonymous(client):
    """Anonymous user is redirected to login from profile creation."""
    resp = client.get(reverse('User:profile_create_additional'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_profile_creation_flat_user_logged(user_A, client):
    """Logged-in user can open the flat creation page."""
    client.force_login(user_A)
    resp = client.get(reverse('User:profile_create_flat'))
    assert resp.status_code == 200
    assert 'Dodaj' in resp.content.decode('UTF-8')
def test_view_profile_creation_flat_user_anonymous(client):
    """Anonymous user is redirected to login from flat creation."""
    resp = client.get(reverse('User:profile_create_flat'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_profile_user_logged(user_A, user_A_profile, client):
    """Logged-in user sees their own username on the profile page."""
    client.force_login(user_A)
    user_A_profile.user = user_A
    resp = client.get(reverse('User:profile'))
    assert resp.status_code == 200
    assert user_A.username in resp.content.decode('UTF-8')
def test_view_profile_user_anonymous(client):
    """Anonymous user is redirected to login from the profile page."""
    resp = client.get(reverse('User:profile'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_profile_update_user_logged(user_A, user_A_profile, client):
    """Logged-in user sees their phone number on the profile update form."""
    client.force_login(user_A)
    user_A_profile.user = user_A
    resp = client.get(reverse('User:profile_update'))
    assert resp.status_code == 200
    assert user_A.profile.phone_number in resp.content.decode('UTF-8')
def test_view_profile_update_user_anonymous(client):
    """Anonymous user is redirected to login from profile update."""
    resp = client.get(reverse('User:profile_update'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_profile_pass_change_user_logged(user_A, user_A_profile, client):
    """Logged-in user can open the password change form."""
    client.force_login(user_A)
    user_A_profile.user = user_A
    resp = client.get(reverse('User:pass_change'))
    assert resp.status_code == 200
    assert 'Stare hasło' in resp.content.decode('UTF-8')
def test_view_profile_pass_change_user_anonymous(client):
    """Anonymous user is redirected to login from password change."""
    resp = client.get(reverse('User:pass_change'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_profile_delete_user_logged(user_A, user_A_profile, client):
    """Logged-in user sees the delete confirmation page."""
    client.force_login(user_A)
    user_A_profile.user = user_A
    resp = client.get(reverse('User:delete_user'))
    assert resp.status_code == 200
    assert 'Potwierdź' in resp.content.decode('UTF-8')
def test_view_profile_delete_user_anonymous(client):
    """Anonymous user is redirected to login from account deletion."""
    resp = client.get(reverse('User:delete_user'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_profile_logout(user_A, user_A_profile, client):
    """Logout redirects (302) to the 'main' view."""
    client.login(username="Kermit", password="Secret")
    user_A_profile.user = user_A
    resp = client.get(reverse('User:user_logout'))
    assert resp.status_code == 302
    assert resp['Location'] == reverse('main')
def test_view_profile_login(client):
    """Login page renders with 200 and the 'Zaloguj się' label."""
    resp = client.get(reverse('User:user_login'))
    assert resp.status_code == 200
    assert 'Zaloguj się' in resp.content.decode('UTF-8')
def test_view_report_failure_user_logged(user_A, user_A_profile, client):
    """Logged-in user can open the failure-report form."""
    client.force_login(user_A)
    user_A_profile.user = user_A
    resp = client.get(reverse('User:report_failure'))
    assert resp.status_code == 200
    assert 'Wyślij' in resp.content.decode('UTF-8')
def test_view_report_failure_user_anonymous(client):
    """Anonymous user is redirected to login from failure report."""
    resp = client.get(reverse('User:report_failure'))
    assert resp.status_code == 302
    assert 'login' in resp.url
def test_view_contact_neighbour_user_logged(user_A, user_A_profile, client):
    """Logged-in user can open the contact-neighbour form."""
    client.force_login(user_A)
    user_A_profile.user = user_A
    resp = client.get(reverse('User:contact_neighbour'))
    assert resp.status_code == 200
    assert 'Wyślij' in resp.content.decode('UTF-8')
def test_view_contact_neighbour_user_anonymous(client):
    """Anonymous user is redirected to login from contact-neighbour."""
    resp = client.get(reverse('User:contact_neighbour'))
    assert resp.status_code == 302
    assert 'login' in resp.url
| 28.081871
| 78
| 0.734486
| 680
| 4,802
| 4.902941
| 0.088235
| 0.059988
| 0.062687
| 0.113977
| 0.916317
| 0.916317
| 0.879124
| 0.847031
| 0.836833
| 0.826335
| 0
| 0.016671
| 0.163057
| 4,802
| 170
| 79
| 28.247059
| 0.812889
| 0
| 0
| 0.705357
| 0
| 0
| 0.111828
| 0.031653
| 0
| 0
| 0
| 0
| 0.339286
| 1
| 0.169643
| false
| 0.044643
| 0.008929
| 0
| 0.178571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62b9fdaa9f207454cbb7efe594f9ea696396b7a5
| 74,405
|
py
|
Python
|
pytests/tuqquery/tuq_advisor.py
|
ashwin2002/testrunner
|
141369afdfb920bebedd0f359caa926621497133
|
[
"Apache-2.0"
] | null | null | null |
pytests/tuqquery/tuq_advisor.py
|
ashwin2002/testrunner
|
141369afdfb920bebedd0f359caa926621497133
|
[
"Apache-2.0"
] | null | null | null |
pytests/tuqquery/tuq_advisor.py
|
ashwin2002/testrunner
|
141369afdfb920bebedd0f359caa926621497133
|
[
"Apache-2.0"
] | null | null | null |
from remote.remote_util import RemoteMachineShellConnection
from .tuq import QueryTests
import time
from deepdiff import DeepDiff
from membase.api.exception import CBQError
import threading
class QueryAdvisorTests(QueryTests):
    def setUp(self):
        """Per-test setup: load travel-sample, wait for all docs and indexes,
        drop leftover secondary indexes, and purge old advisor sessions."""
        super(QueryAdvisorTests, self).setUp()
        self.log.info("============== QueryAdvisorTests setup has started ==============")
        self.index_to_be_created = self.input.param("index_to_be_created", '')
        if self.load_sample:
            self.rest.load_sample("travel-sample")
            init_time = time.time()
            while True:
                next_time = time.time()
                query_response = self.run_cbq_query("SELECT COUNT(*) FROM `" + self.bucket_name + "`")
                # NOTE(review): the '+' inside this f-string is literal text in the
                # log line — looks like a leftover concatenation typo; confirm intent.
                self.log.info(f"{self.bucket_name}+ count: {query_response['results'][0]['$1']}")
                # 31591 is the full travel-sample document count; give up after 600s.
                if query_response['results'][0]['$1'] == 31591:
                    break
                if next_time - init_time > 600:
                    break
                time.sleep(2)
            self.wait_for_all_indexes_online()
            # Drop every non-primary index that has no bucket_id (legacy namespace).
            list_of_indexes = self.run_cbq_query(query="select raw name from system:indexes WHERE indexes.bucket_id is missing")
            for index in list_of_indexes['results']:
                if index == "def_primary":
                    continue
                else:
                    self.run_cbq_query(query="drop index `travel-sample`.`%s`" % index)
        self.purge_all_sessions()
        self.log.info("============== QueryAdvisorTests setup has completed ==============")
        self.log_config_info()
    def suite_setUp(self):
        """Suite-level setup: delegates to QueryTests and logs the boundaries."""
        super(QueryAdvisorTests, self).suite_setUp()
        self.log.info("============== QueryAdvisorTests suite_setup has started ==============")
        self.log.info("============== QueryAdvisorTests suite_setup has completed ==============")
    def tearDown(self):
        """Per-test teardown: delete the travel-sample bucket if it exists."""
        self.log.info("============== QueryAdvisorTests tearDown has started ==============")
        travel_sample = self.get_bucket_from_name("travel-sample")
        if travel_sample:
            self.delete_bucket(travel_sample)
        self.log.info("============== QueryAdvisorTests tearDown has completed ==============")
        super(QueryAdvisorTests, self).tearDown()
    def suite_tearDown(self):
        """Suite-level teardown: logs boundaries, then delegates to QueryTests."""
        self.log.info("============== QueryAdvisorTests suite_tearDown has started ==============")
        self.log.info("============== QueryAdvisorTests suite_tearDown has completed ==============")
        super(QueryAdvisorTests, self).suite_tearDown()
def get_statements(self, advisor_results):
indexes = []
statements = []
for index in advisor_results['results'][0]['$1']['recommended_indexes']:
indexes.append(index['index'])
statements.append(index['statements'])
return indexes, statements
    def purge_all_sessions(self):
        """List every advisor session on the cluster and purge each one;
        fails the test if listing or purging errors out."""
        try:
            self.log.info("Purging all previous sessions")
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=self.master)
            for task in results['results'][0]['List']:
                session = task['tasks_cache']['name']
                purge = self.run_cbq_query(query="SELECT ADVISOR({{'action':'purge', 'session':'{0}'}}) as Purge".format(session), server=self.master)
        except Exception as e:
            self.log.error("List/Purge sessions failed: {0}".format(e))
            self.fail()
# Advisor on update statement
def test_query_string(self):
try:
advise = self.run_cbq_query(query="SELECT ADVISOR(\"UPDATE `{0}` SET city = 'San Francisco' WHERE lower(city) = 'sanfrancisco'\")".format(self.bucket_name), server=self.master)
simple_indexes, statements = self.get_statements(advise)
except Exception as e:
self.log.error("Advisor statement failed: {0}".format(e))
self.fail()
for index in simple_indexes:
self.run_cbq_query(query=index)
self.wait_for_all_indexes_online()
try:
results_with_advise_index = self.run_cbq_query(query="UPDATE `{0}` SET city = 'SF' WHERE lower(city) = 'san francisco'".format(self.bucket_name), server=self.master)
self.assertEqual(results_with_advise_index['status'], 'success')
self.assertEqual(results_with_advise_index['metrics']['mutationCount'], 938)
finally:
index_name = index.split("INDEX")[1].split("ON")[0].strip()
self.run_cbq_query("DROP INDEX `{0}`.{1}".format(self.bucket_name,index_name))
    # same query: query count should be > 1
    def test_same_query_array(self):
        """ADVISOR() over an array with the same query twice: the single
        recommended statement must report run_count == 2."""
        try:
            results_simple = self.run_cbq_query(query="SELECT ADVISOR([ \
                \"UPDATE `{0}` SET city = 'San Francisco' WHERE lower(city) = 'sanfrancisco'\", \
                \"UPDATE `{0}` SET city = 'San Francisco' WHERE lower(city) = 'sanfrancisco'\" \
            ])".format(self.bucket_name), server=self.master)
            simple_indexes, statements = self.get_statements(results_simple)
            self.assertEqual(statements[0][0]['run_count'], 2)
        except Exception as e:
            self.log.error("Advisor statement failed: {0}".format(e))
            self.fail()
    # similar query: statement count should be > 1
    def test_similar_query_array(self):
        """ADVISOR() over two similar queries (same shape, different literal):
        one recommended index should carry both statements."""
        try:
            results_simple = self.run_cbq_query(query="SELECT ADVISOR([ \
                \"UPDATE `{0}` SET city = 'San Francisco' WHERE lower(city) = 'sanfrancisco'\", \
                \"UPDATE `{0}` SET city = 'San Francisco' WHERE lower(city) = 'saintfrancois'\" \
            ])".format(self.bucket_name), server=self.master)
            simple_indexes, statements = self.get_statements(results_simple)
            self.assertEqual(len(statements[0]), 2)
        except Exception as e:
            self.log.error("Advisor statement failed: {0}".format(e))
            self.fail()
    def test_diff_query_array(self):
        """ADVISOR() over three unrelated queries: expects three distinct
        recommended indexes."""
        query1 = f"UPDATE `{self.bucket_name}` SET city = 'San Francisco' WHERE lower(city) = 'sanfrancisco'"
        query2 = f"SELECT name, city FROM `{self.bucket_name}` WHERE type = 'hotel' AND country = 'France'"
        query3 = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'lyon'"
        try:
            advise = self.run_cbq_query(query=f"SELECT ADVISOR([\"{query1}\", \"{query2}\", \"{query3}\"])", server=self.master)
            self.assertEqual(len(advise['results'][0]['$1']['recommended_indexes']), 3)
        except Exception as e:
            self.log.error("Advisor statement failed: {0}".format(e))
            self.fail()
    def test_query_output_array(self):
        """Feed ADVISOR() the last 6 statements from system:completed_requests
        and verify per-query run counts (3x paris, 2x lyon, 1x grenoble)."""
        # Run some queries
        query_paris = "SELECT airportname FROM `{0}` WHERE type = 'airport' and lower(city) = 'paris' AND country = 'France'".format(self.bucket_name)
        query_lyon = "SELECT airportname FROM `{0}` WHERE type ='airport' and lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name)
        query_grenoble = "SELECT airportname FROM `{0}` WHERE type = 'airport' and lower(city) = 'grenoble' AND country = 'France'".format(self.bucket_name)
        results = self.run_cbq_query(query=query_paris, server=self.master)
        results = self.run_cbq_query(query=query_paris, server=self.master)
        results = self.run_cbq_query(query=query_paris, server=self.master)
        results = self.run_cbq_query(query=query_lyon, server=self.master)
        results = self.run_cbq_query(query=query_lyon, server=self.master)
        results = self.run_cbq_query(query=query_grenoble, server=self.master)
        try:
            # NOTE(review): this .format() call is a no-op — the query string has
            # no '{0}' placeholder; harmless but probably leftover.
            results = self.run_cbq_query(query="select ADVISOR((SELECT RAW statement FROM system:completed_requests order by requestTime DESC limit 6)) as `Advise`".format(self.bucket_name), server=self.master)
            advises = results['results'][0]['Advise']
            query_count = dict()
            for index in advises['recommended_indexes']:
                for query in index['statements']:
                    query_count[query['statement']] = query['run_count']
            self.assertEqual(query_count[query_paris], 3)
            self.assertEqual(query_count[query_lyon], 2)
            self.assertEqual(query_count[query_grenoble], 1)
        except Exception as e:
            self.log.error("Advisor statement failed: {0}".format(e))
            self.fail()
    def test_query_array_arg_large(self,num=10):
        """ADVISOR() on an array of `num` copies of the same query:
        run_count must equal `num`."""
        query_paris = "SELECT airportname FROM `{0}` WHERE type = 'airport' and lower(city) = 'paris' AND country = 'France'".format(self.bucket_name)
        query_array = [query_paris] * num
        try:
            # NOTE(review): this interpolates the Python list repr into N1QL —
            # relies on repr's single quotes being accepted; confirm for large num.
            results = self.run_cbq_query(query="select ADVISOR({0}) as `Advise`".format(query_array), server=self.master)
            advises = results['results'][0]['Advise']
            self.assertEqual(advises['recommended_indexes'][0]['statements'][0]['run_count'], num)
            self.assertEqual(advises['recommended_indexes'][0]['statements'][0]['statement'], query_paris)
        except Exception as e:
            self.log.error("Advisor statement failed: {0}".format(e))
            self.fail()
    # get session recommendation for completed session
    def test_get_session_completed(self):
        """Start a 10s advisor session, run two queries, wait for it to
        complete, then 'get' and check recommended indexes exist."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '10s', 'query_count': 2 })", server=self.master)
            session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            # Wait for session to complete
            self.sleep(10)
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'get', 'session': '{0}'}})".format(session), server=self.master)
            self.assertTrue('recommended_indexes' in results['results'][0]['$1'][0][0], "There are no recommended index: {0}".format(results['results'][0]['$1'][0][0]))
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_get_session_stopped(self):
        """Start a 1h session, run two queries, stop the session early,
        then 'get' and check recommended indexes exist."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1h', 'query_count': 2 })", server=self.master)
            session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            self.sleep(3)
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'stop', 'session': '{0}'}})".format(session), server=self.master)
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'get', 'session': '{0}'}})".format(session), server=self.master)
            self.assertTrue('recommended_indexes' in results['results'][0]['$1'][0][0], "There are no recommended index: {0}".format(results['results'][0]['$1'][0][0]))
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_stop_session(self):
        """Stop a long-duration session and verify its tasks_cache entry:
        state 'cancelled' and delay rendered as '20m34.567s' (1234567ms)."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1234567ms', 'query_count': 2 })", server=self.master)
            session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'stop', 'session': '{0}'}})".format(session), server=self.master)
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=self.master)
            task = results['results'][0]['List'][0]['tasks_cache']
            self.log.info("Task cache is {0}".format(task))
            self.assertEqual(list(task.keys()), ['class', 'delay', 'id', 'name', 'node', 'results', 'state', 'subClass', 'submitTime'])
            self.assertEqual(task['state'], "cancelled")
            self.assertEqual(task['delay'], "20m34.567s")
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_abort_session(self):
        """Abort an active session: it must first show as 'scheduled' in the
        active list, and after abort the 'all' list must be empty."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '3600s', 'query_count': 200 })", server=self.master)
            session = results['results'][0]['$1']['session']
            # Check session is active
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'active'}) as List", server=self.master)
            task = results['results'][0]['List'][0]['tasks_cache']
            self.log.info("Task cache is {0}".format(task))
            self.assertEqual(task['state'], "scheduled")
            self.assertEqual(task['delay'], "1h0m0s")
            self.assertEqual(task['name'], session)
            # Abort session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'abort', 'session': '{0}'}})".format(session), server=self.master)
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", server=self.master)
            self.assertEqual(results['results'][0]['List'],[])
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_purge_session_completed(self):
        """Let a 5s session complete, then purge it and verify the 'all'
        session list is empty."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '5000ms', 'query_count': 2 })", server=self.master)
            session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            # Wait for session to complete
            self.sleep(5)
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=self.master)
            task = results['results'][0]['List'][0]['tasks_cache']
            self.assertEqual(task['state'], "completed")
            # Purge session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'purge', 'session': '{0}'}})".format(session), server=self.master)
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", server=self.master)
            self.assertEqual(results['results'][0]['List'],[])
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_purge_session_stopped(self):
        """Stop a long session (state 'cancelled'), then purge it and verify
        the 'all' session list is empty."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '5000s', 'query_count': 2 })", server=self.master)
            session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            # Stop session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'stop', 'session': '{0}'}})".format(session), server=self.master)
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=self.master)
            task = results['results'][0]['List'][0]['tasks_cache']
            self.assertEqual(task['state'], "cancelled")
            # Purge session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'purge', 'session': '{0}'}})".format(session), server=self.master)
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", server=self.master)
            self.assertEqual(results['results'][0]['List'],[])
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_purge_session_active(self):
        """Purge a still-active ('scheduled') session and verify the 'all'
        session list is empty afterwards."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '3600s', 'query_count': 200 })", server=self.master)
            session = results['results'][0]['$1']['session']
            # Check session is active
            list_all = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'active'}) as List", server=self.master)
            task = list_all['results'][0]['List'][0]['tasks_cache']
            self.log.info("Task cache is {0}".format(task))
            self.assertEqual(task['state'], "scheduled")
            self.assertEqual(task['delay'], "1h0m0s")
            self.assertEqual(task['name'], session)
            # Purge session
            purge = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'purge', 'session': '{0}'}})".format(session), server=self.master)
            list_all = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", server=self.master)
            self.assertEqual(list_all['results'][0]['List'],[])
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_list_session(self):
        """Create one active, one completed and one stopped session, then
        verify the default list shows all three with the expected states."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '99h', 'query_count': 2 })", server=self.master)
            active_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '50ms', 'query_count': 2 })", server=self.master)
            completed_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1600m', 'query_count': 2 })", server=self.master)
            stopped_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            # Stop session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'stop', 'session': '{0}'}})".format(stopped_session), server=self.master)
            # List sessions
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=self.master)
            all_sessions = dict()
            for task in results['results'][0]['List']:
                all_sessions[task['tasks_cache']['state']] = task['tasks_cache']['name']
            self.assertEqual(len(all_sessions), 3)
            self.assertEqual(all_sessions['scheduled'], active_session)
            self.assertEqual(all_sessions['cancelled'], stopped_session)
            self.assertEqual(all_sessions['completed'], completed_session)
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_list_session_active(self):
        """Same three-session setup as test_list_session, but list with
        status 'active': only the scheduled session must appear."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '99h', 'query_count': 2 })", server=self.master)
            active_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '50ms', 'query_count': 2 })", server=self.master)
            completed_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1600m', 'query_count': 2 })", server=self.master)
            stopped_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            # Stop session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'stop', 'session': '{0}'}})".format(stopped_session), server=self.master)
            # List ACTIVE sessions
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status':'active'}) as List", server=self.master)
            all_sessions = dict()
            for task in results['results'][0]['List']:
                all_sessions[task['tasks_cache']['state']] = task['tasks_cache']['name']
            self.assertEqual(len(all_sessions), 1)
            self.assertEqual(all_sessions['scheduled'], active_session)
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
    def test_list_session_completed(self):
        """Same three-session setup, but list with status 'completed':
        only the completed session must appear."""
        try:
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '99h', 'query_count': 2 })", server=self.master)
            active_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '50ms', 'query_count': 2 })", server=self.master)
            completed_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1600m', 'query_count': 2 })", server=self.master)
            stopped_session = results['results'][0]['$1']['session']
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            results = self.run_cbq_query(query="SELECT airportname FROM `{0}` WHERE lower(city) = 'lyon' AND country = 'France'".format(self.bucket_name), server=self.master)
            # Stop session
            results = self.run_cbq_query(query="SELECT ADVISOR({{'action': 'stop', 'session': '{0}'}})".format(stopped_session), server=self.master)
            # List COMPLETED sessions
            results = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status':'completed'}) as List", server=self.master)
            all_sessions = dict()
            for task in results['results'][0]['List']:
                all_sessions[task['tasks_cache']['state']] = task['tasks_cache']['name']
            self.assertEqual(len(all_sessions), 1)
            self.assertEqual(all_sessions['completed'], completed_session)
        except Exception as e:
            self.log.error("Advisor session failed: {0}".format(e))
            self.fail()
def test_list_session_all(self):
    """Listing with status 'all' must return the scheduled, cancelled and completed sessions."""
    try:
        # One session per eventual state: long-lived, quickly expiring, explicitly stopped.
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '99h', 'query_count': 2 })", server=self.master)
        active_session = start['results'][0]['$1']['session']
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '50ms', 'query_count': 2 })", server=self.master)
        completed_session = start['results'][0]['$1']['session']
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1600m', 'query_count': 2 })", server=self.master)
        stopped_session = start['results'][0]['$1']['session']
        airport_query = f"SELECT airportname FROM `{self.bucket_name}` WHERE lower(city) = 'lyon' AND country = 'France'"
        self.run_cbq_query(query=airport_query, server=self.master)
        self.run_cbq_query(query=airport_query, server=self.master)
        # Stop session
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'stop', 'session': '{stopped_session}'}})", server=self.master)
        # List ALL sessions
        listing = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status':'all'}) as List", server=self.master)
        sessions_by_state = {task['tasks_cache']['state']: task['tasks_cache']['name']
                             for task in listing['results'][0]['List']}
        self.assertEqual(len(sessions_by_state), 3)
        self.assertEqual(sessions_by_state['scheduled'], active_session)
        self.assertEqual(sessions_by_state['cancelled'], stopped_session)
        self.assertEqual(sessions_by_state['completed'], completed_session)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_start_session_duration_value(self):
    """All duration spellings equal to one hour should normalize to delay '1h0m0s'."""
    one_hour_spellings = ['3600000000000ns', '3600000000us', '3600000ms', '3600s', '60m', '1h']
    try:
        for duration in one_hour_spellings:
            start = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'start', 'duration': '{duration}'}})", server=self.master)
            session = start['results'][0]['$1']['session']
            listing = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=self.master)
            self.assertEqual(listing['results'][0]['List'][0]['tasks_cache']['delay'], '1h0m0s')
            # Abort so the next iteration lists a single session
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'abort', 'session':'{session}'}}) as Abort", server=self.master)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_duration_completed(self):
    """All duration spellings equal to 1.8s should complete on their own with delay '1.8s'."""
    short_spellings = ['1800000000ns', '1800000us', '1800ms', '1.8s', '0.03m', '0.0005h']
    try:
        for duration in short_spellings:
            start = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'start', 'duration': '{duration}'}})", server=self.master)
            session = start['results'][0]['$1']['session']
            # Wait past the 1.8s duration so the session completes by itself
            self.sleep(3)
            listing = self.run_cbq_query(query="SELECT ADVISOR({'action':'list','status':'completed'}) as List", server=self.master)
            task = listing['results'][0]['List'][0]['tasks_cache']
            self.assertEqual(task['delay'], '1.8s')
            self.assertEqual(task['name'], session)
            self.assertEqual(task['state'], "completed")
            # Purge so the next iteration sees exactly one completed session
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'purge', 'session':'{session}'}}) as Purge", server=self.master)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_response_below(self):
    """With a very small 'response' threshold the queries are captured
    (run_count of the first recommended statement is 1)."""
    small_thresholds = ['100000000ns', '100000us', '100ms', '0.1s', '0.000027h']
    query_lyon = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'lyon' AND country = 'France'"
    query_grenoble = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'grenoble' AND country = 'France'"
    query_nice = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'nice' AND country = 'France'"
    try:
        for response in small_thresholds:
            start = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'start', 'duration': '60s', 'response': '{response}'}})", server=self.master)
            session = start['results'][0]['$1']['session']
            for monitored in (query_lyon, query_grenoble, query_nice):
                self.run_cbq_query(query=monitored, server=self.master)
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
            get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
            run_count = get['results'][0]['Get'][0][0]['recommended_indexes'][0]['statements'][0]['run_count']
            self.assertEqual(run_count, 1)
            # Purge between iterations so session state stays isolated
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'purge', 'session':'{session}'}}) as Purge", server=self.master)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_response_above(self):
    """With a huge 'response' threshold nothing is captured: advise is [[]]."""
    large_thresholds = ['9000000000000ns', '9000000000us', '9000000ms', '9000s', '0.25h']
    query_lyon = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'lyon' AND country = 'France'"
    query_grenoble = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'grenoble' AND country = 'France'"
    query_nice = f"SELECT airportname FROM `{self.bucket_name}` WHERE type = 'airport' AND lower(city) = 'nice' AND country = 'France'"
    try:
        for response in large_thresholds:
            start = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'start', 'duration': '60s', 'response': '{response}'}})", server=self.master)
            session = start['results'][0]['$1']['session']
            for monitored in (query_lyon, query_grenoble, query_nice):
                self.run_cbq_query(query=monitored, server=self.master)
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
            get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
            # No query was slow enough to be recorded
            self.assertEqual(get['results'][0]['Get'][0], [[]])
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'purge', 'session':'{session}'}}) as Purge", server=self.master)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_profile(self):
    """A session started with a 'profile' only records queries run by that user."""
    self.users = [{"id": "johnDoe", "name": "Jonathan Downing", "password": "password1"}]
    self.create_users()
    user_id = self.users[0]['id']
    user_pwd = self.users[0]['password']
    self.run_cbq_query(query=f"GRANT admin to {user_id}", server=self.master)
    profiled_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    other_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "grenoble" AND country = "France"'
    try:
        start = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'start', 'duration': '180s', 'profile': '{user_id}'}})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run the monitored query twice as the profiled user
        self.run_cbq_query(query=profiled_query, username=user_id, password=user_pwd, server=self.master)
        self.run_cbq_query(query=profiled_query, username=user_id, password=user_pwd, server=self.master)
        # Run a different query as the current (non-profiled) user
        self.run_cbq_query(query=other_query, server=self.master)
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # Only the profiled user's query may appear in the advise
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                self.assertEqual(statement['statement'], profiled_query)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_all(self):
    """Exercise profile + query_count + response options together, then purge."""
    self.users = [{"id": "joaoDoe", "name": "Joao Downing", "password": "password1"}]
    self.create_users()
    user_id = self.users[0]['id']
    user_pwd = self.users[0]['password']
    self.run_cbq_query(query=f"GRANT admin to {user_id}", server=self.master)
    profiled_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    other_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "grenoble" AND country = "France"'
    try:
        start = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'start', 'duration':'40m', 'profile': '{user_id}', 'query_count':5, 'response':'50ms'}})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Two runs as the profiled user
        for _ in range(2):
            self.run_cbq_query(query=profiled_query, username=user_id, password=user_pwd, server=self.master)
        # One run as the current user (not the profiled user)
        self.run_cbq_query(query=other_query, server=self.master)
        # Four more runs as the profiled user
        for _ in range(4):
            self.run_cbq_query(query=profiled_query, username=user_id, password=user_pwd, server=self.master)
        # Stop and get session
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # Check advise: only the profiled query, capped at 5 by query_count
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                self.assertEqual(statement['statement'], profiled_query)
                self.assertEqual(statement['run_count'], 5)
        # Purge and list session
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'purge', 'session':'{session}'}}) as Get", server=self.master)
        list_all = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", server=self.master)
        self.assertEqual(list_all['results'][0]['List'], [])
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_cbo(self):
    """With optimizer statistics present, advise should include an
    update_statistics recommendation alongside the index."""
    advise_index = "CREATE INDEX adv_lower_city_country_type ON `travel-sample`(lower(`city`),`country`) WHERE `type` = 'airport'"
    advise_stats = "UPDATE STATISTICS FOR `travel-sample`(lower(`city`), `country`, `type`)"
    monitored_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    # update stats to ensure CBO is used
    self.run_cbq_query(query=f"update statistics for `{self.bucket_name}`(type)", server=self.master)
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=self.master)
        session = start['results'][0]['$1']['session']
        self.run_cbq_query(query=monitored_query, server=self.master)
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # Check advise
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertEqual(index['index'], advise_index)
            self.assertEqual(index['update_statistics'], advise_stats)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_query_txn(self):
    """Queries executed inside a transaction are still captured by a session,
    whether the transaction is rolled back or committed."""
    monitored_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    try:
        for txn_end in ('ROLLBACK WORK', 'COMMIT'):
            start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '15m'})", server=self.master)
            session = start['results'][0]['$1']['session']
            # Run query in transaction
            begin = self.run_cbq_query(query="BEGIN WORK", server=self.master)
            query_params = {'txid': begin['results'][0]['txid']}
            self.run_cbq_query(query=monitored_query, query_params=query_params, server=self.master)
            self.run_cbq_query(query=txn_end, query_params=query_params, server=self.master)
            # Stop and check session advise
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
            get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
            for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
                for statement in index['statements']:
                    self.assertEqual(statement['statement'], monitored_query)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_negative_txn(self):
    """An ADVISOR session cannot be started inside a transaction.

    Fixes two defects in the original:
    - the transaction opened by BEGIN WORK was never closed, leaking an open
      transaction into subsequent tests; it is now rolled back in a finally.
    - the 'else: self.fail(...)' branch was unreachable (the try block always
      raises, either CBQError or the AssertionError from self.fail) and has
      been removed.
    """
    results = self.run_cbq_query(query="BEGIN WORK", server=self.master)
    query_params = {'txid': results['results'][0]['txid']}
    error = "advisor function is not supported within the transaction"
    try:
        self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '15m'})", query_params=query_params, server=self.master)
        self.fail("Start session did not fail. Error expected: {0}".format(error))
    except CBQError as ex:
        # The expected message must appear inside the error text
        self.assertTrue(str(ex).find(error) > 0)
    finally:
        # Always close the transaction so it does not leak into other tests
        self.run_cbq_query(query="ROLLBACK WORK", query_params=query_params, server=self.master)
def test_session_query_count(self):
    """query_count=6 caps capture: expect run counts 3*lyon + 2*grenoble + 1*nice."""
    query_lyon = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    query_grenoble = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "grenoble" AND country = "France"'
    query_nice = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "nice" AND country = "France"'
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '15m', 'query_count': 6})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run 9 queries; order matters because only the first 6 fall under query_count
        run_order = (query_lyon, query_grenoble, query_nice,
                     query_lyon, query_grenoble, query_lyon,
                     query_nice, query_grenoble, query_nice)
        for monitored in run_order:
            self.run_cbq_query(query=monitored, server=self.master)
        # Stop and check session advise. We should only see 6 queries count = 3*lyon + 2*grenoble + 1*nice
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        run_counts = {}
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                run_counts[statement['statement']] = statement['run_count']
        self.assertEqual(run_counts[query_lyon], 3)
        self.assertEqual(run_counts[query_grenoble], 2)
        self.assertEqual(run_counts[query_nice], 1)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_get_active_session(self):
    """'get' on a still-active session returns no advise; 'abort' removes it entirely."""
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '5000s', 'query_count': 2 })", server=self.master)
        session = start['results'][0]['$1']['session']
        airport_query = f"SELECT airportname FROM `{self.bucket_name}` WHERE lower(city) = 'lyon' AND country = 'France'"
        self.run_cbq_query(query=airport_query, server=self.master)
        self.run_cbq_query(query=airport_query, server=self.master)
        # Get session: still active, so no advise yet
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'get', 'session': '{session}'}}) as Get", server=self.master)
        self.assertEqual(get['results'][0]['Get'], [])
        # Abort session: it should disappear from every listing
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'abort', 'session': '{session}'}})", server=self.master)
        listing = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", server=self.master)
        self.assertEqual(listing['results'][0]['List'], [])
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_negative_query_syntax_error(self):
    """ADVISOR on a malformed statement reports the syntax error instead of advise."""
    query_syntax = f'SELECT airportname FROM `{self.bucket_name}` WERE type = \\"airport\\"'
    error = "syntax error - at type"
    try:
        advise = self.run_cbq_query(query=f"SELECT ADVISOR(\"{query_syntax}\") as Advisor", server=self.master)
        first_error = advise["results"][0]["Advisor"]["errors"][0]
        self.assertEqual(first_error["error"], error)
        self.assertEqual(first_error["run_count"], 1)
        # The reported statement comes back without the escaping backslashes
        self.assertEqual(first_error["statement"], query_syntax.replace('\\', ''))
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_negative_invalid_arg(self):
    """Unknown keys in the ADVISOR argument object must be rejected."""
    query = "SELECT ADVISOR({'action': 'start', 'duration': '10s', 'invalid': 10});"
    error = "Error evaluating projection. - cause: Invalid arguments to Advisor() function: [invalid]"
    try:
        self.run_cbq_query(query=query, server=self.master)
        self.fail(f"Start session did not fail. Error expected: {error}")
    except CBQError as ex:
        # find() > 0: the expected message must appear inside the error text
        self.assertTrue(str(ex).find(error) > 0)
def test_negative_missing_arg(self):
    """Starting a session without the mandatory 'duration' must fail."""
    query = "SELECT ADVISOR({'action': 'start', 'response': '10s'});"
    error = "Error evaluating projection. - cause: advisor() not valid argument for 'duration'"
    try:
        self.run_cbq_query(query=query, server=self.master)
        self.fail(f"Start session did not fail. Error expected: {error}")
    except CBQError as ex:
        self.assertTrue(str(ex).find(error) > 0)
def test_negative_array(self):
    """ADVISOR accepts exactly one argument; passing two must fail."""
    query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    error = "Number of arguments to function ADVISOR must be 1. - at "
    try:
        self.run_cbq_query(query=f"SELECT ADVISOR('{query}','{query}')", server=self.master)
        self.fail(f"Start session did not fail. Error expected: {error}")
    except CBQError as ex:
        self.assertTrue(str(ex).find(error) > 0)
def test_negative_invalid_value(self):
    """Each invalid option value must raise a CBQError with the expected message.

    Bug fix: the 'else' branch referenced the undefined name 'error', raising
    NameError whenever a query unexpectedly succeeded; it now reports the
    expected message for the action under test (action['msg']).
    """
    invalid_actions = [
        {'cmd': {'action':'start', 'duration':'two'}, 'msg': 'Error evaluating projection. - cause: time: invalid duration two'},
        {'cmd': {'action':'start', 'duration':'1hr'}, 'msg': 'Error evaluating projection. - cause: time: unknown unit hr in duration 1hr'},
        {'cmd': {'action':'start', 'duration':'1h', 'response':'nul'}, 'msg': 'Error evaluating projection. - cause: time: invalid duration nul'},
        {'cmd': {'action':'start', 'duration':'1h', 'response':'1sec'}, 'msg': 'Error evaluating projection. - cause: time: unknown unit sec in duration 1sec'},
        {'cmd': {'action':'start', 'duration':'1h', 'query_count':'ten'}, 'msg': 'Error evaluating projection. - cause: advisor() not valid argument for \'query_count\''},
        {'cmd': {'action':'start', 'duration':'1h', 'profile':9999}, 'msg': 'Error evaluating projection. - cause: advisor() not valid argument for \'profile\''}]
    for action in invalid_actions:
        try:
            self.run_cbq_query(query=f"SELECT ADVISOR({action['cmd']})", server=self.master)
        except CBQError as ex:
            self.assertTrue(str(ex).find(action['msg']) > 0)
        else:
            # was: self.fail(...format(error)) with 'error' undefined -> NameError
            self.fail("There were no errors. Error expected: {0}".format(action['msg']))
def test_negative_list(self):
    """'stopped' is not an accepted value for the list 'status' option."""
    error = "Error evaluating projection. - cause: advisor() not valid argument for 'status'"
    try:
        self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status':'stopped'})", server=self.master)
        self.fail(f"Start session did not fail. Error expected: {error}")
    except CBQError as ex:
        self.assertTrue(str(ex).find(error) > 0)
def test_negative_missing_session(self):
    """'get' without a 'session' argument must fail."""
    error = "Error evaluating projection. - cause: advisor() not valid argument for 'session'"
    try:
        self.run_cbq_query(query="SELECT ADVISOR({'action':'get'})", server=self.master)
        self.fail(f"Start session did not fail. Error expected: {error}")
    except CBQError as ex:
        self.assertTrue(str(ex).find(error) > 0)
def test_negative_invalid_session(self):
    """A numeric session id must be rejected for every session-scoped action."""
    error = "Error evaluating projection. - cause: advisor() not valid argument for 'session'"
    for action in ('get', 'purge', 'stop', 'abort'):
        try:
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'{action}', 'session':123456}})", server=self.master)
            self.fail(f"Start session did not fail. Error expected: {error}")
        except CBQError as ex:
            self.assertTrue(str(ex).find(error) > 0)
def run_async_query(self, query, username, password, server):
    """Worker-thread helper: run a query that is expected to be cancelled."""
    outcome = self.run_cbq_query(query=query, username=username, password=password, server=server)
    # A cancelled request reports status 'stopped'
    self.assertEqual(outcome['status'], "stopped")
def test_session_query_cancel(self):
    # A query cancelled via system:active_requests should still be captured
    # by an active advisor session.
    long_query = f"SELECT DISTINCT MIN(aport.airportname) AS Airport__Name, MIN(lmark.name) AS Landmark_Name, MIN(aport.tz) AS Landmark_Time FROM `{self.bucket_name}` aport LEFT JOIN `travel-sample` lmark ON aport.city = lmark.city AND lmark.country = 'United States' AND lmark.type = 'landmark' WHERE aport.type = 'airport' GROUP BY lmark.name ORDER BY lmark.name LIMIT 3"
    self.users = [{"id": "jimDoe", "name": "Jim Downing", "password": "password1"}]
    self.create_users()
    role = "admin"
    user_id = self.users[0]['id']
    user_pwd = self.users[0]['password']
    grant = self.run_cbq_query(query=f"GRANT {role} to {user_id}",server=self.master)
    # Deleting the user's row from system:active_requests cancels the request
    cancel_query = f"DELETE FROM system:active_requests WHERE users = '{user_id}'"
    # Create index for join query
    create_index = f"CREATE INDEX `def_city` ON `{self.bucket_name}`(`city`)"
    results = self.run_cbq_query(query=create_index,server=self.master)
    # run_async_query asserts the request ends with status 'stopped'
    th = threading.Thread(target=self.run_async_query,args=(long_query, user_id, user_pwd, self.master))
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1h', 'query_count': 2 })", server=self.master)
        session = start['results'][0]['$1']['session']
        # Spawn query in a thread
        th.start()
        # Cancel query (sleep gives the long query time to appear in active_requests)
        self.sleep(1)
        cancel = self.run_cbq_query(query=cancel_query,username=user_id, password=user_pwd, server=self.master)
        th.join()
        # Stop and get session advise
        stop = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'stop', 'session': '{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'get', 'session': '{session}'}}) as Get", server=self.master)
        # Even though it was cancelled, the long query should be in the advise
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                self.assertEqual(statement['statement'], long_query)
    except Exception as e:
        self.log.error("Advisor session failed: {0}".format(e))
        self.fail()
def test_session_query_timeout(self):
    """A query that hits its timeout is still captured by the advisor session."""
    long_query = f"SELECT DISTINCT MIN(aport.airportname) AS Airport__Name, MIN(lmark.name) AS Landmark_Name, MIN(aport.tz) AS Landmark_Time FROM `{self.bucket_name}` aport LEFT JOIN `travel-sample` lmark ON aport.city = lmark.city AND lmark.country = 'United States' AND lmark.type = 'landmark' WHERE aport.type = 'airport' GROUP BY lmark.name ORDER BY lmark.name LIMIT 3"
    # Create index for join query
    self.run_cbq_query(query=f"CREATE INDEX `def_city` ON `{self.bucket_name}`(`city`)", server=self.master)
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1h', 'query_count': 2 })", server=self.master)
        session = start['results'][0]['$1']['session']
        try:
            self.run_cbq_query(query=long_query, query_params={'timeout': '500ms'}, server=self.master)
        except CBQError as ex:
            # The join is slow enough for the 500ms timeout to fire
            self.assertTrue(str(ex).find("Timeout 500ms exceeded") > 0)
        # Stop and get session advise
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'stop', 'session': '{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'get', 'session': '{session}'}}) as Get", server=self.master)
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                self.assertEqual(statement['statement'], long_query)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_collection(self):
    """Advise for a collection-qualified query names the fully-qualified keyspace."""
    advise_index1 = "CREATE INDEX adv_lower_city_country ON `default`:`travel-sample`.`inventory`.`airport`(lower(`city`),`country`)"
    advise_index2 = "CREATE INDEX adv_country_lower_city ON `default`:`travel-sample`.`inventory`.`airport`(`country`,lower(`city`))"
    monitored_query = f'SELECT airportname FROM `{self.bucket_name}`.inventory.airport WHERE lower(city) = "lyon" AND country = "France"'
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=self.master)
        session = start['results'][0]['$1']['session']
        self.run_cbq_query(query=monitored_query, server=self.master)
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # Check advise: either key order of the composite index is acceptable
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertIn(index['index'], (advise_index1, advise_index2))
            self.assertEqual(index['statements'][0]['statement'], monitored_query)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_collection_context(self):
    """Session actions work the same under any query_context they are issued with."""
    advise_index1 = "CREATE INDEX adv_lower_city_country ON `default`:`travel-sample`.`inventory`.`airport`(lower(`city`),`country`)"
    advise_index2 = "CREATE INDEX adv_country_lower_city ON `default`:`travel-sample`.`inventory`.`airport`(`country`,lower(`city`))"
    monitored_query = 'SELECT airportname FROM airport WHERE lower(city) = "lyon" AND country = "France"'
    for context in ("", f"default:`{self.bucket_name}`.inventory", f"default:`{self.bucket_name}`._default"):
        try:
            start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", query_context=context, server=self.master)
            session = start['results'][0]['$1']['session']
            # Run query in bucket.collection context
            self.run_cbq_query(query=monitored_query, query_context=f"default:`{self.bucket_name}`.inventory", server=self.master)
            self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", query_context=context, server=self.master)
            get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", query_context=context, server=self.master)
            # Check advise: either key order of the composite index is acceptable
            for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
                self.assertIn(index['index'], (advise_index1, advise_index2))
                self.assertEqual(index['statements'][0]['statement'], monitored_query)
        except Exception as e:
            self.log.error(f"Advisor session failed: {e}")
            self.fail()
def test_session_collection_join(self):
    """Advise for a collection self-join recommends an index on the landmark collection."""
    advise_index1 = "CREATE INDEX adv_country_city ON `default`:`travel-sample`.`inventory`.`landmark`(`country`,`city`)"
    advise_index2 = "CREATE INDEX adv_city_country ON `default`:`travel-sample`.`inventory`.`landmark`(`city`,`country`)"
    join_query = "SELECT DISTINCT MIN(aport.airportname) AS Airport_Name, MIN(lmark.name) AS Landmark_Name, MIN(aport.tz) AS Landmark_Time FROM `travel-sample`.inventory.landmark aport LEFT JOIN `travel-sample`.inventory.landmark lmark ON aport.city = lmark.city AND lmark.country = 'United States' GROUP BY lmark.name ORDER BY lmark.name LIMIT 3"
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run query in bucket.collection context
        self.run_cbq_query(query=join_query, query_context=f"default:`{self.bucket_name}`.inventory", server=self.master)
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # Check advise: either key order of the composite index is acceptable
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertIn(index['index'], (advise_index1, advise_index2))
            self.assertEqual(index['statements'][0]['statement'], join_query)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_negative_authorization(self):
    """A user with only query_select must not be able to start or list sessions."""
    self.users = [{"id": "jackDoe", "name": "Jack Downing", "password": "password1"}]
    self.create_users()
    user_id = self.users[0]['id']
    user_pwd = self.users[0]['password']
    self.run_cbq_query(query=f"GRANT query_select on `{self.bucket_name}` to {user_id}", server=self.master)
    session_queries = ["SELECT ADVISOR({'action': 'start', 'duration': '1h', 'query_count': 2 })", "SELECT ADVISOR({'action': 'list', 'status': 'all'})"]
    error = "User does not have credentials to run queries accessing the system tables. Add role query_system_catalog to allow the query to run."
    for query in session_queries:
        try:
            self.run_cbq_query(query=query, username=user_id, password=user_pwd, server=self.master)
            self.fail(f"Start session did not fail. Error expected: {error}")
        except CBQError as ex:
            self.assertTrue(str(ex).find(error) > 0)
def test_session_authorization(self):
    """query_system_catalog + query_select allow the full session lifecycle."""
    self.users = [{"id": "janneDoe", "name": "Janne Downing", "password": "password1"}]
    self.create_users()
    user_id = self.users[0]['id']
    user_pwd = self.users[0]['password']
    # Grant the two roles needed: system-catalog access plus select on the bucket
    self.run_cbq_query(query=f"GRANT query_system_catalog to {user_id}", server=self.master)
    self.run_cbq_query(query=f"GRANT query_select on `{self.bucket_name}` to {user_id}", server=self.master)
    monitored_query = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    try:
        # Start session as authorized user
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1h', 'query_count': 2 })", username=user_id, password=user_pwd, server=self.master)
        session = start['results'][0]['$1']['session']
        # Run query as other user
        self.run_cbq_query(query=monitored_query, server=self.master)
        self.sleep(2)
        # Stop and get session advise as authorized user
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'stop', 'session': '{session}'}}) as Stop", username=user_id, password=user_pwd, server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'get', 'session': '{session}'}}) as Get", username=user_id, password=user_pwd, server=self.master)
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                self.assertEqual(statement['statement'], monitored_query)
        # Purge and list sessions as authorized user
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'purge', 'session': '{session}'}})", username=user_id, password=user_pwd, server=self.master)
        sessions = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", username=user_id, password=user_pwd, server=self.master)
        self.assertEqual(sessions['results'][0]['List'], [])
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        self.fail()
def test_session_authorization_other(self):
    """Advisor session started by the current (admin) user can be stopped,
    read, purged and listed by a different, limited user holding
    query_system_catalog + query_select.
    """
    self.users = [{"id": "jeanDoe", "name": "Jean Downing", "password": "password1"}]
    self.create_users()
    role_ctlg = "query_system_catalog"
    role_qury = "query_select"
    user_id = self.users[0]['id']
    user_pwd = self.users[0]['password']
    # Grant the roles the limited user needs to manage the session.
    self.run_cbq_query(query=f"GRANT {role_ctlg} to {user_id}", server=self.master)
    self.run_cbq_query(query=f"GRANT {role_qury} on `{self.bucket_name}` to {user_id}", server=self.master)
    query1 = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    try:
        # Start session as the current (admin) user.
        start = self.run_cbq_query(query="SELECT ADVISOR({'action': 'start', 'duration': '1h', 'query_count': 2 })", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run the workload query as the current user.
        self.run_cbq_query(query=query1, server=self.master)
        self.sleep(2)
        # Stop and fetch session advice as the authorized user.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'stop', 'session': '{session}'}}) as Stop", username=user_id, password=user_pwd, server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'get', 'session': '{session}'}}) as Get", username=user_id, password=user_pwd, server=self.master)
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            for statement in index['statements']:
                self.assertEqual(statement['statement'], query1)
        # Purge and list sessions as the authorized user.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action': 'purge', 'session': '{session}'}})", username=user_id, password=user_pwd, server=self.master)
        sessions = self.run_cbq_query(query="SELECT ADVISOR({'action':'list', 'status': 'all'}) as List", username=user_id, password=user_pwd, server=self.master)
        self.assertEqual(sessions['results'][0]['List'], [])
    except Exception as e:
        self.log.error("Advisor session failed: {0}".format(e))
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
def test_session_delete_completed_req(self):
    """If system:completed_requests is wiped mid-session, the advisor has
    nothing left to analyze, so the session's 'get' must return empty advice.
    """
    query1 = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run a query the advisor would normally pick up.
        self.run_cbq_query(query=query1, server=self.master)
        # Delete the completed requests the session would advise on.
        self.run_cbq_query(query="DELETE FROM system:completed_requests", server=self.master)
        # Stop and get the session.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # Advice must be empty: the evidence was deleted before the stop.
        advise = get['results'][0]['Get'][0]
        self.assertEqual(advise, [[]])
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
def test_session_drop_collection(self):
    """Dropping the target collection mid-session must not lose the advice
    already gathered for queries that ran against it.
    """
    advise_index1 = "CREATE INDEX adv_country_city ON `default`:`travel-sample`.`inventory`.`landmark`(`country`,`city`)"
    advise_index2 = "CREATE INDEX adv_city_country ON `default`:`travel-sample`.`inventory`.`landmark`(`city`,`country`)"
    query1 = "SELECT DISTINCT MIN(aport.airportname) AS Airport_Name, MIN(lmark.name) AS Landmark_Name, MIN(aport.tz) AS Landmark_Time FROM `travel-sample`.inventory.landmark aport LEFT JOIN `travel-sample`.inventory.landmark lmark ON aport.city = lmark.city AND lmark.country = 'United States' GROUP BY lmark.name ORDER BY lmark.name LIMIT 3"
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run query in bucket.collection context.
        self.run_cbq_query(query=query1, server=self.master)
        # Drop the collection the advised query targeted.
        self.run_cbq_query(query="DROP COLLECTION `travel-sample`.`inventory`.`landmark`", server=self.master)
        # Stop and get the session.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # The advice must still reference the (now dropped) collection.
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertTrue(index['index'] in (advise_index1, advise_index2))
            self.assertEqual(index['statements'][0]['statement'], query1)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
def test_session_drop_scope(self):
    """Dropping the target scope mid-session must not lose the advice
    already gathered for queries that ran against it.
    """
    advise_index1 = "CREATE INDEX adv_country_city ON `default`:`travel-sample`.`inventory`.`landmark`(`country`,`city`)"
    advise_index2 = "CREATE INDEX adv_city_country ON `default`:`travel-sample`.`inventory`.`landmark`(`city`,`country`)"
    query1 = "SELECT DISTINCT MIN(aport.airportname) AS Airport_Name, MIN(lmark.name) AS Landmark_Name, MIN(aport.tz) AS Landmark_Time FROM `travel-sample`.inventory.landmark aport LEFT JOIN `travel-sample`.inventory.landmark lmark ON aport.city = lmark.city AND lmark.country = 'United States' GROUP BY lmark.name ORDER BY lmark.name LIMIT 3"
    try:
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=self.master)
        session = start['results'][0]['$1']['session']
        # Run query in bucket.collection context.
        self.run_cbq_query(query=query1, server=self.master)
        # Drop the scope containing the advised collection.
        self.run_cbq_query(query="DROP SCOPE `travel-sample`.`inventory`", server=self.master)
        # Stop and get the session.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=self.master)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=self.master)
        # The advice must still reference the (now dropped) scope's collection.
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertTrue(index['index'] in (advise_index1, advise_index2))
            self.assertEqual(index['statements'][0]['statement'], query1)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
def test_session_kill_index(self):
    """Killing the indexer process on node1 must not prevent stopping the
    session from node2 and reading the collected advice back from node1.
    """
    advise_index1 = "CREATE INDEX adv_country_lower_city_type ON `travel-sample`(`country`,lower((`city`))) WHERE `type` = 'airport'"
    advise_index2 = "CREATE INDEX adv_lower_city_country_type ON `travel-sample`(lower((`city`)),`country`) WHERE `type` = 'airport'"
    query1 = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    node1 = self.servers[0]
    node2 = self.servers[1]
    try:
        # Start session on node1.
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=node1)
        session = start['results'][0]['$1']['session']
        # Run the workload query on node1.
        self.run_cbq_query(query=query1, server=node1)
        # Kill the index service on node1.
        remote_client = RemoteMachineShellConnection(node1)
        remote_client.terminate_process(process_name="indexer")
        self.sleep(3)
        # Stop the session from node2.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=node2)
        # Read the session back from node1.
        self.sleep(1)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=node1)
        # Verify advice survived the indexer kill: one run of query1.
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertTrue(index['index'] in (advise_index1, advise_index2))
            self.assertEqual(index['statements'][0]['statement'], query1)
            self.assertEqual(index['statements'][0]['run_count'], 1)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
def test_session_kill_n1ql(self):
    """After stopping the session from node2 and killing cbq-engine on
    node1, neither node should still list any advisor session.
    """
    query1 = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    node1 = self.servers[0]
    node2 = self.servers[1]
    try:
        # Start session on node1.
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=node1)
        session = start['results'][0]['$1']['session']
        # Run the workload query on node1.
        self.run_cbq_query(query=query1, server=node1)
        # Kill the n1ql service on node1.
        remote_client = RemoteMachineShellConnection(node1)
        remote_client.terminate_process(process_name="cbq-engine")
        self.sleep(3)
        # Stop the session from node2.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=node2)
        # List sessions on both nodes (plain strings: no placeholders needed).
        self.sleep(1)
        list_node1 = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=node1)
        list_node2 = self.run_cbq_query(query="SELECT ADVISOR({'action':'list'}) as List", server=node2)
        # No session should remain on either node.
        self.assertEqual(list_node1['results'][0]['List'], [])
        self.assertEqual(list_node2['results'][0]['List'], [])
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
def test_session_multi_node(self):
    """An advisor session started on node1 must aggregate the same query run
    on both node1 and node2 (run_count == 2), and be readable from node2.
    """
    advise_index1 = "CREATE INDEX adv_country_lower_city_type ON `travel-sample`(`country`,lower((`city`))) WHERE `type` = 'airport'"
    advise_index2 = "CREATE INDEX adv_lower_city_country_type ON `travel-sample`(lower((`city`)),`country`) WHERE `type` = 'airport'"
    query1 = f'SELECT airportname FROM `{self.bucket_name}` WHERE type = "airport" AND lower(city) = "lyon" AND country = "France"'
    node1 = self.servers[0]
    node2 = self.servers[1]
    try:
        # Start session on node1.
        start = self.run_cbq_query(query="SELECT ADVISOR({'action':'start', 'duration':'40m'})", server=node1)
        session = start['results'][0]['$1']['session']
        # Run the same query on node2 then node1.
        self.run_cbq_query(query=query1, server=node2)
        self.run_cbq_query(query=query1, server=node1)
        # Stop the session on node1, then read it back from node2.
        self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'stop', 'session':'{session}'}}) as Stop", server=node1)
        get = self.run_cbq_query(query=f"SELECT ADVISOR({{'action':'get', 'session':'{session}'}}) as Get", server=node2)
        # Both executions must be counted against the single statement.
        for index in get['results'][0]['Get'][0][0]['recommended_indexes']:
            self.assertTrue(index['index'] in (advise_index1, advise_index2))
            self.assertEqual(index['statements'][0]['statement'], query1)
            self.assertEqual(index['statements'][0]['run_count'], 2)
    except Exception as e:
        self.log.error(f"Advisor session failed: {e}")
        # Include the cause in the failure message instead of a bare fail().
        self.fail(f"Advisor session failed: {e}")
| 69.149628
| 378
| 0.620254
| 9,153
| 74,405
| 4.912706
| 0.04097
| 0.05582
| 0.051372
| 0.077058
| 0.891718
| 0.875506
| 0.853667
| 0.834364
| 0.809879
| 0.802162
| 0
| 0.015757
| 0.220388
| 74,405
| 1,075
| 379
| 69.213953
| 0.759426
| 0.029904
| 0
| 0.700986
| 0
| 0.07667
| 0.365428
| 0.063339
| 0
| 0
| 0
| 0
| 0.105148
| 1
| 0.062432
| false
| 0.035049
| 0.006572
| 0
| 0.071194
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c56f7a314e27a30d1813e47aba616e1e5f65dcae
| 24,386
|
py
|
Python
|
src/graph_transpiler/webdnn/frontend/tensorflow/ops/gen_array_ops.py
|
gunpowder78/webdnn
|
c659ea49007f91d178ce422a1eebe289516a71ee
|
[
"MIT"
] | 1
|
2018-07-26T13:52:21.000Z
|
2018-07-26T13:52:21.000Z
|
src/graph_transpiler/webdnn/frontend/tensorflow/ops/gen_array_ops.py
|
gunpowder78/webdnn
|
c659ea49007f91d178ce422a1eebe289516a71ee
|
[
"MIT"
] | null | null | null |
src/graph_transpiler/webdnn/frontend/tensorflow/ops/gen_array_ops.py
|
gunpowder78/webdnn
|
c659ea49007f91d178ce422a1eebe289516a71ee
|
[
"MIT"
] | null | null | null |
from typing import List
import numpy as np
import tensorflow as tf
from webdnn import ConstantVariable
from webdnn.frontend.constraints import AxisVar, unify_order
from webdnn.graph.operators.reshape import Reshape
from webdnn.graph.operators.zero_padding_2d import ZeroPadding2D
from webdnn.graph.variable import Variable
from webdnn.graph.axis import Axis
from webdnn.graph.graph import Graph
from webdnn.graph.order import Order, OrderNC, OrderNTC, OrderNHWC, OrderC
from webdnn.graph.placeholder import Placeholder
from webdnn.frontend.tensorflow.converter import TensorFlowConverter
def _unsupported_array_op_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Shared stub for TF array ops that WebDNN cannot convert yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


# Register the single stub for every not-yet-supported op in this group,
# replacing eleven identical copy-pasted handler definitions.
for _op_type in ("BatchMatrixBandPart", "BatchMatrixDiag", "BatchMatrixDiagPart",
                 "BatchMatrixSetDiag", "BatchToSpace", "BatchToSpaceND",
                 "Bitcast", "CheckNumerics", "Concat", "ConcatOffset", "ConcatV2"):
    TensorFlowConverter.register_handler(_op_type)(_unsupported_array_op_handler)

# Keep the original per-op module-level names for backward compatibility.
batch_matrix_band_part_handler = _unsupported_array_op_handler
batch_matrix_diag_handler = _unsupported_array_op_handler
batch_matrix_diag_part_handler = _unsupported_array_op_handler
batch_matrix_set_diag_handler = _unsupported_array_op_handler
batch_to_space_handler = _unsupported_array_op_handler
batch_to_space_nd_handler = _unsupported_array_op_handler
bitcast_handler = _unsupported_array_op_handler
check_numerics_handler = _unsupported_array_op_handler
concat_handler = _unsupported_array_op_handler
concat_offset_handler = _unsupported_array_op_handler
concat_v2_handler = _unsupported_array_op_handler
@TensorFlowConverter.register_handler("Const")
def const_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Convert a TF `Const` op into a WebDNN variable.

    FIXME: should output ConstantVariable (with the actual values) for the
    non-scalar case too, not a plain Variable.
    """
    tensor = tf_op.outputs[0]
    # Unknown dimensions become WebDNN Placeholders.
    shape = [Placeholder() if dim.value is None else dim.value for dim in tensor.shape.dims]
    if not shape:
        # Scalar constant: WebDNN variables need at least one dimension, so
        # wrap the value in a 1-element, C-ordered ConstantVariable.
        # NOTE(review): this reaches into protobuf internals
        # (`float_val._values[0]`) and assumes a float32 scalar — confirm.
        variable = ConstantVariable(
            np.array([tf_op.get_attr("value").float_val._values[0]], dtype=np.float32),
            Order([Axis.C]))
    else:
        variable = Variable(shape, Order([AxisVar() for _ in shape]))
    converter.set_variable(tensor, variable)
def _unsupported_array_op_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Shared stub for TF array ops that WebDNN cannot convert yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


# Register the single stub for every not-yet-supported op in this group,
# replacing seventeen identical copy-pasted handler definitions.
for _op_type in ("DepthToSpace", "Dequantize", "Diag", "DiagPart", "EditDistance",
                 "ExpandDims", "ExtractImagePatches",
                 "FakeQuantWithMinMaxArgs", "FakeQuantWithMinMaxArgsGradient",
                 "FakeQuantWithMinMaxVars", "FakeQuantWithMinMaxVarsGradient",
                 "FakeQuantWithMinMaxVarsPerChannel",
                 "FakeQuantWithMinMaxVarsPerChannelGradient",
                 "Fill", "Gather", "GatherNd", "GatherV2"):
    TensorFlowConverter.register_handler(_op_type)(_unsupported_array_op_handler)

# Keep the original per-op module-level names for backward compatibility.
depth_to_space_handler = _unsupported_array_op_handler
dequantize_handler = _unsupported_array_op_handler
diag_handler = _unsupported_array_op_handler
diag_part_handler = _unsupported_array_op_handler
edit_distance_handler = _unsupported_array_op_handler
expand_dims_handler = _unsupported_array_op_handler
extract_image_patches_handler = _unsupported_array_op_handler
fake_quant_with_min_max_args_handler = _unsupported_array_op_handler
fake_quant_with_min_max_args_gradient_handler = _unsupported_array_op_handler
fake_quant_with_min_max_vars_handler = _unsupported_array_op_handler
fake_quant_with_min_max_vars_gradient_handler = _unsupported_array_op_handler
fake_quant_with_min_max_vars_per_channel_handler = _unsupported_array_op_handler
fake_quant_with_min_max_vars_per_channel_gradient_handler = _unsupported_array_op_handler
fill_handler = _unsupported_array_op_handler
gather_handler = _unsupported_array_op_handler
gather_nd_handler = _unsupported_array_op_handler
gather_v2_handler = _unsupported_array_op_handler
@TensorFlowConverter.register_handler("Identity")
def identity_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Identity is a no-op: alias the input's variable to the output tensor."""
    source = converter.get_variable(tf_op.inputs[0])
    converter.set_variable(tf_op.outputs[0], source)
def _unsupported_array_op_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Shared stub for TF array ops that WebDNN cannot convert yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


# Register the single stub for every not-yet-supported op in this group,
# replacing twelve identical copy-pasted handler definitions.
for _op_type in ("ImmutableConst", "InvertPermutation", "ListDiff",
                 "MatrixBandPart", "MatrixDiag", "MatrixDiagPart", "MatrixSetDiag",
                 "MirrorPad", "MirrorPadGrad", "OneHot", "OnesLike", "Pack"):
    TensorFlowConverter.register_handler(_op_type)(_unsupported_array_op_handler)

# Keep the original per-op module-level names for backward compatibility.
immutable_const_handler = _unsupported_array_op_handler
invert_permutation_handler = _unsupported_array_op_handler
list_diff_handler = _unsupported_array_op_handler
matrix_band_part_handler = _unsupported_array_op_handler
matrix_diag_handler = _unsupported_array_op_handler
matrix_diag_part_handler = _unsupported_array_op_handler
matrix_set_diag_handler = _unsupported_array_op_handler
mirror_pad_handler = _unsupported_array_op_handler
mirror_pad_grad_handler = _unsupported_array_op_handler
one_hot_handler = _unsupported_array_op_handler
ones_like_handler = _unsupported_array_op_handler
pack_handler = _unsupported_array_op_handler
@TensorFlowConverter.register_handler("Pad")
def pad_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Convert TF `Pad` into WebDNN `ZeroPadding2D`.

    FIXME: currently, padding is derived from the input/output shape
    difference. Originally, determining it from inputs[1] is correct.
    """
    in_var = converter.get_variable(tf_op.inputs[0])
    unify_order(in_var.order, OrderNHWC)  # FIXME: assuming input order as NHWC
    out_tf_var = tf_op.outputs[0]
    # ZeroPadding2D only accepts (symmetric) padding for the H and W axes;
    # infer the per-axis pad size from how much each dimension grew.
    padding = [0, 0]  # [pad_h, pad_w]
    for dim in range(in_var.ndim):
        in_size = in_var.shape[dim]
        out_size = out_tf_var.shape.dims[dim].value
        assert isinstance(in_size, int), "[TensorFlowConverter] Pad: Placeholder for input shape is not supported yet."
        assert isinstance(out_size, int), "[TensorFlowConverter] Pad: Placeholder for output shape is not supported yet."
        axis = in_var.order.axes[dim]
        if axis in [Axis.H, Axis.W]:
            # BUG FIX: the original assert was `(out_size - in_size % 2) != 0`,
            # which binds `%` before `-` and inverts the intended check. The
            # intent is that the total padding must be even so it can be split
            # into equal halves on both sides of the axis.
            assert (out_size - in_size) % 2 == 0, "[TensorFlowConverter] Pad: Uneven padding is not supported yet."
            pad_size = (out_size - in_size) // 2
            if axis == Axis.H:
                padding[0] = pad_size
            elif axis == Axis.W:
                padding[1] = pad_size
        else:
            assert out_size == in_size, "[TensorFlowConverter] Pad: padding for axis other than H and W is not supported yet."
    out_var, = ZeroPadding2D(None, padding=tuple(padding))(in_var)
    converter.set_variable(out_tf_var, out_var)
def _unsupported_array_op_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Shared stub for TF array ops that WebDNN cannot convert yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


# Register the single stub for every not-yet-supported op in this group,
# replacing fifteen identical copy-pasted handler definitions.
for _op_type in ("PadV2", "ParallelConcat", "Placeholder", "PlaceholderV2",
                 "PlaceholderWithDefault", "PreventGradient",
                 "QuantizeAndDequantize", "QuantizeAndDequantizeV2",
                 "QuantizeAndDequantizeV3", "QuantizeV2", "QuantizedConcat",
                 "QuantizedInstanceNorm", "QuantizedReshape", "Rank", "RefIdentity"):
    TensorFlowConverter.register_handler(_op_type)(_unsupported_array_op_handler)

# Keep the original per-op module-level names for backward compatibility.
pad_v2_handler = _unsupported_array_op_handler
parallel_concat_handler = _unsupported_array_op_handler
placeholder_handler = _unsupported_array_op_handler
placeholder_v2_handler = _unsupported_array_op_handler
placeholder_with_default_handler = _unsupported_array_op_handler
prevent_gradient_handler = _unsupported_array_op_handler
quantize_and_dequantize_handler = _unsupported_array_op_handler
quantize_and_dequantize_v2_handler = _unsupported_array_op_handler
quantize_and_dequantize_v3_handler = _unsupported_array_op_handler
quantize_v2_handler = _unsupported_array_op_handler
quantized_concat_handler = _unsupported_array_op_handler
quantized_instance_norm_handler = _unsupported_array_op_handler
quantized_reshape_handler = _unsupported_array_op_handler
rank_handler = _unsupported_array_op_handler
ref_identity_handler = _unsupported_array_op_handler
@TensorFlowConverter.register_handler("Reshape")
def reshape_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Convert TF `Reshape`.

    Inputs are (data, output_shape); the explicit output_shape tensor is
    ignored. The target shape is taken from the output tensor's static
    shape, which may contain at most one unknown dimension — that one is
    resolved from the input variable's total element count.
    """
    in_var = converter.get_variable(tf_op.inputs[0])
    out_tf_var = tf_op.outputs[0]
    unknown_positions = []
    known_product = 1
    target_shape = []
    for pos, dim in enumerate(out_tf_var.shape.dims):
        target_shape.append(dim.value)
        if dim.value is None:
            unknown_positions.append(pos)
        else:
            known_product *= dim.value
    if len(unknown_positions) > 1:
        raise NotImplementedError(
            "[TensorFlowConverter] Reshape: output with more than one placeholder is not supported yet.")
    if len(unknown_positions) == 1:
        if in_var.size % known_product != 0:
            raise ValueError("[TensorFlowConverter] Reshape: invalid reshape output value.")
        # Fill the single unknown dimension from the remaining element count.
        target_shape[unknown_positions[0]] = in_var.size // known_product
    out_var, = Reshape(None, in_order=in_var.order,
                       out_order=Order([AxisVar() for _ in target_shape]),
                       out_shape=target_shape)(in_var)
    converter.set_variable(out_tf_var, out_var)
def _unsupported_array_op_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Shared stub for TF array ops that WebDNN cannot convert yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


# Register the single stub for every not-yet-supported op in this group,
# replacing fifteen identical copy-pasted handler definitions.
for _op_type in ("ResourceStridedSliceAssign", "Reverse", "ReverseSequence",
                 "ReverseV2", "ScatterNd", "ScatterNdNonAliasingAdd",
                 "Shape", "ShapeN", "Size", "Slice",
                 "SpaceToBatch", "SpaceToBatchND", "SpaceToDepth",
                 "Split", "SplitV"):
    TensorFlowConverter.register_handler(_op_type)(_unsupported_array_op_handler)

# Keep the original per-op module-level names for backward compatibility.
resource_strided_slice_assign_handler = _unsupported_array_op_handler
reverse_handler = _unsupported_array_op_handler
reverse_sequence_handler = _unsupported_array_op_handler
reverse_v2_handler = _unsupported_array_op_handler
scatter_nd_handler = _unsupported_array_op_handler
scatter_nd_non_aliasing_add_handler = _unsupported_array_op_handler
shape_handler = _unsupported_array_op_handler
shape_n_handler = _unsupported_array_op_handler
size_handler = _unsupported_array_op_handler
slice_handler = _unsupported_array_op_handler
space_to_batch_handler = _unsupported_array_op_handler
space_to_batch_nd_handler = _unsupported_array_op_handler
space_to_depth_handler = _unsupported_array_op_handler
split_handler = _unsupported_array_op_handler
split_v_handler = _unsupported_array_op_handler
@TensorFlowConverter.register_handler("Squeeze")
def squeeze_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Convert TF `Squeeze`: drop the size-1 axes listed in the op's
    `squeeze_dims` attribute via a Reshape that preserves the order of the
    remaining axes."""
    squeeze_dims = tf_op.get_attr("squeeze_dims")  # type: List[int]
    in_var = converter.get_variable(tf_op.inputs[0])
    kept_shape = []  # type: List[int]
    kept_axes = []   # type: List[Axis]
    for pos, size in enumerate(in_var.shape):
        if pos in squeeze_dims:
            assert size == 1, f"[TensorFlowConverter] {tf_op.type}: dimension to be squeezed have to be 1."
        else:
            kept_shape.append(size)
            kept_axes.append(in_var.order.axes[pos])
    out_var, = Reshape(None, in_order=in_var.order, out_order=Order(kept_axes),
                       out_shape=kept_shape)(in_var)
    converter.set_variable(tf_op.outputs[0], out_var)
@TensorFlowConverter.register_handler("StopGradient")
def stop_gradient_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "StopGradient" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("StridedSlice")
def strided_slice_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "StridedSlice" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("StridedSliceAssign")
def strided_slice_assign_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "StridedSliceAssign" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("StridedSliceGrad")
def strided_slice_grad_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "StridedSliceGrad" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Tile")
def tile_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "Tile" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("TileGrad")
def tile_grad_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "TileGrad" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Transpose")
def transpose_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "Transpose" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Unique")
def unique_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "Unique" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("UniqueWithCounts")
def unique_with_counts_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "UniqueWithCounts" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Unpack")
def unpack_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "Unpack" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Where")
def where_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "Where" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("ZerosLike")
def zeros_like_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    """Stub for the TF "ZerosLike" op: conversion is not implemented yet."""
    raise NotImplementedError(f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
| 46.185606
| 126
| 0.787091
| 2,849
| 24,386
| 6.517726
| 0.09477
| 0.03899
| 0.210566
| 0.173353
| 0.767731
| 0.754968
| 0.747752
| 0.736712
| 0.723464
| 0.71377
| 0
| 0.002298
| 0.10789
| 24,386
| 527
| 127
| 46.273245
| 0.851253
| 0.026163
| 0
| 0.282738
| 0
| 0
| 0.306182
| 0.093127
| 0
| 0
| 0
| 0.001898
| 0.014881
| 1
| 0.258929
| false
| 0
| 0.03869
| 0
| 0.297619
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3de91ded5cbccbf77a1e6dca5a19d05acdc96082
| 131
|
py
|
Python
|
flowtrail/colors.py
|
krisfris/flowtrail
|
012c3397859bb11841210f934e7102bfb848c1cd
|
[
"MIT"
] | null | null | null |
flowtrail/colors.py
|
krisfris/flowtrail
|
012c3397859bb11841210f934e7102bfb848c1cd
|
[
"MIT"
] | null | null | null |
flowtrail/colors.py
|
krisfris/flowtrail
|
012c3397859bb11841210f934e7102bfb848c1cd
|
[
"MIT"
] | null | null | null |
import random
def random_color():
    """Return an RGBA color: three random channels in [0, 1] plus full opacity."""
    channels = [random.uniform(0.0, 1.0) for _ in range(3)]
    return channels + [1.0]
| 21.833333
| 94
| 0.656489
| 26
| 131
| 3.269231
| 0.307692
| 0.094118
| 0.141176
| 0.529412
| 0.6
| 0.6
| 0.6
| 0.6
| 0.6
| 0.6
| 0
| 0.123894
| 0.137405
| 131
| 5
| 95
| 26.2
| 0.628319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
9aa2fd5bbac081ac5d41bd786a69d5ee93e8716d
| 434
|
py
|
Python
|
src/test/test_cwssim_container.py
|
jannsta1/torf
|
b7866bf1a824b3ab6f44b7fa5da0c7a781766fd0
|
[
"BSD-2-Clause"
] | 3
|
2021-06-15T12:01:22.000Z
|
2022-01-21T23:17:37.000Z
|
src/test/test_cwssim_container.py
|
jannsta1/torf
|
b7866bf1a824b3ab6f44b7fa5da0c7a781766fd0
|
[
"BSD-2-Clause"
] | null | null | null |
src/test/test_cwssim_container.py
|
jannsta1/torf
|
b7866bf1a824b3ab6f44b7fa5da0c7a781766fd0
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from src.cwssim_container import Cwsim_container_from_ims, response_across_im_series
from src.utils import get_fwd_drone_ims
# @pytest.fixture
# def ims(im_w=235, im_h=150):
# return get_fwd_drone_ims(im_w=im_w, im_h=im_h)
def test_response_across_im_series_multi_process():
    # Smoke test: run response_across_im_series on its default (multiprocess) path.
    response_across_im_series()


def test_response_across_im_series_single_process():
    # Smoke test: same computation forced onto a single process.
    response_across_im_series(multiprocess=False)
| 25.529412
| 84
| 0.827189
| 72
| 434
| 4.472222
| 0.402778
| 0.217391
| 0.248447
| 0.341615
| 0.360248
| 0.180124
| 0
| 0
| 0
| 0
| 0
| 0.015424
| 0.103687
| 434
| 16
| 85
| 27.125
| 0.812339
| 0.218894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.428571
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9aadcd21c6ab76d33fbcf41a77df891172542770
| 30,567
|
py
|
Python
|
mealpy/evolutionary_based/DE.py
|
Alhassan20/mealpy
|
7ed365c5c495ad1c1e066662c90159b3d5e9b8e3
|
[
"MIT"
] | 1
|
2021-10-03T05:27:36.000Z
|
2021-10-03T05:27:36.000Z
|
mealpy/evolutionary_based/DE.py
|
Alhassan20/mealpy
|
7ed365c5c495ad1c1e066662c90159b3d5e9b8e3
|
[
"MIT"
] | null | null | null |
mealpy/evolutionary_based/DE.py
|
Alhassan20/mealpy
|
7ed365c5c495ad1c1e066662c90159b3d5e9b8e3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# ------------------------------------------------------------------------------------------------------%
# Created by "Thieu Nguyen" at 09:48, 16/03/2020 %
# %
# Email: nguyenthieu2102@gmail.com %
# Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 %
# Github: https://github.com/thieu1995 %
#-------------------------------------------------------------------------------------------------------%
from numpy import where, sum, any, mean, array, clip, ones, abs
from numpy.random import uniform, choice, normal, randint, random, rand
from copy import deepcopy
from scipy.stats import cauchy
from mealpy.optimizer import Root
"""
BaseDE: - the very first DE algorithm (Novel mutation strategy for enhancing SHADE and LSHADE algorithms for global numerical optimization)
strategy = 0: DE/current-to-rand/1/bin
= 1: DE/best/1/bin
= 2: DE/best/2/bin
= 3: DE/rand/2/bin
= 4: DE/current-to-best/1/bin
= 5: DE/current-to-rand/1/bin
"""
class BaseDE(Root):
    """
    The original version of: Differential Evolution (DE)

    The mutation scheme is selected by the ``strategy`` constructor argument
    (indices 0..5, see the module docstring above).
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
                 wf=0.8, cr=0.9, strategy=0, **kwargs):
        # wf: mutation weighting factor F; cr: crossover rate; strategy: 0..5.
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.weighting_factor = wf
        self.crossover_rate = cr
        self.strategy = strategy

    def _mutation__(self, current_pos, new_pos):
        # Binomial crossover: keep current_pos where U(0,1) < crossover_rate, else new_pos;
        # then clamp the result back into the search bounds.
        pos_new = where(uniform(0, 1, self.problem_size) < self.crossover_rate, current_pos, new_pos)
        return self.amend_position_faster(pos_new)

    def _create_children__(self, pop, g_best):
        """Build the trial population using the mutation scheme selected by self.strategy.

        NOTE(review): implicitly returns None when strategy is outside 0..5 —
        confirm callers always pass a valid strategy.
        """
        pop_child = deepcopy(pop)
        if self.strategy == 0:
            for i in range(0, self.pop_size):
                # Choose 3 random element and different to i
                idx_list = choice(list(set(range(0, self.pop_size)) - {i}), 3, replace=False)
                pos_new = pop[idx_list[0]][self.ID_POS] + self.weighting_factor * (pop[idx_list[1]][self.ID_POS] - pop[idx_list[2]][self.ID_POS])
                pos_new = self._mutation__(pop[i][self.ID_POS], pos_new)
                fit = self.get_fitness_position(pos_new)
                pop_child[i] = [pos_new, fit]
            return pop_child
        elif self.strategy == 1:
            for i in range(0, self.pop_size):
                idx_list = choice(list(set(range(0, self.pop_size)) - {i}), 2, replace=False)
                pos_new = g_best[self.ID_POS] + self.weighting_factor * (pop[idx_list[0]][self.ID_POS] - pop[idx_list[1]][self.ID_POS])
                pos_new = self._mutation__(pop[i][self.ID_POS], pos_new)
                fit = self.get_fitness_position(pos_new)
                pop_child[i] = [pos_new, fit]
            return pop_child
        elif self.strategy == 2:
            for i in range(0, self.pop_size):
                idx_list = choice(list(set(range(0, self.pop_size)) - {i}), 4, replace=False)
                pos_new = g_best[self.ID_POS] + self.weighting_factor * (pop[idx_list[0]][self.ID_POS] - pop[idx_list[1]][self.ID_POS]) + \
                          self.weighting_factor * (pop[idx_list[2]][self.ID_POS] - pop[idx_list[3]][self.ID_POS])
                pos_new = self._mutation__(pop[i][self.ID_POS], pos_new)
                fit = self.get_fitness_position(pos_new)
                pop_child[i] = [pos_new, fit]
            return pop_child
        elif self.strategy == 3:
            for i in range(0, self.pop_size):
                idx_list = choice(list(set(range(0, self.pop_size)) - {i}), 5, replace=False)
                pos_new = pop[idx_list[0]][self.ID_POS] + self.weighting_factor * (pop[idx_list[1]][self.ID_POS] - pop[idx_list[2]][self.ID_POS]) + \
                          self.weighting_factor * (pop[idx_list[3]][self.ID_POS] - pop[idx_list[4]][self.ID_POS])
                pos_new = self._mutation__(pop[i][self.ID_POS], pos_new)
                fit = self.get_fitness_position(pos_new)
                pop_child[i] = [pos_new, fit]
            return pop_child
        elif self.strategy == 4:
            for i in range(0, self.pop_size):
                idx_list = choice(list(set(range(0, self.pop_size)) - {i}), 2, replace=False)
                pos_new = pop[i][self.ID_POS] + self.weighting_factor * (g_best[self.ID_POS] - pop[i][self.ID_POS]) + \
                          self.weighting_factor * (pop[idx_list[0]][self.ID_POS] - pop[idx_list[1]][self.ID_POS])
                pos_new = self._mutation__(pop[i][self.ID_POS], pos_new)
                fit = self.get_fitness_position(pos_new)
                pop_child[i] = [pos_new, fit]
            return pop_child
        elif self.strategy == 5:
            for i in range(0, self.pop_size):
                idx_list = choice(list(set(range(0, self.pop_size)) - {i}), 3, replace=False)
                pos_new = pop[i][self.ID_POS] + self.weighting_factor * (pop[idx_list[0]][self.ID_POS] - pop[i][self.ID_POS]) + \
                          self.weighting_factor * (pop[idx_list[1]][self.ID_POS] - pop[idx_list[2]][self.ID_POS])
                pos_new = self._mutation__(pop[i][self.ID_POS], pos_new)
                fit = self.get_fitness_position(pos_new)
                pop_child[i] = [pos_new, fit]
            return pop_child

    ### Survivor Selection
    def _greedy_selection__(self, pop_old=None, pop_new=None):
        # Element-wise greedy selection: keep whichever of old/new has the better fitness.
        pop = [pop_new[i] if pop_new[i][self.ID_FIT] < pop_old[i][self.ID_FIT] else pop_old[i] for i in range(self.pop_size)]
        return pop

    def train(self):
        """Run the evolution loop; return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        for epoch in range(self.epoch):
            # create children
            pop_child = self._create_children__(pop, g_best)
            # create new pop by comparing fitness of corresponding each member in pop and children
            pop = self._greedy_selection__(pop, pop_child)
            # update global best position
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class JADE(Root):
    """
    The original version of: Differential Evolution (JADE)

    Link:
        JADE: Adaptive Differential Evolution with Optional External Archive
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
                 miu_f=0.5, miu_cr=0.5, p=0.1, c=0.1, **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.miu_f = miu_f  # the initial f, location is changed then that f is good
        self.miu_cr = miu_cr  # the initial cr,
        self.p = p  # uniform(0.05, 0.2) # the x_best is select from the top 100p % solutions
        self.c = c  # uniform(1/20, 1/5) # the adaptation parameter control value of f and cr

    ### Survivor Selection
    def lehmer_mean(self, list_objects):
        # Lehmer mean sum(x^2)/sum(x): biases the adapted f towards larger successful values.
        return sum(list_objects**2) / sum(list_objects)

    def train(self):
        """Run JADE; return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        miu_cr = self.miu_cr
        miu_f = self.miu_f
        archive_pop = list()  # external archive of replaced (inferior) parents
        for epoch in range(self.epoch):
            list_f = list()
            list_cr = list()
            sorted_pop = sorted(pop, key=lambda x:x[self.ID_FIT])
            for i in range(0, self.pop_size):
                ## Calculate adaptive parameter cr and f
                cr = normal(miu_cr, 0.1)
                cr = clip(cr, 0, 1)
                # Redraw Cauchy samples until f lands in (0, 1]
                while True:
                    f = cauchy.rvs(miu_f, 0.1)
                    if f < 0:
                        continue
                    elif f > 1:
                        f = 1
                    break
                top = int(self.pop_size * self.p)
                x_best = sorted_pop[randint(0, top)]
                x_r1 = pop[choice(list(set(range(0, self.pop_size)) - {i}))]
                new_pop = pop + archive_pop
                # Pick x_r2 (from population plus archive) element-wise distinct from x_r1 and pop[i]
                while True:
                    x_r2 = new_pop[randint(0, len(new_pop))]
                    if any(x_r2[self.ID_POS] - x_r1[self.ID_POS]) and any(x_r2[self.ID_POS] - pop[i][self.ID_POS]):
                        break
                x_new = pop[i][self.ID_POS] + f * (x_best[self.ID_POS] - pop[i][self.ID_POS]) + f * (x_r1[self.ID_POS] - x_r2[self.ID_POS])
                pos_new = where(uniform(0, 1, self.problem_size) < cr, x_new, pop[i][self.ID_POS])
                j_rand = randint(0, self.problem_size)
                pos_new[j_rand] = x_new[j_rand]  # force at least one gene from the mutant
                fit_new = self.get_fitness_position(pos_new)
                if fit_new < pop[i][self.ID_FIT]:
                    archive_pop.append(pop[i])
                    list_cr.append(cr)
                    list_f.append(f)
                    pop[i] = [pos_new, fit_new]
            # Randomly remove solution
            temp = len(archive_pop) - self.pop_size
            if temp > 0:
                idx_list = choice(range(0, len(archive_pop)), len(archive_pop) - self.pop_size, replace=False)
                archive_pop_new = []
                for idx, solution in enumerate(archive_pop):
                    if idx not in idx_list:
                        archive_pop_new.append(solution)
                archive_pop = deepcopy(archive_pop_new)
            # Update miu_cr and miu_f
            # NOTE(review): when no trial succeeds in an epoch, list_cr/list_f are empty and
            # mean()/lehmer_mean() produce NaN — confirm this cannot occur for intended workloads.
            miu_cr = (1 - self.c) * miu_cr + self.c * mean(array(list_cr))
            miu_f = (1 - self.c) * miu_f + self.c * self.lehmer_mean(array(list_f))
            # update global best position
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class SADE(Root):
    """
    The original version of: Self-Adaptive Differential Evolution (SADE)

    Link:
        Self-adaptive differential evolution algorithm for numerical optimization
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size

    ### Survivor Selection
    def lehmer_mean(self, list_objects):
        # Lehmer mean sum(x^2)/sum(x); biases the adapted value towards larger entries.
        return sum(list_objects ** 2) / sum(list_objects)

    def train(self):
        """Run SADE; return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        list_cr = list()
        loop_probability = 50  # p1 is re-estimated every 50 epochs
        loop_cr = 5            # crm is re-estimated every 5 epochs
        ns1 = ns2 = nf1 = nf2 = 0  # success/failure counters of the two strategies
        crm = 0.5              # running mean of successful crossover rates
        p1 = 0.5               # probability of choosing strategy 1 (rand/1/bin)
        for epoch in range(self.epoch):
            for i in range(0, self.pop_size):
                ## Calculate adaptive parameter cr and f
                cr = normal(crm, 0.1)
                cr = clip(cr, 0, 1)
                # Sample f from N(0.5, 0.3), redrawing negatives and clamping at 1
                while True:
                    f = normal(0.5, 0.3)
                    if f < 0:
                        continue
                    elif f > 1:
                        f = 1
                    break
                id1, id2, id3 = choice(list(set(range(0, self.pop_size)) - {i}), 3, replace=False)
                if rand() < p1:
                    # Strategy 1
                    x_new = pop[id1][self.ID_POS] + f * (pop[id2][self.ID_POS] - pop[id3][self.ID_POS])
                    pos_new = where(uniform(0, 1, self.problem_size) < cr, x_new, pop[i][self.ID_POS])
                    j_rand = randint(0, self.problem_size)
                    pos_new[j_rand] = x_new[j_rand]  # force at least one gene from the mutant
                    fit_new = self.get_fitness_position(pos_new)
                    if fit_new < pop[i][self.ID_FIT]:
                        ns1 += 1
                        pop[i] = [pos_new, fit_new]
                        list_cr.append(cr)
                    else:
                        nf1 += 1
                else:
                    # Strategy 2 (uses the global best as attractor)
                    x_new = pop[i][self.ID_POS] + f * (g_best[self.ID_POS] - pop[i][self.ID_POS]) + f * (pop[id1][self.ID_POS] - pop[id2][self.ID_POS])
                    pos_new = where(uniform(0, 1, self.problem_size) < cr, x_new, pop[i][self.ID_POS])
                    j_rand = randint(0, self.problem_size)
                    pos_new[j_rand] = x_new[j_rand]
                    fit_new = self.get_fitness_position(pos_new)
                    if fit_new < pop[i][self.ID_FIT]:
                        ns2 += 1
                        pop[i] = [pos_new, fit_new]
                        list_cr.append(cr)
                    else:
                        nf2 += 1
            # Update cr and p1.
            # BUGFIX: the original tested "(epoch + 1) / loop_cr == 0" (and likewise for
            # loop_probability), which is never true for epoch >= 0, so the self-adaptive
            # updates never ran. The intended periodic trigger is the modulo test below.
            if (epoch + 1) % loop_cr == 0:
                if list_cr:  # guard: no successful trial in the window leaves list_cr empty
                    crm = mean(list_cr)
                list_cr = list()
            if (epoch + 1) % loop_probability == 0:
                denom = ns2 * (ns1 + nf1) + ns1 * (ns2 + nf2)
                if denom != 0:  # guard: avoid ZeroDivisionError when no strategy succeeded
                    p1 = ns1 * (ns2 + nf2) / denom
                ns1 = ns2 = nf1 = nf2 = 0
            # update global best position
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class SHADE(Root):
    """
    The original version of: Success-History Adaptation Differential Evolution (SHADE)

    Link:
        Success-History Based Parameter Adaptation for Differential Evolution
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
                 miu_f=0.5, miu_cr=0.5, **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.miu_f = miu_f  # list the initial f,
        self.miu_cr = miu_cr  # list the initial cr,

    ### Survivor Selection
    def weighted_lehmer_mean(self, list_objects, list_weights):
        # Weighted Lehmer mean: sum(w*x^2) / sum(w*x)
        up = list_weights * list_objects**2
        down = list_weights * list_objects
        return sum(up) / sum(down)

    def train(self):
        """Run SHADE; return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        miu_cr = self.miu_cr * ones(self.pop_size)  # historical memory of successful cr values
        miu_f = self.miu_f * ones(self.pop_size)    # historical memory of successful f values
        archive_pop = list()
        k = 0  # index of the next history-memory slot to overwrite
        for epoch in range(self.epoch):
            list_f = list()
            list_cr = list()
            list_f_index = list()
            list_cr_index = list()
            list_f_new = ones(self.pop_size)
            list_cr_new = ones(self.pop_size)
            pop_new = deepcopy(pop)  # Save all new elements --> Use to update the list_cr and list_f
            pop_old = deepcopy(pop)  # Save all old elements --> Use to update cr value
            sorted_pop = sorted(pop, key=lambda x: x[self.ID_FIT])
            for i in range(0, self.pop_size):
                ## Calculate adaptive parameter cr and f
                idx_rand = randint(0, self.pop_size)
                cr = normal(miu_cr[idx_rand], 0.1)
                cr = clip(cr, 0, 1)
                # Redraw Cauchy samples until f lands in (0, 1]
                while True:
                    f = cauchy.rvs(miu_f[idx_rand], 0.1)
                    if f < 0:
                        continue
                    elif f > 1:
                        f = 1
                    break
                list_cr_new[i] = cr
                list_f_new[i] = f
                p = uniform(2/self.pop_size, 0.2)
                top = int(self.pop_size * p)
                x_best = sorted_pop[randint(0, top)]
                x_r1 = pop[choice(list(set(range(0, self.pop_size)) - {i}))]
                new_pop = pop + archive_pop
                # Pick x_r2 (from population plus archive) element-wise distinct from x_r1 and pop[i]
                while True:
                    x_r2 = new_pop[randint(0, len(new_pop))]
                    if any(x_r2[self.ID_POS] - x_r1[self.ID_POS]) and any(x_r2[self.ID_POS] - pop[i][self.ID_POS]):
                        break
                x_new = pop[i][self.ID_POS] + f * (x_best[self.ID_POS] - pop[i][self.ID_POS]) + f * (x_r1[self.ID_POS] - x_r2[self.ID_POS])
                pos_new = where(uniform(0, 1, self.problem_size) < cr, x_new, pop[i][self.ID_POS])
                j_rand = randint(0, self.problem_size)
                pos_new[j_rand] = x_new[j_rand]  # force at least one gene from the mutant
                fit_new = self.get_fitness_position(pos_new)
                pop_new[i] = [pos_new, fit_new]
            for i in range(0, self.pop_size):
                if pop_new[i][self.ID_FIT] < pop[i][self.ID_FIT]:
                    list_cr.append(list_cr_new[i])
                    list_f.append(list_f_new[i])
                    list_f_index.append(i)
                    list_cr_index.append(i)
                    pop[i] = pop_new[i]
                    archive_pop.append(deepcopy(pop[i]))
            # Randomly remove solution
            temp = len(archive_pop) - self.pop_size
            if temp > 0:
                idx_list = choice(range(0, len(archive_pop)), len(archive_pop) - self.pop_size, replace=False)
                archive_pop_new = []
                for idx, solution in enumerate(archive_pop):
                    if idx not in idx_list:
                        archive_pop_new.append(solution)
                archive_pop = deepcopy(archive_pop_new)
            # Update miu_cr and miu_f
            if len(list_f) != 0 and len(list_cr) != 0:
                # Eq.13, 14, 10
                list_fit_old = ones(len(list_cr_index))
                list_fit_new = ones(len(list_cr_index))
                idx_increase = 0
                for i in range(0, self.pop_size):
                    if i in list_cr_index:
                        list_fit_old[idx_increase] = pop_old[i][self.ID_FIT]
                        list_fit_new[idx_increase] = pop_new[i][self.ID_FIT]
                        idx_increase += 1
                list_weights = abs(list_fit_new - list_fit_old) / sum(abs(list_fit_new - list_fit_old))
                miu_cr[k] = sum(list_weights * array(list_cr))
                miu_f[k] = self.weighted_lehmer_mean(array(list_f), list_weights)
                k += 1
                if k >= self.pop_size:
                    k = 0
            # update global best position
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class L_SHADE(Root):
    """
    The original version of: Linear Population Size Reduction Success-History Adaptation Differential Evolution (LSHADE)

    Link:
        Improving the Search Performance of SHADE Using Linear Population Size Reduction
    """

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100, miu_f=0.5, miu_cr=0.5, **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.miu_f = miu_f  # list the initial f,
        self.miu_cr = miu_cr  # list the initial cr,
        self.n_min = int(pop_size/5)  # final (minimum) population size of the linear reduction

    ### Survivor Selection
    def weighted_lehmer_mean(self, list_objects, list_weights):
        # Weighted Lehmer mean: sum(w*x^2) / sum(w*x)
        up = list_weights * list_objects ** 2
        down = list_weights * list_objects
        return sum(up) / sum(down)

    def train(self):
        """Run L-SHADE; return (best_position, best_fitness, loss_history).

        NOTE(review): when pop_size shrinks, the pop list itself is never truncated —
        members past the current pop_size are simply no longer iterated; confirm this
        is the intended reduction behavior.
        """
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        miu_cr = self.miu_cr * ones(self.pop_size)  # historical memory of successful cr values
        miu_f = self.miu_f * ones(self.pop_size)    # historical memory of successful f values
        archive_pop = list()
        k = 0  # index of the next history-memory slot to overwrite
        pop_size = self.pop_size  # current (linearly shrinking) population size
        for epoch in range(self.epoch):
            list_f = list()
            list_cr = list()
            list_f_index = list()
            list_cr_index = list()
            list_f_new = ones(pop_size)
            list_cr_new = ones(pop_size)
            pop_new = deepcopy(pop)  # Save all new elements --> Use to update the list_cr and list_f
            pop_old = deepcopy(pop)  # Save all old elements --> Use to update cr value
            sorted_pop = sorted(pop, key=lambda x: x[self.ID_FIT])
            for i in range(0, pop_size):
                ## Calculate adaptive parameter cr and f
                idx_rand = randint(0, pop_size)
                cr = normal(miu_cr[idx_rand], 0.1)
                cr = clip(cr, 0, 1)
                # Redraw Cauchy samples until f lands in (0, 1]
                while True:
                    f = cauchy.rvs(miu_f[idx_rand], 0.1)
                    if f < 0:
                        continue
                    elif f > 1:
                        f = 1
                    break
                list_cr_new[i] = cr
                list_f_new[i] = f
                p = uniform(0.15, 0.2)
                top = int(pop_size * p)
                x_best = sorted_pop[randint(0, top)]
                x_r1 = pop[choice(list(set(range(0, pop_size)) - {i}))]
                new_pop = pop + archive_pop
                # Pick x_r2 (from population plus archive) element-wise distinct from x_r1 and pop[i]
                while True:
                    x_r2 = new_pop[randint(0, len(new_pop))]
                    if any(x_r2[self.ID_POS] - x_r1[self.ID_POS]) and any(x_r2[self.ID_POS] - pop[i][self.ID_POS]):
                        break
                x_new = pop[i][self.ID_POS] + f * (x_best[self.ID_POS] - pop[i][self.ID_POS]) + f * (x_r1[self.ID_POS] - x_r2[self.ID_POS])
                pos_new = where(uniform(0, 1, self.problem_size) < cr, x_new, pop[i][self.ID_POS])
                j_rand = randint(0, self.problem_size)
                pos_new[j_rand] = x_new[j_rand]  # force at least one gene from the mutant
                fit_new = self.get_fitness_position(pos_new)
                pop_new[i] = [pos_new, fit_new]
            for i in range(0, pop_size):
                if pop_new[i][self.ID_FIT] < pop[i][self.ID_FIT]:
                    list_cr.append(list_cr_new[i])
                    list_f.append(list_f_new[i])
                    list_f_index.append(i)
                    list_cr_index.append(i)
                    pop[i] = pop_new[i]
                    archive_pop.append(deepcopy(pop[i]))
            # Randomly remove solution
            temp = len(archive_pop) - pop_size
            if temp > 0:
                idx_list = choice(range(0, len(archive_pop)), len(archive_pop) - pop_size, replace=False)
                archive_pop_new = []
                for idx, solution in enumerate(archive_pop):
                    if idx not in idx_list:
                        archive_pop_new.append(solution)
                archive_pop = deepcopy(archive_pop_new)
            # Update miu_cr and miu_f
            if len(list_f) != 0 and len(list_cr) != 0:
                # Eq.13, 14, 10
                list_fit_old = ones(len(list_cr_index))
                list_fit_new = ones(len(list_cr_index))
                idx_increase = 0
                for i in range(0, pop_size):
                    if i in list_cr_index:
                        list_fit_old[idx_increase] = pop_old[i][self.ID_FIT]
                        list_fit_new[idx_increase] = pop_new[i][self.ID_FIT]
                        idx_increase += 1
                list_weights = abs(list_fit_new - list_fit_old) / sum(abs(list_fit_new - list_fit_old))
                miu_cr[k] = sum(list_weights * array(list_cr))
                miu_f[k] = self.weighted_lehmer_mean(array(list_f), list_weights)
                k += 1
                if k >= pop_size:
                    k = 0
            # Linear Population Size Reduction
            pop_size = round(self.pop_size + epoch * ((self.n_min - self.pop_size)/self.epoch))
            # update global best position
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
class SAP_DE(Root):
    """
    The original version of: Differential Evolution with Self-Adaptive Populations

    Link:
        Exploring dynamic self-adaptive populations in differential evolution
    """
    # Extra per-solution slots appended after [position, fitness]:
    ID_CR = 2  # crossover rate
    ID_MR = 3  # mutation rate
    ID_PS = 4  # this solution's "vote" for the population size

    def __init__(self, obj_func=None, lb=None, ub=None, verbose=True, epoch=750, pop_size=100,
                 wf=0.8, cr=0.9, F=1, branch="ABS", **kwargs):
        super().__init__(obj_func, lb, ub, verbose, kwargs)
        self.epoch = epoch
        self.pop_size = pop_size
        self.weighting_factor = wf
        self.crossover_rate = cr
        self.F = F
        self.M = pop_size
        self.branch = branch  # absolute (ABS) or relative (REL)

    def create_solution(self, minmax=0):
        """Create one solution: [position, fitness, crossover_rate, mutation_rate, pop_size].

        NOTE(review): pop_size is unbound (NameError) when self.branch is neither
        "ABS" nor "REL" — confirm branch is validated upstream.
        """
        position = uniform(self.lb, self.ub)
        fitness = self.get_fitness_position(position=position, minmax=minmax)
        crossover_rate = uniform(0, 1)
        mutation_rate = uniform(0, 1)
        if self.branch == "ABS":
            pop_size = int(10 * self.problem_size + normal(0, 1))
        elif self.branch == "REL":
            pop_size = int(10 * self.problem_size + uniform(-0.5, 0.5))
        return [position, fitness, crossover_rate, mutation_rate, pop_size]

    def edit_to_range(self, var=None, lower=0, upper=1, func_value=None):
        # Nudge var back inside (lower, upper) by repeatedly adding/subtracting func_value().
        # NOTE(review): termination depends on func_value() returning suitable step sizes —
        # confirm callers only pass bounded generators such as random().
        while var <= lower or var >= upper:
            if var <= lower:
                var += func_value()
            if var >= upper:
                var -= func_value()
        return var

    def train(self):
        """Run SAP-DE; return (best_position, best_fitness, loss_history)."""
        pop = [self.create_solution() for _ in range(self.pop_size)]
        g_best = self.get_global_best_solution(pop=pop, id_fit=self.ID_FIT, id_best=self.ID_MIN_PROB)
        m_new = self.pop_size
        for epoch in range(self.epoch):
            for i in range(0, self.pop_size):
                ### create children
                # Choose 3 random element and different to i
                idxs = choice(list(set(range(0, self.pop_size)) - {i}), 3, replace=False)
                j = randint(0, self.pop_size)
                self.F = uniform(0, 1)
                sol_new = deepcopy(pop[idxs[0]])
                ## Crossover
                if uniform(0, 1) < pop[i][self.ID_CR] or i == j:
                    pos_new = pop[idxs[0]][self.ID_POS] + self.F * (pop[idxs[1]][self.ID_POS] - pop[idxs[2]][self.ID_POS])
                    cr_new = pop[idxs[0]][self.ID_CR] + self.F * (pop[idxs[1]][self.ID_CR] - pop[idxs[2]][self.ID_CR])
                    mr_new = pop[idxs[0]][self.ID_MR] + self.F * (pop[idxs[1]][self.ID_MR] - pop[idxs[2]][self.ID_MR])
                    if self.branch == "ABS":
                        ps_new = pop[idxs[0]][self.ID_PS] + int(self.F * (pop[idxs[1]][self.ID_PS] - pop[idxs[2]][self.ID_PS]))
                    elif self.branch == "REL":
                        ps_new = pop[idxs[0]][self.ID_PS] + self.F * (pop[idxs[1]][self.ID_PS] - pop[idxs[2]][self.ID_PS])
                    pos_new = self.amend_position_faster(pos_new)
                    fit_new = self.get_fitness_position(pos_new)
                    cr_new = self.edit_to_range(cr_new, 0, 1, random)
                    mr_new = self.edit_to_range(mr_new, 0, 1, random)
                    sol_new = [pos_new, fit_new, cr_new, mr_new, ps_new]
                ## Mutation
                if uniform(0, 1) < pop[idxs[0]][self.ID_MR]:
                    pos_new = pop[i][self.ID_POS] + normal(0, pop[idxs[0]][self.ID_MR])
                    cr_new = normal(0, 1)
                    mr_new = normal(0, 1)
                    if self.branch == "ABS":
                        ps_new = pop[i][self.ID_PS] + int(normal(0.5, 1))
                    elif self.branch == "REL":
                        ps_new = pop[i][self.ID_PS] + normal(0, pop[idxs[0]][self.ID_MR])
                    pos_new = self.amend_position_faster(pos_new)
                    fit_new = self.get_fitness_position(pos_new)
                    sol_new = [pos_new, fit_new, cr_new, mr_new, ps_new]
                pop[i] = deepcopy(sol_new)
            # Calculate new population size
            total = sum([pop[i][self.ID_PS] for i in range(0, self.pop_size)])
            if self.branch == "ABS":
                m_new = int(total / self.pop_size)
            elif self.branch == "REL":
                m_new = int(self.pop_size + total)
            if m_new <= 4:
                m_new = self.M + int(uniform(0, 4))
            elif m_new > 4 * self.M:
                m_new = self.M - int(uniform(0, 4))
            ## Change population by population size
            if m_new <= self.pop_size:
                pop = pop[:m_new]
            else:
                # Grow by duplicating the current best solution
                pop_sorted = sorted(pop, key=lambda x: x[self.ID_FIT])
                best = deepcopy(pop_sorted[0])
                pop_best = [best for i in range(0, m_new - self.pop_size)]
                pop = pop + pop_best
            self.pop_size = m_new
            # update global best position
            g_best = self.update_global_best_solution(pop, self.ID_MIN_PROB, g_best)
            self.loss_train.append(g_best[self.ID_FIT])
            if self.verbose:
                print("> Epoch: {}, Best fit: {}".format(epoch + 1, g_best[self.ID_FIT]))
        self.solution = g_best
        return g_best[self.ID_POS], g_best[self.ID_FIT], self.loss_train
| 47.317337
| 151
| 0.534007
| 4,254
| 30,567
| 3.582511
| 0.063
| 0.06063
| 0.048425
| 0.023622
| 0.809186
| 0.777428
| 0.757743
| 0.729528
| 0.712402
| 0.701509
| 0
| 0.021223
| 0.346419
| 30,567
| 645
| 152
| 47.390698
| 0.741616
| 0.101351
| 0
| 0.70101
| 0
| 0
| 0.006601
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042424
| false
| 0
| 0.010101
| 0.00404
| 0.111111
| 0.012121
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b121fe5d1467093d1754046eb9017faad57cc1ef
| 116
|
py
|
Python
|
opengnn/__init__.py
|
css518/Keywords-Guided-Method-Name-Generation
|
2b361bb26fc74b64e92feb30776a0a92f278fb98
|
[
"MIT"
] | 5
|
2021-04-13T03:01:51.000Z
|
2021-09-11T09:08:49.000Z
|
opengnn/__init__.py
|
css518/Keywords-Guided-Method-Name-Generation
|
2b361bb26fc74b64e92feb30776a0a92f278fb98
|
[
"MIT"
] | null | null | null |
opengnn/__init__.py
|
css518/Keywords-Guided-Method-Name-Generation
|
2b361bb26fc74b64e92feb30776a0a92f278fb98
|
[
"MIT"
] | null | null | null |
from opengnn import decoders
from opengnn import encoders
from opengnn import inputters
from opengnn import models
| 19.333333
| 29
| 0.853448
| 16
| 116
| 6.1875
| 0.4375
| 0.444444
| 0.686869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 116
| 5
| 30
| 23.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
492f65d9062813dff0e00114411b518796045dfe
| 10,953
|
py
|
Python
|
Tools/pybench/Strings.py
|
sireliah/polish-python
|
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
|
[
"PSF-2.0"
] | 1
|
2018-06-21T18:21:24.000Z
|
2018-06-21T18:21:24.000Z
|
Tools/pybench/Strings.py
|
sireliah/polish-python
|
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
|
[
"PSF-2.0"
] | null | null | null |
Tools/pybench/Strings.py
|
sireliah/polish-python
|
605df4944c2d3bc25f8bf6964b274c0a0d297cc3
|
[
"PSF-2.0"
] | null | null | null |
z pybench zaimportuj Test
zaimportuj sys
# This file is written in the "polish-python" dialect: "spróbuj"/"wyjąwszy" are
# this fork's try/except keywords. On Python 3 the intern() builtin moved to
# sys.intern, so fall back to it when the builtin name is absent.
spróbuj:
    intern
wyjąwszy NameError:
    intern = sys.intern
# Benchmark: string concatenation of non-interned strings.
# ("klasa" = class, "dla i w" = for i in, "dalej" = pass in this dialect.)
klasa ConcatStrings(Test):

    version = 2.0
    operations = 10 * 5  # 50 unrolled concatenations per round
    rounds = 100000

    def test(self):
        # Make sure the strings are *not* interned
        s = ''.join(map(str,range(100)))
        t = ''.join(map(str,range(1,101)))
        dla i w range(self.rounds):
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s
            t + s

    def calibrate(self):
        # Same setup and empty loop: measures loop overhead to subtract from test().
        s = ''.join(map(str,range(100)))
        t = ''.join(map(str,range(1,101)))
        dla i w range(self.rounds):
            dalej
klasa CompareStrings(Test):
version = 2.0
operations = 10 * 5
rounds = 200000
def test(self):
# Make sure the strings are *not* interned
s = ''.join(map(str,range(10)))
t = ''.join(map(str,range(10))) + "abc"
dla i w range(self.rounds):
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
def calibrate(self):
s = ''.join(map(str,range(10)))
t = ''.join(map(str,range(10))) + "abc"
dla i w range(self.rounds):
dalej
klasa CompareInternedStrings(Test):
version = 2.0
operations = 10 * 5
rounds = 300000
def test(self):
# Make sure the strings *are* interned
s = intern(''.join(map(str,range(10))))
t = s
dla i w range(self.rounds):
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
def calibrate(self):
s = intern(''.join(map(str,range(10))))
t = s
dla i w range(self.rounds):
dalej
klasa CreateStringsWithConcat(Test):
version = 2.0
operations = 10 * 5
rounds = 200000
def test(self):
dla i w range(self.rounds):
s = 'om'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
def calibrate(self):
dla i w range(self.rounds):
dalej
klasa StringSlicing(Test):
version = 2.0
operations = 5 * 7
rounds = 160000
def test(self):
s = ''.join(map(str,range(100)))
dla i w range(self.rounds):
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
def calibrate(self):
s = ''.join(map(str,range(100)))
dla i w range(self.rounds):
dalej
### String methods
jeżeli hasattr('', 'lower'):
klasa StringMappings(Test):
version = 2.0
operations = 3 * (5 + 4 + 2 + 1)
rounds = 70000
def test(self):
s = ''.join(map(chr,range(20)))
t = ''.join(map(chr,range(50)))
u = ''.join(map(chr,range(100)))
v = ''.join(map(chr,range(256)))
dla i w range(self.rounds):
s.lower()
s.lower()
s.lower()
s.lower()
s.lower()
s.upper()
s.upper()
s.upper()
s.upper()
s.upper()
s.title()
s.title()
s.title()
s.title()
s.title()
t.lower()
t.lower()
t.lower()
t.lower()
t.upper()
t.upper()
t.upper()
t.upper()
t.title()
t.title()
t.title()
t.title()
u.lower()
u.lower()
u.upper()
u.upper()
u.title()
u.title()
v.lower()
v.upper()
v.title()
def calibrate(self):
s = ''.join(map(chr,range(20)))
t = ''.join(map(chr,range(50)))
u = ''.join(map(chr,range(100)))
v = ''.join(map(chr,range(256)))
dla i w range(self.rounds):
dalej
klasa StringPredicates(Test):
version = 2.0
operations = 10 * 7
rounds = 100000
def test(self):
data = ('abc', '123', ' ', '\xe4\xf6\xfc', '\xdf'*10)
len_data = len(data)
dla i w range(self.rounds):
s = data[i % len_data]
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
def calibrate(self):
data = ('abc', '123', ' ', '\u1234\u2345\u3456', '\uFFFF'*10)
data = ('abc', '123', ' ', '\xe4\xf6\xfc', '\xdf'*10)
len_data = len(data)
dla i w range(self.rounds):
s = data[i % len_data]
| 19.249561
| 75
| 0.281658
| 1,163
| 10,953
| 2.649183
| 0.082545
| 0.098669
| 0.143135
| 0.190847
| 0.871146
| 0.846803
| 0.822785
| 0.797144
| 0.733853
| 0.710159
| 0
| 0.053361
| 0.597918
| 10,953
| 568
| 76
| 19.283451
| 0.646231
| 0.012143
| 0
| 0.943052
| 0
| 0
| 0.022473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.004556
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
49644b4cf687bc4e7ef66aa8a6f99abfd014ece7
| 2,514
|
py
|
Python
|
test/test_utils.py
|
Debilski/pelita
|
192934d63d7337ffd2e167db8ce27b3dfd8a545d
|
[
"BSD-2-Clause"
] | null | null | null |
test/test_utils.py
|
Debilski/pelita
|
192934d63d7337ffd2e167db8ce27b3dfd8a545d
|
[
"BSD-2-Clause"
] | null | null | null |
test/test_utils.py
|
Debilski/pelita
|
192934d63d7337ffd2e167db8ce27b3dfd8a545d
|
[
"BSD-2-Clause"
] | null | null | null |
import pytest
from textwrap import dedent
from pelita import utils
@pytest.mark.parametrize('is_blue', [True, False])
def test_setup_test_game(is_blue):
layout = utils.load_builtin_layout('small_001', is_blue=is_blue)
test_game = utils.setup_test_game(layout=layout, is_blue=is_blue)
if is_blue:
assert test_game.position == (1, 5)
assert test_game.other.position == (1, 6)
assert test_game.enemy[0].position == (16, 1)
assert test_game.enemy[1].position == (16, 2)
else:
assert test_game.position == (16, 2)
assert test_game.other.position == (16, 1)
assert test_game.enemy[0].position == (1, 5)
assert test_game.enemy[1].position == (1, 6)
# load_builtin_layout loads unnoised enemies
assert test_game.enemy[0].is_noisy is False
assert test_game.enemy[1].is_noisy is False
@pytest.mark.parametrize('is_blue', [True, False])
def test_setup_test_game(is_blue):
# Test that is_noisy is set properly
layout = """
##################
#. ... .##. y#
# # # . .### # #
# # ##. x . #
# . .## # #
#a# ###. . # # #
#b .##. ... .#
##################
"""
test_game = utils.setup_test_game(layout=layout, is_blue=is_blue, is_noisy={"a":False, "b":True, "x":False, "y":True})
if is_blue:
assert test_game.position == (1, 5)
assert test_game.other.position == (1, 6)
assert test_game.enemy[0].position == (8, 3)
assert test_game.enemy[1].position == (16, 1)
else:
assert test_game.position == (8, 3)
assert test_game.other.position == (16, 1)
assert test_game.enemy[0].position == (1, 5)
assert test_game.enemy[1].position == (1, 6)
# load_builtin_layout loads unnoised enemies
assert test_game.enemy[0].is_noisy is False
assert test_game.enemy[1].is_noisy is True
@pytest.mark.parametrize('is_blue', [True, False])
def test_setup_test_game_incomplete_noisy_dict(is_blue):
# Test that is_noisy is set properly
layout = """
##################
#. ... .##. y#
# # # . .### # #
# # ##. x . #
# . .## # #
#a# ###. . # # #
#b .##. ... .#
##################
"""
test_game = utils.setup_test_game(layout=layout, is_blue=is_blue, is_noisy={"b":True, "y":True})
# load_builtin_layout loads unnoised enemies
assert test_game.enemy[0].is_noisy is False
assert test_game.enemy[1].is_noisy is True
| 32.230769
| 122
| 0.578759
| 338
| 2,514
| 4.073965
| 0.14497
| 0.180102
| 0.223675
| 0.193174
| 0.879448
| 0.847495
| 0.83878
| 0.793028
| 0.793028
| 0.793028
| 0
| 0.028841
| 0.241448
| 2,514
| 77
| 123
| 32.649351
| 0.693235
| 0.078759
| 0
| 0.745763
| 0
| 0
| 0.179376
| 0
| 0
| 0
| 0
| 0
| 0.372881
| 1
| 0.050847
| false
| 0
| 0.050847
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b8ed537b66d95b25046f4de8b2b50a81ef10f851
| 12,010
|
py
|
Python
|
tests/test_network.py
|
eclissi91/curio
|
09dd8a20142b6210fe18f676a746918601c6092e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_network.py
|
eclissi91/curio
|
09dd8a20142b6210fe18f676a746918601c6092e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_network.py
|
eclissi91/curio
|
09dd8a20142b6210fe18f676a746918601c6092e
|
[
"BSD-3-Clause"
] | null | null | null |
# test_network.py
from os.path import dirname, join
import sys
import os
import ssl
from functools import partial
import pytest
from curio import *
from curio import network
from curio import ssl as curiossl
from curio.socket import *
def test_tcp_echo(kernel):
results = []
async def handler(client, addr):
results.append('handler start')
while True:
results.append('recv wait')
data = await client.recv(100)
if not data:
break
results.append(('handler', data))
await client.sendall(data)
results.append('handler done')
async def client(address, serv):
results.append('client start')
sock = socket(AF_INET, SOCK_STREAM)
await sock.connect(address)
await sock.send(b'Msg1')
await sleep(0.1)
resp = await sock.recv(100)
results.append(('client', resp))
await sock.send(b'Msg2')
await sleep(0.1)
resp = await sock.recv(100)
results.append(('client', resp))
results.append('client close')
await sock.close()
await serv.cancel()
async def main():
async with TaskGroup() as g:
serv = await g.spawn(tcp_server, '', 25000, handler)
await g.spawn(client, ('localhost', 25000), serv)
kernel.run(main())
assert results == [
'client start',
'handler start',
'recv wait',
('handler', b'Msg1'),
'recv wait',
('client', b'Msg1'),
('handler', b'Msg2'),
'recv wait',
('client', b'Msg2'),
'client close',
'handler done'
]
if not sys.platform.startswith('win'):
def test_unix_echo(kernel):
results = []
async def handler(client, addr):
results.append('handler start')
while True:
results.append('recv wait')
data = await client.recv(100)
if not data:
break
results.append(('handler', data))
await client.sendall(data)
results.append('handler done')
async def client(address, serv):
results.append('client start')
sock = await network.open_unix_connection(address)
await sock.send(b'Msg1')
await sleep(0.1)
resp = await sock.recv(100)
results.append(('client', resp))
await sock.send(b'Msg2')
await sleep(0.1)
resp = await sock.recv(100)
results.append(('client', resp))
results.append('client close')
await sock.close()
await serv.cancel()
async def main():
try:
os.remove('/tmp/curionet')
except OSError:
pass
async with TaskGroup() as g:
serv = await g.spawn(unix_server, '/tmp/curionet', handler)
await g.spawn(client, '/tmp/curionet', serv)
kernel.run(main())
assert results == [
'client start',
'handler start',
'recv wait',
('handler', b'Msg1'),
'recv wait',
('client', b'Msg1'),
('handler', b'Msg2'),
'recv wait',
('client', b'Msg2'),
'client close',
'handler done'
]
def test_ssl_server(kernel):
async def client(host, port, context):
sock = await network.open_connection(host, port, ssl=context, server_hostname=host)
await sock.sendall(b'Hello, world!')
resp = await sock.recv(4096)
return resp
async def handler(client_sock, addr):
data = await client_sock.recv(1000)
assert data == b'Hello, world!'
await client_sock.send(b'Back atcha: ' + data)
async def main():
# It might be desirable to move these out of the examples
# directory, as this test are now relying on them being around
file_path = join(dirname(dirname(__file__)), 'examples')
cert_file = join(file_path, 'ssl_test.crt')
key_file = join(file_path, 'ssl_test_rsa')
server_context = curiossl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_context.load_cert_chain(certfile=cert_file, keyfile=key_file)
stdlib_client_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
curio_client_context = curiossl.create_default_context(ssl.Purpose.SERVER_AUTH)
server_task = await spawn(partial(network.tcp_server, '', 10000, handler, ssl=server_context))
await sleep(0.1)
for test_context in (curio_client_context, stdlib_client_context):
test_context.check_hostname = False
test_context.verify_mode = ssl.CERT_NONE
resp = await client('localhost', 10000, test_context)
assert resp == b'Back atcha: Hello, world!'
await server_task.cancel()
kernel.run(main())
if not sys.platform.startswith('win'):
def test_unix_ssl_server(kernel):
async def client(address, context):
sock = await network.open_unix_connection(address, ssl=context)
await sock.sendall(b'Hello, world!')
resp = await sock.recv(4096)
return resp
async def handler(client_sock, addr):
data = await client_sock.recv(1000)
assert data == b'Hello, world!'
await client_sock.send(b'Back atcha: ' + data)
async def main():
# It might be desirable to move these out of the examples
# directory, as this test are now relying on them being around
file_path = join(dirname(dirname(__file__)), 'examples')
cert_file = join(file_path, 'ssl_test.crt')
key_file = join(file_path, 'ssl_test_rsa')
server_context = curiossl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_context.load_cert_chain(certfile=cert_file, keyfile=key_file)
stdlib_client_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
curio_client_context = curiossl.create_default_context(ssl.Purpose.SERVER_AUTH)
try:
os.remove('/tmp/curionet')
except OSError:
pass
server_task = await spawn(partial(network.unix_server, '/tmp/curionet', handler, ssl=server_context))
await sleep(0.1)
for test_context in (curio_client_context, stdlib_client_context):
test_context.check_hostname = False
test_context.verify_mode = ssl.CERT_NONE
resp = await client('/tmp/curionet', test_context)
assert resp == b'Back atcha: Hello, world!'
await server_task.cancel()
kernel.run(main())
def test_ssl_wrapping(kernel):
async def client(host, port, context):
sock = await network.open_connection(host, port, ssl=context, server_hostname=host)
await sock.sendall(b'Hello, world!')
resp = await sock.recv(4096)
return resp
async def handler(client_sock, addr):
data = await client_sock.recv(1000)
assert data == b'Hello, world!'
await client_sock.send(b'Back atcha: ' + data)
def server(host, port, context):
sock = socket(AF_INET, SOCK_STREAM)
try:
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, True)
sock.bind((host, port))
sock.listen(5)
return network.run_server(sock, handler, context)
except Exception:
sock._socket.close()
raise
async def main():
# It might be desirable to move these out of the examples
# directory, as this test are now relying on them being around
file_path = join(dirname(dirname(__file__)), 'examples')
cert_file = join(file_path, 'ssl_test.crt')
key_file = join(file_path, 'ssl_test_rsa')
server_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_context.load_cert_chain(certfile=cert_file, keyfile=key_file)
stdlib_client_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
curio_client_context = curiossl.create_default_context(ssl.Purpose.SERVER_AUTH)
server_task = await spawn(server, 'localhost', 10000, server_context)
for test_context in (curio_client_context, stdlib_client_context):
test_context.check_hostname = False
test_context.verify_mode = ssl.CERT_NONE
resp = await client('localhost', 10000, test_context)
assert resp == b'Back atcha: Hello, world!'
await server_task.cancel()
kernel.run(main())
@pytest.mark.internet
def test_ssl_outgoing(kernel):
async def main():
c = await network.open_connection('google.com', 443, ssl=True, server_hostname='google.com')
await c.close()
c = await network.open_connection('google.com', 443, ssl=True)
await c.close()
c = await network.open_connection('google.com', 443, ssl=True, alpn_protocols=['h2'])
await c.close()
kernel.run(main)
def test_ssl_manual_wrapping(kernel):
async def client(host, port, context):
sock = socket(AF_INET, SOCK_STREAM)
await sock.connect((host, port))
ssl_sock = await context.wrap_socket(sock, server_hostname=host)
await ssl_sock.sendall(b'Hello, world!')
resp = await ssl_sock.recv(4096)
return resp
async def handler(client_sock, addr):
data = await client_sock.recv(1000)
assert data == b'Hello, world!'
await client_sock.send(b'Back atcha: ' + data)
def server(host, port, context):
sock = socket(AF_INET, SOCK_STREAM)
try:
sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, True)
sock.bind((host, port))
sock.listen(5)
return network.run_server(sock, handler, context)
except Exception:
sock._socket.close()
raise
async def main():
# It might be desirable to move these out of the examples
# directory, as this test are now relying on them being around
file_path = join(dirname(dirname(__file__)), 'examples')
cert_file = join(file_path, 'ssl_test.crt')
key_file = join(file_path, 'ssl_test_rsa')
server_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
server_context.load_cert_chain(certfile=cert_file, keyfile=key_file)
curio_client_context = curiossl.create_default_context(ssl.Purpose.SERVER_AUTH)
server_task = await spawn(server, 'localhost', 10000, server_context)
curio_client_context.check_hostname = False
curio_client_context.verify_mode = ssl.CERT_NONE
resp = await client('localhost', 10000, curio_client_context)
assert resp == b'Back atcha: Hello, world!'
await server_task.cancel()
kernel.run(main())
@pytest.mark.internet
def test_errors(kernel):
async def main():
with pytest.raises(ValueError):
c = await network.open_connection('google.com', 443, server_hostname='google.com')
await c.close()
with pytest.raises(Exception):
c = await network.open_connection('google.com', 443, ssl=True, server_hostname='yahoo.com')
await c.close()
with pytest.raises(ValueError):
await network.tcp_server('localhost', 25000, None, ssl=True)
if not sys.platform.startswith('win'):
with pytest.raises(OSError):
await network.tcp_server('localhost', 80, None)
with pytest.raises(OSError):
await network.unix_server('/tmp', None)
with pytest.raises(ValueError):
c = await network.open_unix_connection('/tmp/curionet', server_hostname='google.com')
await c.close()
kernel.run(main)
| 34.912791
| 113
| 0.611157
| 1,460
| 12,010
| 4.855479
| 0.110959
| 0.02257
| 0.031034
| 0.035689
| 0.92044
| 0.898011
| 0.863874
| 0.832134
| 0.805896
| 0.765975
| 0
| 0.016187
| 0.285012
| 12,010
| 343
| 114
| 35.014577
| 0.809363
| 0.040216
| 0
| 0.818868
| 0
| 0
| 0.091438
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 1
| 0.037736
| false
| 0.007547
| 0.037736
| 0
| 0.098113
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
773df4423b6d29f946978a70e942744697ac7fb6
| 125
|
py
|
Python
|
src/view/scenes/__init__.py
|
ArcosJuan/Get-out-of-my-fucking-maze
|
ca2cfeaaeecb6c6f583ad647d020f25176170805
|
[
"MIT"
] | 2
|
2021-09-09T14:03:40.000Z
|
2021-11-03T03:35:55.000Z
|
src/view/scenes/__init__.py
|
ArcosJuan/Get-out-of-my-fucking-maze
|
ca2cfeaaeecb6c6f583ad647d020f25176170805
|
[
"MIT"
] | null | null | null |
src/view/scenes/__init__.py
|
ArcosJuan/Get-out-of-my-fucking-maze
|
ca2cfeaaeecb6c6f583ad647d020f25176170805
|
[
"MIT"
] | null | null | null |
from src.view.scenes.scene import Scene
from src.view.scenes.main_menu import MainMenu
from src.view.scenes.game import Game
| 31.25
| 46
| 0.832
| 22
| 125
| 4.681818
| 0.454545
| 0.203884
| 0.320388
| 0.495146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096
| 125
| 3
| 47
| 41.666667
| 0.911504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
77508dd2aeb0c35af663e0129bb4b22cb9713ce8
| 6,235
|
py
|
Python
|
example/theme_example.py
|
lakdred/pyecharts
|
02050acb0e94bb9453b88a25028de7a0ce23f125
|
[
"MIT"
] | 1
|
2019-06-29T09:37:45.000Z
|
2019-06-29T09:37:45.000Z
|
example/theme_example.py
|
lakdred/pyecharts
|
02050acb0e94bb9453b88a25028de7a0ce23f125
|
[
"MIT"
] | null | null | null |
example/theme_example.py
|
lakdred/pyecharts
|
02050acb0e94bb9453b88a25028de7a0ce23f125
|
[
"MIT"
] | 1
|
2021-01-18T10:17:01.000Z
|
2021-01-18T10:17:01.000Z
|
# coding=utf-8
from example.commons import Collector, Faker
from pyecharts import options as opts
from pyecharts.charts import Bar, Page
from pyecharts.globals import ThemeType
C = Collector()
@C.funcs
def theme_default() -> Bar:
c = (
Bar()
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-default"))
)
return c
@C.funcs
def theme_light() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.LIGHT))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-light"))
)
return c
@C.funcs
def theme_dark() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.DARK))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-dark"))
)
return c
@C.funcs
def theme_chalk() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.CHALK))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-chalk"))
)
return c
@C.funcs
def theme_essos() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.ESSOS))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-essos"))
)
return c
@C.funcs
def theme_infographic() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.INFOGRAPHIC))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-infographic"))
)
return c
@C.funcs
def theme_macarons() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.MACARONS))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-macarons"))
)
return c
@C.funcs
def theme_purple_passion() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.PURPLE_PASSION))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-purple-passion"))
)
return c
@C.funcs
def theme_roma() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.ROMA))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-roma"))
)
return c
@C.funcs
def theme_romantic() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.ROMANTIC))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-romantic"))
)
return c
@C.funcs
def theme_shine() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.SHINE))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-shine"))
)
return c
@C.funcs
def theme_vintage() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.VINTAGE))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-vintage"))
)
return c
@C.funcs
def theme_walden() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.WALDEN))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-walden"))
)
return c
@C.funcs
def theme_westeros() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.WESTEROS))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-westeros"))
)
return c
@C.funcs
def theme_wonderland() -> Bar:
c = (
Bar(init_opts=opts.InitOpts(theme=ThemeType.WONDERLAND))
.add_xaxis(Faker.choose())
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.add_yaxis("商家C", Faker.values())
.add_yaxis("商家D", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts("Theme-wonderland"))
)
return c
Page().add(*[fn() for fn, _ in C.charts]).render()
| 28.21267
| 75
| 0.601443
| 783
| 6,235
| 4.595147
| 0.074074
| 0.133407
| 0.175097
| 0.237632
| 0.854086
| 0.854086
| 0.772374
| 0.772374
| 0.772374
| 0.61284
| 0
| 0.000207
| 0.226784
| 6,235
| 220
| 76
| 28.340909
| 0.746111
| 0.001925
| 0
| 0.645161
| 0
| 0
| 0.060601
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.080645
| false
| 0.016129
| 0.021505
| 0
| 0.182796
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
620af4c0b7fd64623f602007943851471dca59db
| 6,895
|
py
|
Python
|
src/estimator.py
|
edgartutu/edgar-covid-19-estimator
|
6727c41cc1e59adc6c587b73d8957f89230d815c
|
[
"MIT"
] | null | null | null |
src/estimator.py
|
edgartutu/edgar-covid-19-estimator
|
6727c41cc1e59adc6c587b73d8957f89230d815c
|
[
"MIT"
] | 1
|
2021-05-11T10:32:55.000Z
|
2021-05-11T10:32:55.000Z
|
src/estimator.py
|
edgartutu/edgar-covid-19-estimator
|
6727c41cc1e59adc6c587b73d8957f89230d815c
|
[
"MIT"
] | null | null | null |
def estimator(data):
    """Estimate COVID-19 impact figures from reported case data.

    Args:
        data: dict with at least 'reportedCases' (int) and
            'totalHospitalBeds' (int). The whole dict is echoed back
            under the 'data' key of the result.

    Returns:
        dict of the form
        {"data": <input data>,
         "estimate": {"impact": {...}, "severeImpact": {...}}}
        where each scenario dict holds currentlyInfected,
        infectionsByRequestedTime, severeCasesByRequestedTime,
        hospitalBedsByRequestedTime, casesForICUByRequestedTime,
        casesForVentilatorsByRequestedTime and dollarsInFlight.

    Note:
        The original implementation hard-coded ``days = 28`` and then
        branched on ``if days:`` — always true — so the ``weeks`` /
        ``months`` branches (which duplicated the whole computation)
        were unreachable dead code. They have been removed; the output
        of the one reachable path is preserved exactly.
    """
    reportedCases = data['reportedCases']
    totalHospitalBeds = data['totalHospitalBeds']
    output = {"data": {}, "impact": {}, "severeImpact": {}}

    # "impact" assumes 10 unreported cases per reported one; "severeImpact" 50.
    output['impact']['currentlyInfected'] = reportedCases * 10
    output['severeImpact']['currentlyInfected'] = reportedCases * 50

    # Fixed 28-day horizon; the infection count doubles every 3 days.
    days = 28
    factor = int(days / 3)

    # Only 35% of beds are assumed available for severe COVID cases.
    beds_available = round(totalHospitalBeds * 0.35, 0)

    for scenario in ('impact', 'severeImpact'):
        infections = output[scenario]['currentlyInfected'] * pow(2, factor)
        output[scenario]['infectionsByRequestedTime'] = infections
        # 15% of infections become severe cases needing hospitalisation.
        output[scenario]['severeCasesByRequestedTime'] = infections * 0.15
        # May go negative: that signals a bed shortfall, as in the original.
        output[scenario]['hospitalBedsByRequestedTime'] = beds_available - infections * 0.15
        # 5% need ICU care, 2% need ventilators.
        output[scenario]['casesForICUByRequestedTime'] = infections * 0.05
        output[scenario]['casesForVentilatorsByRequestedTime'] = infections * 0.02
        # Economic loss: 85% income earners x $5 average daily income x 30 days.
        output[scenario]['dollarsInFlight'] = infections * 0.85 * 5 * 30

    output['data'] = data
    return {
        "data": data,
        "estimate": {
            "impact": output["impact"],
            "severeImpact": output["severeImpact"],
        },
    }
| 59.956522
| 113
| 0.762582
| 603
| 6,895
| 8.517413
| 0.089552
| 0.088785
| 0.108061
| 0.066589
| 0.91141
| 0.91141
| 0.91141
| 0.91141
| 0.91141
| 0.91141
| 0
| 0.022657
| 0.103843
| 6,895
| 114
| 114
| 60.482456
| 0.808545
| 0
| 0
| 0.814815
| 0
| 0
| 0.383321
| 0.229732
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009259
| false
| 0
| 0
| 0
| 0.037037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
622ae47c9eaa3191591a9b811ff797693749a07d
| 11,052
|
py
|
Python
|
ChimuApi/chimu_api.py
|
lenforiee/python-chimu-api
|
464310741bc58aa1702c9810a50d061e40f63ec2
|
[
"MIT"
] | null | null | null |
ChimuApi/chimu_api.py
|
lenforiee/python-chimu-api
|
464310741bc58aa1702c9810a50d061e40f63ec2
|
[
"MIT"
] | null | null | null |
ChimuApi/chimu_api.py
|
lenforiee/python-chimu-api
|
464310741bc58aa1702c9810a50d061e40f63ec2
|
[
"MIT"
] | null | null | null |
import aiohttp
import requests
import orjson
from .classes import Beatmap, BeatmapSet
class ChimuAPI:
    """Synchronous ChimuAPI class for making requests."""

    # Field names copied verbatim from chimu.moe beatmap payloads; used to
    # build Beatmap objects without repeating every key by hand.
    _BEATMAP_FIELDS = (
        'BeatmapId', 'ParentSetId', 'DiffName', 'FileMD5', 'Mode', 'BPM',
        'AR', 'OD', 'CS', 'HP', 'TotalLength', 'HitLength', 'Playcount',
        'Passcount', 'MaxCombo', 'DifficultyRating', 'OsuFile', 'DownloadPath',
    )

    # Scalar fields of a beatmap-set payload (ChildrenBeatmaps handled apart).
    _SET_FIELDS = (
        'SetId', 'RankedStatus', 'ApprovedDate', 'LastUpdate', 'LastChecked',
        'Artist', 'Title', 'Creator', 'Source', 'Tags', 'HasVideo', 'Genre',
        'Language', 'Favourites', 'Disabled',
    )

    def __init__(self):
        pass

    @classmethod
    def _parse_beatmap(cls, raw: dict) -> Beatmap:
        """Build a Beatmap from one raw API beatmap dict."""
        return Beatmap(**{field: raw[field] for field in cls._BEATMAP_FIELDS})

    @staticmethod
    def _check_error(request: dict) -> None:
        """Raise if the API response carries a non-zero error code.

        A zero 'code' means success; anything else comes with a 'message'.
        """
        if request['code']:
            raise Exception(f"The error was debugged: {request['message']}")

    @staticmethod
    def get_map(map_id: int) -> Beatmap:
        """Gets a beatmap from chimu's API

        Params:
            - map_id: int = map id to be fetched.

        Returns:
            Beatmap class full of beatmap data.
        """
        request = requests.get(f"https://api.chimu.moe/v1/map/{map_id}").json()
        ChimuAPI._check_error(request)
        return ChimuAPI._parse_beatmap(request['data'])

    @staticmethod
    def get_set(set_id: int) -> BeatmapSet:
        """Gets a beatmap set from chimu's API

        Params:
            - set_id: int = set id to be fetched.

        Returns:
            BeatmapSet class full of beatmap set data.
        """
        request = requests.get(f"https://api.chimu.moe/v1/set/{set_id}").json()
        ChimuAPI._check_error(request)
        data = request['data']
        beatmaps = [ChimuAPI._parse_beatmap(child) for child in data['ChildrenBeatmaps']]
        return BeatmapSet(
            ChildrenBeatmaps=beatmaps,
            **{field: data[field] for field in ChimuAPI._SET_FIELDS},
        )

    @staticmethod
    def search(search_params: dict = None):
        """Search for a Beatmap.

        Params:
            - search_params: dict = Dict of params for search.

        Returns:
            Returns json callback data from request.
        """
        # A fresh dict per call; the original used a mutable default argument.
        if search_params is None:
            search_params = {}
        request = requests.get("https://api.chimu.moe/v1/search", params=search_params).json()
        ChimuAPI._check_error(request)
        return request['data']

    @staticmethod
    def download_file(set_id: int, key: str, state: str = "hcaptcha"):
        """Download a Beatmap.

        Params:
            - set_id: int = Set to be downloaded.
            - key: str = API key to download without captcha.
            - state: str = State of verification either of hcaptcha or success.

        Returns:
            Returns file bytes for user to save it.
        """
        request = requests.get(f"https://api.chimu.moe/v1/download/{set_id}", params={
            "k": key,
            "s": state
        })
        if request.status_code != 200:
            raise Exception(f"Map file of ID {set_id} couldnt be fetched!")
        return request.content
class AsyncChimuAPI:
    """Asynchronous ChimuAPI class for making requests."""

    # Field names copied verbatim from chimu.moe beatmap payloads; used to
    # build Beatmap objects without repeating every key by hand.
    _BEATMAP_FIELDS = (
        'BeatmapId', 'ParentSetId', 'DiffName', 'FileMD5', 'Mode', 'BPM',
        'AR', 'OD', 'CS', 'HP', 'TotalLength', 'HitLength', 'Playcount',
        'Passcount', 'MaxCombo', 'DifficultyRating', 'OsuFile', 'DownloadPath',
    )

    # Scalar fields of a beatmap-set payload (ChildrenBeatmaps handled apart).
    _SET_FIELDS = (
        'SetId', 'RankedStatus', 'ApprovedDate', 'LastUpdate', 'LastChecked',
        'Artist', 'Title', 'Creator', 'Source', 'Tags', 'HasVideo', 'Genre',
        'Language', 'Favourites', 'Disabled',
    )

    def __init__(self):
        pass

    @classmethod
    def _parse_beatmap(cls, raw: dict) -> Beatmap:
        """Build a Beatmap from one raw API beatmap dict."""
        return Beatmap(**{field: raw[field] for field in cls._BEATMAP_FIELDS})

    @staticmethod
    def _check_error(request: dict) -> None:
        """Raise if the API response carries a non-zero error code.

        A zero 'code' means success; anything else comes with a 'message'.
        """
        if request['code']:
            raise Exception(f"The error was debugged: {request['message']}")

    @staticmethod
    async def get_map(map_id: int):
        """Gets a beatmap from chimu's API

        Params:
            - map_id: int = map ID to be fetched.

        Returns:
            Beatmap class full of beatmap data.
        """
        async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
            async with session.get(f"https://api.chimu.moe/v1/map/{map_id}") as resp:
                request = await resp.json()
        AsyncChimuAPI._check_error(request)
        return AsyncChimuAPI._parse_beatmap(request['data'])

    @staticmethod
    async def get_set(set_id: int) -> BeatmapSet:
        """Gets a beatmap set from chimu's API

        Params:
            - set_id: int = set id to be fetched.

        Returns:
            BeatmapSet class full of beatmap set data.
        """
        async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
            async with session.get(f"https://api.chimu.moe/v1/set/{set_id}") as resp:
                request = await resp.json()
        AsyncChimuAPI._check_error(request)
        data = request['data']
        beatmaps = [AsyncChimuAPI._parse_beatmap(child) for child in data['ChildrenBeatmaps']]
        return BeatmapSet(
            ChildrenBeatmaps=beatmaps,
            **{field: data[field] for field in AsyncChimuAPI._SET_FIELDS},
        )

    @staticmethod
    async def search(search_params: dict = None):
        """Search for a Beatmap.

        Params:
            - search_params: dict = Dict of params for search.

        Returns:
            Returns json callback data from request.
        """
        # A fresh dict per call; the original used a mutable default argument.
        if search_params is None:
            search_params = {}
        async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
            async with session.get("https://api.chimu.moe/v1/search", params=search_params) as resp:
                request = await resp.json()
        AsyncChimuAPI._check_error(request)
        return request['data']

    @staticmethod
    async def download_file(set_id: int, key: str, state: str = "hcaptcha"):
        """Download a Beatmap.

        Params:
            - set_id: int = Set to be downloaded.
            - key: str = API key to download without captcha.
            - state: str = State of verification either of hcaptcha or success.

        Returns:
            Returns file bytes for user to save it.
        """
        async with aiohttp.ClientSession(json_serialize=orjson.dumps) as session:
            async with session.get(f"https://api.chimu.moe/v1/download/{set_id}", params={
                "k": key,
                "s": state
            }) as resp:
                if resp.status != 200:
                    raise Exception(f"Map file of ID {set_id} couldnt be fetched!")
                # Must read while the response is still open.
                return await resp.read()
| 36.117647
| 101
| 0.536283
| 1,101
| 11,052
| 5.338783
| 0.128065
| 0.130997
| 0.017693
| 0.021776
| 0.958489
| 0.958489
| 0.952705
| 0.946751
| 0.946751
| 0.946751
| 0
| 0.003234
| 0.328447
| 11,052
| 305
| 102
| 36.236066
| 0.788736
| 0.106678
| 0
| 0.828125
| 0
| 0
| 0.200091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0.03125
| 0.020833
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6231152e6fc5cf150c60575b33abb09a2c09dea1
| 5,739
|
py
|
Python
|
backend/base/migrations/0001_initial.py
|
AimeneNouri/Invetory-Management-WebApp
|
83db8ebecc315a00ff1b974af5ba31d44d0377a2
|
[
"MIT"
] | null | null | null |
backend/base/migrations/0001_initial.py
|
AimeneNouri/Invetory-Management-WebApp
|
83db8ebecc315a00ff1b974af5ba31d44d0377a2
|
[
"MIT"
] | null | null | null |
backend/base/migrations/0001_initial.py
|
AimeneNouri/Invetory-Management-WebApp
|
83db8ebecc315a00ff1b974af5ba31d44d0377a2
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2 on 2021-07-04 22:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema migration (auto-generated by Django 3.2).

    Creates the inventory-management tables: Fournisseurs (suppliers),
    Compte (staff accounts), Commande (orders), Client, Category and
    Article. Every model carries a nullable FK to the active user model
    with on_delete=SET_NULL, so rows survive user deletion.
    """

    # First migration for this app: builds all tables from scratch.
    initial = True

    dependencies = [
        # The swappable user model must exist before the FKs below.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Supplier contact record ("Fournisseurs" is French for suppliers).
        migrations.CreateModel(
            name='Fournisseurs',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=200, null=True)),
                ('lastname', models.CharField(blank=True, max_length=200, null=True)),
                ('adress', models.CharField(blank=True, max_length=200, null=True)),
                ('email', models.EmailField(blank=True, max_length=254, null=True)),
                ('city', models.CharField(blank=True, max_length=200, null=True)),
                ('phone', models.CharField(blank=True, max_length=20, null=True)),
                ('website', models.URLField(blank=True, max_length=254, null=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Staff/account record ("Compte" = account), including login data.
        # NOTE(review): 'password' is a plain CharField — presumably stored
        # unhashed; worth confirming against the app's auth flow.
        migrations.CreateModel(
            name='Compte',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=200, null=True)),
                ('lastname', models.CharField(blank=True, max_length=200, null=True)),
                ('adress', models.CharField(blank=True, max_length=200, null=True)),
                ('email', models.EmailField(blank=True, max_length=254, null=True)),
                ('city', models.CharField(blank=True, max_length=200, null=True)),
                ('phone', models.CharField(blank=True, max_length=20, null=True)),
                ('cin', models.CharField(blank=True, max_length=12, null=True)),
                ('image', models.ImageField(blank=True, null=True, upload_to='')),
                ('login', models.CharField(blank=True, max_length=200, null=True)),
                ('password', models.CharField(blank=True, max_length=200, null=True)),
                ('task', models.CharField(blank=True, max_length=200, null=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Order record ("Commande" = order): article list, date, state, qty.
        migrations.CreateModel(
            name='Commande',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('articleList', models.CharField(blank=True, max_length=200, null=True)),
                ('commandDate', models.DateTimeField()),
                ('etat', models.CharField(blank=True, max_length=20, null=True)),
                ('qte', models.IntegerField(blank=True, default=0, null=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Customer contact record; mirrors the Fournisseurs fields.
        migrations.CreateModel(
            name='Client',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=200, null=True)),
                ('lastname', models.CharField(blank=True, max_length=200, null=True)),
                ('adress', models.CharField(blank=True, max_length=200, null=True)),
                ('email', models.EmailField(blank=True, max_length=254, null=True)),
                ('city', models.CharField(blank=True, max_length=200, null=True)),
                ('phone', models.CharField(blank=True, max_length=20, null=True)),
                ('website', models.URLField(blank=True, max_length=254, null=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Article category lookup table.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=200, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Inventory item; category is a free-text CharField, not an FK to
        # the Category model above.
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=200, null=True)),
                ('price', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
                ('description', models.TextField(blank=True, null=True)),
                ('countInStock', models.IntegerField(blank=True, default=0, null=True)),
                ('image', models.ImageField(blank=True, null=True, upload_to='')),
                ('category', models.CharField(blank=True, max_length=200, null=True)),
                ('options', models.CharField(blank=True, max_length=200, null=True)),
                ('taille', models.CharField(blank=True, max_length=200, null=True)),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 57.969697
| 130
| 0.59993
| 640
| 5,739
| 5.254688
| 0.148438
| 0.104668
| 0.110616
| 0.165923
| 0.860244
| 0.860244
| 0.850431
| 0.850431
| 0.829616
| 0.72584
| 0
| 0.024611
| 0.249521
| 5,739
| 98
| 131
| 58.561224
| 0.756211
| 0.007493
| 0
| 0.681319
| 1
| 0
| 0.058307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.010989
| 0.032967
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0295f912cd7ea62d9bee67414a85b11bc7c13f77
| 4,465
|
py
|
Python
|
models/ebm.py
|
noahcao/icebeem
|
202b0cae8d98d45428c1a08d14c07a67184e3ca6
|
[
"MIT"
] | 48
|
2020-07-03T13:31:36.000Z
|
2022-03-18T17:48:49.000Z
|
models/ebm.py
|
noahcao/icebeem
|
202b0cae8d98d45428c1a08d14c07a67184e3ca6
|
[
"MIT"
] | 5
|
2020-08-07T03:33:26.000Z
|
2022-03-30T11:06:15.000Z
|
models/ebm.py
|
noahcao/icebeem
|
202b0cae8d98d45428c1a08d14c07a67184e3ca6
|
[
"MIT"
] | 15
|
2020-11-27T19:51:57.000Z
|
2021-12-09T21:00:40.000Z
|
import torch
import torch.nn.functional as F
from torch import nn
from .nets import CleanMLP
class UnnormalizedConditialEBM(nn.Module):
    """Unnormalized conditional energy model: E(x, y) = <f(x), g(y)>.

    ``f`` is a CleanMLP feature extractor, ``g`` a bias-free linear map of
    the conditioning variable. With ``positive`` both feature maps are
    passed through ReLU; with ``augment`` a squared-feature inner product
    is added. (Class name keeps the original "Conditial" spelling so
    existing imports keep working.)
    """

    def __init__(self, input_size, hidden_size, n_hidden, output_size, condition_size, activation='lrelu',
                 augment=False, positive=False):
        super().__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.cond_size = condition_size
        self.n_hidden = n_hidden
        self.activation = activation
        self.augment = augment
        self.positive = positive
        self.f = CleanMLP(input_size, hidden_size, n_hidden, output_size, activation=activation)
        self.g = nn.Linear(condition_size, output_size, bias=False)

    def forward(self, x, y):
        feat = self.f(x).view(-1, self.output_size)
        cond = self.g(y)
        if self.positive:
            feat = F.relu(feat)
            cond = F.relu(cond)
        energy = torch.einsum('bi,bi->b', [feat, cond])
        if self.augment:
            # Augmented energy adds the inner product of squared features.
            energy = energy + torch.einsum('bi,bi->b', [feat.pow(2), cond.pow(2)])
        return energy
class ModularUnnormalizedConditionalEBM(nn.Module):
    """Conditional EBM built from caller-supplied feature networks.

    Same energy as UnnormalizedConditialEBM, but ``f`` and ``g`` are
    injected modules (each must expose ``input_size``/``output_size``).
    """

    def __init__(self, f_net, g_net, augment=False, positive=False):
        super().__init__()
        # Both nets must map into the same feature dimension.
        assert f_net.output_size == g_net.output_size
        self.input_size = f_net.input_size
        self.output_size = f_net.output_size
        self.cond_size = g_net.input_size
        self.augment = augment
        self.positive = positive
        self.f = f_net
        self.g = g_net

    def forward(self, x, y):
        feat = self.f(x).view(-1, self.output_size)
        cond = self.g(y)
        if self.positive:
            feat = F.relu(feat)
            cond = F.relu(cond)
        energy = torch.einsum('bi,bi->b', [feat, cond])
        if self.augment:
            # Augmented energy adds the inner product of squared features.
            energy = energy + torch.einsum('bi,bi->b', [feat.pow(2), cond.pow(2)])
        return energy
class ConditionalEBM(UnnormalizedConditialEBM):
    """Conditional EBM with a learnable log-normalization constant.

    Bug fix: the previous ``forward`` called
    ``super().forward(x, y, augment, positive)``, but the parent's
    signature is ``forward(self, x, y)`` — every call raised TypeError.
    The flags are now applied through the ``self.augment`` /
    ``self.positive`` attributes, which is what the parent actually reads.
    """

    def __init__(self, input_size, hidden_size, n_hidden, output_size, condition_size, activation='lrelu'):
        super().__init__(input_size, hidden_size, n_hidden, output_size, condition_size, activation)
        # Learnable log partition estimate, initialised around -5.
        self.log_norm = nn.Parameter(torch.randn(1) - 5, requires_grad=True)

    def forward(self, x, y, augment=True, positive=False):
        # The parent forward reads these flags from attributes, not arguments.
        self.augment = augment
        self.positive = positive
        return super().forward(x, y) + self.log_norm
class ModularConditionalEBM(ModularUnnormalizedConditionalEBM):
    """Modular conditional EBM with a learnable log-normalization constant.

    Bug fix: the previous ``forward`` called
    ``super().forward(x, y, augment, positive)``, but the parent's
    signature is ``forward(self, x, y)`` — every call raised TypeError.
    The flags are now applied through the ``self.augment`` /
    ``self.positive`` attributes, which is what the parent actually reads.
    """

    def __init__(self, f_net, g_net):
        super().__init__(f_net, g_net)
        # Learnable log partition estimate, initialised around -5.
        self.log_norm = nn.Parameter(torch.randn(1) - 5, requires_grad=True)

    def forward(self, x, y, augment=True, positive=False):
        # The parent forward reads these flags from attributes, not arguments.
        self.augment = augment
        self.positive = positive
        return super().forward(x, y) + self.log_norm
class UnnormalizedEBM(nn.Module):
    """Unconditional unnormalized EBM: energy is the sum of f(x)'s features.

    ``g`` is a fixed (non-learnable) all-ones vector, so the einsum below
    reduces to summing the feature dimension.
    """

    def __init__(self, input_size, hidden_size, n_hidden, output_size, activation='lrelu'):
        super().__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.n_hidden = n_hidden
        self.activation = activation
        self.f = CleanMLP(input_size, hidden_size, n_hidden, output_size, activation=activation)
        # Plain tensor (not a Parameter): deliberately not trained.
        self.g = torch.ones(output_size)

    def forward(self, x, y=None):
        # y is accepted but ignored, keeping the conditional-EBM call shape.
        features = self.f(x).view(-1, self.output_size)
        return torch.einsum('bi,i->b', [features, self.g])
class ModularUnnormalizedEBM(nn.Module):
    """Unconditional unnormalized EBM around a caller-supplied feature net.

    ``f_net`` must expose ``input_size``/``output_size``; the energy is the
    sum of its output features (fixed all-ones ``g``).
    """

    def __init__(self, f_net):
        super().__init__()
        self.input_size = f_net.input_size
        self.output_size = f_net.output_size
        self.f = f_net
        # Plain tensor (not a Parameter): deliberately not trained.
        self.g = torch.ones(self.output_size)

    def forward(self, x, y=None):
        # y is accepted but ignored, keeping the conditional-EBM call shape.
        features = self.f(x).view(-1, self.output_size)
        return torch.einsum('bi,i->b', [features, self.g])
class EBM(UnnormalizedEBM):
    """EBM with a learnable log-normalization constant added to the energy."""

    def __init__(self, input_size, hidden_size, n_hidden, output_size, activation='lrelu'):
        super().__init__(input_size, hidden_size, n_hidden, output_size, activation)
        # Learnable log partition estimate, initialised around -5.
        self.log_norm = nn.Parameter(torch.randn(1) - 5, requires_grad=True)

    def forward(self, x, y=None):
        energy = super().forward(x, y)
        return energy + self.log_norm
class ModularEBM(ModularUnnormalizedEBM):
    """Modular EBM with a learnable log-normalization constant."""

    def __init__(self, f_net):
        super().__init__(f_net)
        # Learnable log partition estimate, initialised around -5.
        self.log_norm = nn.Parameter(torch.randn(1) - 5, requires_grad=True)

    def forward(self, x, y=None):
        energy = super().forward(x, y)
        return energy + self.log_norm
| 32.122302
| 107
| 0.643001
| 624
| 4,465
| 4.331731
| 0.107372
| 0.09249
| 0.051794
| 0.056234
| 0.829449
| 0.824269
| 0.795413
| 0.757677
| 0.72919
| 0.72919
| 0
| 0.004657
| 0.230459
| 4,465
| 138
| 108
| 32.355072
| 0.782014
| 0
| 0
| 0.702128
| 0
| 0
| 0.018365
| 0
| 0
| 0
| 0
| 0
| 0.010638
| 1
| 0.170213
| false
| 0
| 0.042553
| 0.042553
| 0.404255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02e052e583adad0ffd7b1fa21077cf92474f25a7
| 17,934
|
py
|
Python
|
pytests/tuqquery/n1ql_window_functions_syntax_check.py
|
ramalingam-cb/testrunner
|
81cea7a5a493cf0c67fca7f97c667cd3c6ad2142
|
[
"Apache-2.0"
] | null | null | null |
pytests/tuqquery/n1ql_window_functions_syntax_check.py
|
ramalingam-cb/testrunner
|
81cea7a5a493cf0c67fca7f97c667cd3c6ad2142
|
[
"Apache-2.0"
] | null | null | null |
pytests/tuqquery/n1ql_window_functions_syntax_check.py
|
ramalingam-cb/testrunner
|
81cea7a5a493cf0c67fca7f97c667cd3c6ad2142
|
[
"Apache-2.0"
] | null | null | null |
import copy
import itertools
import random
import string
import threading
from random import randint

from membase.api.exception import CBQError
from tuq import QueryTests
class WindowFunctionsSyntaxTest(QueryTests):
    def setUp(self):
        """Per-test setup: record the index definitions expected on test_bucket."""
        super(WindowFunctionsSyntaxTest, self).setUp()
        self.log_config_info()
        self.log.info("============== WindowFunctionsSyntaxTest setup has started ==============")
        # Expected index metadata: the primary index plus one secondary index
        # per field used by the generated window-function queries.
        self.primary_idx = {'name': '#primary', 'bucket': 'test_bucket', 'fields': (), 'state': 'online', 'using': self.index_type.lower(), 'is_primary': True}
        self.idx_1 = {'name': 'ix_char', 'bucket': 'test_bucket', 'fields': [('char_field', 0)], 'state': 'online', 'using': self.index_type.lower(), 'is_primary': False}
        self.idx_2 = {'name': 'ix_decimal', 'bucket': 'test_bucket', 'fields': [('decimal_field', 0)], 'state': 'online', 'using': self.index_type.lower(), 'is_primary': False}
        self.idx_3 = {'name': 'ix_int', 'bucket': 'test_bucket', 'fields': [('int_field', 0)], 'state': 'online', 'using': self.index_type.lower(), 'is_primary': False}
        self.indexes = [self.primary_idx, self.idx_1, self.idx_2, self.idx_3]
        # NOTE(review): banner says "WindowFunctionsTest" rather than
        # "WindowFunctionsSyntaxTest" — presumably a copy-paste slip; the
        # log string is left untouched here.
        self.log.info("============== WindowFunctionsTest setup has completed ==============")
    def tearDown(self):
        """Per-test teardown; cleanup is delegated to the base class."""
        self.log_config_info()
        self.log.info("============== WindowFunctionsSyntaxTest tearDown has started ==============")
        super(WindowFunctionsSyntaxTest, self).tearDown()
        self.log.info("============== WindowFunctionsSyntaxTest tearDown has completed ==============")
def suite_setUp(self):
super(WindowFunctionsSyntaxTest, self).suite_setUp()
self.init_nodes()
self.load_test_data("test_bucket")
self.create_primary_index('test_bucket')
self.create_secondary_indexes('test_bucket')
self.adopt_test_data("test_bucket")
self.log_config_info()
self.log.info("============== WindowFunctionsSyntaxTest suite_setup has started ==============")
self.log.info("============== WindowFunctionsSyntaxTest suite_setup has completed ==============")
    def suite_tearDown(self):
        """One-time suite teardown; cleanup is delegated to the base class."""
        self.log_config_info()
        self.log.info("============== WindowFunctionsSyntaxTest suite_tearDown has started ==============")
        super(WindowFunctionsSyntaxTest, self).suite_tearDown()
        self.log.info("============== WindowFunctionsSyntaxTest suite_tearDown has completed ==============")
def run_all(self):
self.test_from_select_batches()
self.test_select_from_batches()
def generate_from_select_queries(self):
result = []
counter = 0
window_function_values = [' LAST_VALUE(t1.decimal_field) OVER (PARTITION BY t1.char_field ORDER BY t1.decimal_field RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) ']
alias_values = [' wf ']
bucket_alias_values = [' ', ' as ']
let_where_values = [' ', ' where t1.int_field > 1000 ', ' let int_val=1000 where t1.int_field > int_val ']
group_by_values = [' ', ' group by t1.char_field, t1.decimal_field ']
letting_having_values = [' ', ' having t1.char_field="E" ', ' letting char_val="E" having t1.char_field=char_val ']
order_by_values = [' ', ' order by t1.char_field ']
asc_desc_values = [' ', ' asc ', ' desc ']
limit_values = [' ', ' limit 100 ']
offset_values = [' ', ' offset 10 ']
join_values = [' ', ' inner join ', ' left join ', ' left outer join ', ' inner nest ', ' left nest ']
union_values = [' ', ' union ', ' union all ']
namespace_values = ['', 'default:']
use_keys_values = [' ', ' use primary keys[\'test\'] ', ' use keys[\'test\'] ', ' use index(`ix_char`) ', ' use index(`ix_char`, `ix_decimal`) ', ' use index(`ix_char` using gsi) ']
join_predicate_values = [' on t1.primary_key=t2.primary_key ', ' on primary keys[\'test\']', ' on keys t1.char_field ', ' on key t2.char_field for t1 ', ' on primary key t2.primary_key for t1 ']
unnest_flatten_values = [' unnest ', ' left unnest ', ' flatten ', ' left flatten ']
for window_function_value in window_function_values:
for alias_value in alias_values:
for let_where_value in let_where_values:
for group_by_value in group_by_values:
for letting_having_value in letting_having_values:
if group_by_value == ' ':
letting_having_value = ' '
for order_by_value in order_by_values:
for asc_desc_value in asc_desc_values:
if order_by_value == ' ':
asc_desc_value = ' '
for limit_value in limit_values:
for offset_value in offset_values:
for bucket_alias_value in bucket_alias_values:
for namespace_value in namespace_values:
for use_keys_value in use_keys_values:
for unnest_flatten_value in unnest_flatten_values:
for join_value in join_values:
for join_predicate_value in join_predicate_values:
join_expression = ' '
if join_value!=' ':
join_expression = join_value+' '+namespace_value+'test_bucket '+bucket_alias_value+' t2 '+use_keys_value+join_predicate_value
else:
join_expression = unnest_flatten_value+' t1.char_field '
for union_value in union_values:
union_left_parenthesis = ''
union_right_parenthesis = ''
right_union_expression = ''
if union_value!=' ':
union_left_parenthesis='('
union_right_parenthesis=')'
right_union_expression = union_left_parenthesis+"select t1.char_field, t1.decimal_field, "+window_function_value+alias_value+" " \
"from "+namespace_value+"test_bucket "+bucket_alias_value+" t1 "+use_keys_value+join_expression+let_where_value+group_by_value+letting_having_value+ \
order_by_value+asc_desc_value+limit_value+offset_value+union_right_parenthesis
query = "from ("+union_left_parenthesis+"select t1.char_field, t1.decimal_field, "+window_function_value+alias_value+" " \
"from "+namespace_value+"test_bucket "+bucket_alias_value+" t1 "+use_keys_value+join_expression+let_where_value+group_by_value+letting_having_value+ \
order_by_value+asc_desc_value+limit_value+offset_value+union_right_parenthesis+union_value+right_union_expression+") a select a.wf"
result.append(query)
counter+=1
return result
def generate_select_from_queries(self):
    """Generate the cartesian product of N1QL SELECT query variants.

    Combines one window function with every pairing of: aliasing, LET/WHERE,
    GROUP BY (+LETTING/HAVING), ORDER BY (+ASC/DESC), LIMIT, OFFSET, bucket
    alias, namespace prefix, USE KEYS/INDEX hints, JOIN vs UNNEST/FLATTEN,
    and UNION / UNION ALL.  Returns the resulting query strings as a list.

    NOTE(review): each option value carries its own surrounding spaces, so
    the queries are assembled by plain concatenation; ' ' is the sentinel
    for "clause absent".
    """
    result = []
    counter = 0  # tally of generated queries; not otherwise used
    window_function_values = [' LAST_VALUE(t1.decimal_field) OVER (PARTITION BY t1.char_field ORDER BY t1.decimal_field RANGE BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) ']
    alias_values = [' wf ']
    bucket_alias_values = [' ', ' as ']
    let_where_values = [' ', ' where t1.int_field > 1000 ', ' let int_val=1000 where t1.int_field > int_val ']
    group_by_values = [' ', ' group by t1.char_field, t1.decimal_field ']
    letting_having_values = [' ', ' having t1.char_field="E" ', ' letting char_val="E" having t1.char_field=char_val ']
    order_by_values = [' ', ' order by t1.char_field ']
    asc_desc_values = [' ', ' asc ', ' desc ']
    limit_values = [' ', ' limit 100 ']
    offset_values = [' ', ' offset 10 ']
    join_values = [' ', ' inner join ', ' left join ', ' left outer join ', ' inner nest ', ' left nest ']
    union_values = [' ', ' union ', ' union all ']
    namespace_values = ['', 'default:']
    use_keys_values = [' ', ' use primary keys[\'test\'] ', ' use keys[\'test\'] ', ' use index(`ix_char`) ', ' use index(`ix_char`, `ix_decimal`) ', ' use index(`ix_char` using gsi) ']
    join_predicate_values = [' on t1.primary_key=t2.primary_key ', ' on primary keys[\'test\']', ' on keys t1.char_field ', ' on key t2.char_field for t1 ', ' on primary key t2.primary_key for t1 ']
    unnest_flatten_values = [' unnest ', ' left unnest ', ' flatten ', ' left flatten ']
    for window_function_value in window_function_values:
        for alias_value in alias_values:
            for let_where_value in let_where_values:
                for group_by_value in group_by_values:
                    for letting_having_value in letting_having_values:
                        # LETTING/HAVING only makes sense with GROUP BY;
                        # overriding the loop variable blanks it for this
                        # iteration (duplicate queries are deliberate).
                        if group_by_value == ' ':
                            letting_having_value = ' '
                        for order_by_value in order_by_values:
                            for asc_desc_value in asc_desc_values:
                                # ASC/DESC requires an ORDER BY clause
                                if order_by_value == ' ':
                                    asc_desc_value = ' '
                                for limit_value in limit_values:
                                    for offset_value in offset_values:
                                        for bucket_alias_value in bucket_alias_values:
                                            for namespace_value in namespace_values:
                                                for use_keys_value in use_keys_values:
                                                    for unnest_flatten_value in unnest_flatten_values:
                                                        for join_value in join_values:
                                                            for join_predicate_value in join_predicate_values:
                                                                # Either a JOIN/NEST against a second bucket alias t2,
                                                                # or an UNNEST/FLATTEN of t1.char_field.
                                                                join_expression = ' '
                                                                if join_value!=' ':
                                                                    join_expression = join_value+' '+namespace_value+'test_bucket '+bucket_alias_value+' t2 '+use_keys_value+join_predicate_value
                                                                else:
                                                                    join_expression = unnest_flatten_value+' t1.char_field '
                                                                for union_value in union_values:
                                                                    union_left_parenthesis = ''
                                                                    union_right_parenthesis = ''
                                                                    right_union_expression = ''
                                                                    # When a UNION arm is present both sides are
                                                                    # parenthesized and the right side repeats the
                                                                    # same SELECT shape.
                                                                    if union_value!=' ':
                                                                        union_left_parenthesis='('
                                                                        union_right_parenthesis=')'
                                                                        right_union_expression = union_left_parenthesis+"select t1.char_field, t1.decimal_field, "+window_function_value+alias_value+" " \
                                                                            "from "+namespace_value+"test_bucket "+bucket_alias_value+" t1 "+use_keys_value+join_expression+let_where_value+group_by_value+letting_having_value+ \
                                                                            order_by_value+asc_desc_value+limit_value+offset_value+union_right_parenthesis
                                                                    query = union_left_parenthesis+"select t1.char_field, t1.decimal_field, "+window_function_value+alias_value+\
                                                                        " from "+namespace_value+"test_bucket "+bucket_alias_value+" t1 "+use_keys_value+join_expression+let_where_value+group_by_value+\
                                                                        letting_having_value+order_by_value+asc_desc_value+limit_value+offset_value+union_right_parenthesis+union_value+right_union_expression
                                                                    result.append(query)
                                                                    counter+=1
    return result
def test_from_select_batches(self):
    """Run all generated FROM-SELECT queries on worker threads, 4 at a time.

    Each batch is executed on daemon threads; the test waits for every
    thread in a batch to finish before starting the next batch.  Query
    failures are reported by the _run_test worker.
    """
    queries = self.generate_from_select_queries()
    batches = self.produce_batches(queries, 4)
    for batch in batches:
        threads = []
        for query in batch:
            t = threading.Thread(target=self._run_test, args=(query,))
            t.daemon = True
            threads.append(t)
            t.start()
        # BUG FIX: the original called threads.remove(th) while iterating
        # over the same list, which skips every other element -- so half
        # of the workers were never joined and could outlive the batch.
        for t in threads:
            t.join()
def _run_test(self, query):
    """Execute one N1QL query; mark the test failed if it raises CBQError.

    Worker body for the batch-runner tests.  The always-false assertEquals
    is used purely to fail with the offending query text in the message.
    """
    try:
        self.run_cbq_query(query)
    except CBQError, e:  # Python 2 except syntax; exception details unused
        self.assertEquals('True', 'False', 'Wrong query - '+str(query))
def test_select_from_batches(self):
    """Run all generated SELECT-FROM queries on worker threads, 4 at a time.

    Each batch is executed on daemon threads; the test waits for every
    thread in a batch to finish before starting the next batch.  Query
    failures are reported by the _run_test worker.
    """
    queries = self.generate_select_from_queries()
    batches = self.produce_batches(queries, 4)
    for batch in batches:
        threads = []
        for query in batch:
            t = threading.Thread(target=self._run_test, args=(query,))
            t.daemon = True
            threads.append(t)
            t.start()
        # BUG FIX: the original called threads.remove(th) while iterating
        # over the same list, which skips every other element -- so half
        # of the workers were never joined and could outlive the batch.
        for t in threads:
            t.join()
def produce_batches(self, queries, batch_size):
    """Split *queries* into consecutive batches of at most *batch_size*.

    Returns a list of lists preserving input order; the final batch may be
    shorter than *batch_size*.

    BUG FIX: the original counter-based loop only flushed the accumulator
    when it overflowed into the next batch, so the batch still being
    accumulated when the input ran out was silently dropped -- including
    the final full batch when len(queries) was an exact multiple of
    batch_size.  Those queries were never executed by the batch tests.
    """
    result = []
    for start in range(0, len(queries), batch_size):
        result.append(queries[start:start + batch_size])
    return result
def init_nodes(self):
    """Create the standard 'test_bucket' used by the window-function tests."""
    bucket_params = self._create_bucket_params(
        server=self.master,
        size=self.bucket_size,
        replicas=self.num_replicas,
        bucket_type=self.bucket_type,
        enable_replica_index=self.enable_replica_index,
        eviction_policy=self.eviction_policy,
        lww=self.lww)
    self.cluster.create_standard_bucket("test_bucket", 11222, bucket_params)
def load_test_data(self, bucket_name='test_bucket'):
    """Insert randomly generated test document(s) into *bucket_name*.

    Each document carries a char_field (random uppercase letter), a
    decimal_field and an int_field with random values.
    """
    for doc_num in range(0, 1, 1):
        key = 'primary_key_' + str(doc_num)
        statement = " INSERT INTO {0} (KEY, VALUE) VALUES ('".format(bucket_name)
        statement += key + "',"
        statement += "{"
        statement += "'primary_key':'" + key + "','char_field':'" + random.choice(string.ascii_uppercase) + "'"
        statement += ",'decimal_field':" + str(round(10000*random.random(), 0))
        statement += ",'int_field':" + str(randint(0, 100000000)) + "})"
        self.run_cbq_query(statement)
def adopt_test_data(self, bucket_name='test_bucket'):
    """Mutate the loaded documents to create null/missing/constant cases.

    Produces per-letter edge cases for the window-function queries:
    A -> null, B -> missing, C -> mixed null/missing, D/E -> constants.
    """
    updates = (
        "update {0} set decimal_field=null where char_field='A'",
        "update {0} set decimal_field=missing where char_field='B'",
        "update {0} set decimal_field=null where char_field='C' and decimal_field%2=0",
        "update {0} set decimal_field=missing where char_field='C' and decimal_field%3=0",
        "update {0} set decimal_field=2 where char_field='D' and decimal_field%2=0",
        "update {0} set decimal_field=1 where char_field='E'",
    )
    for update in updates:
        self.run_cbq_query(update.format(bucket_name))
def create_primary_index(self, bucket_name='test_bucket'):
    """Create the default primary index on *bucket_name*."""
    statement = "CREATE PRIMARY INDEX `#primary` ON `{0}`".format(bucket_name)
    self.run_cbq_query(statement)
def create_secondary_indexes(self, bucket_name='test_bucket'):
    """Create one secondary index per test document field on *bucket_name*."""
    index_specs = (
        ('ix_char', 'char_field'),
        ('ix_decimal', 'decimal_field'),
        ('ix_int', 'int_field'),
        ('ix_primary', 'primary_key'),
    )
    for index_name, field_name in index_specs:
        self.run_cbq_query('CREATE INDEX {0} ON {1}({2});'.format(index_name, bucket_name, field_name))
| 67.675472
| 226
| 0.510873
| 1,783
| 17,934
| 4.803141
| 0.102075
| 0.026156
| 0.02312
| 0.02277
| 0.801845
| 0.764946
| 0.748599
| 0.723143
| 0.707613
| 0.692433
| 0
| 0.012567
| 0.387699
| 17,934
| 265
| 227
| 67.675472
| 0.767325
| 0
| 0
| 0.62931
| 0
| 0.008621
| 0.208642
| 0.022414
| 0
| 0
| 0
| 0
| 0.00431
| 0
| null | null | 0
| 0.030172
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f307c0ab763fb7fbaecd2eff21d0eaf3eccddf92
| 150
|
py
|
Python
|
msbd/metriche/__init__.py
|
mnslarcher/metodi-statistici-big-data
|
4587b4e4104557e50d09d028259d6c42c44d2814
|
[
"MIT"
] | 1
|
2019-02-17T09:28:04.000Z
|
2019-02-17T09:28:04.000Z
|
msbd/metriche/__init__.py
|
mnslarcher/metodi-statistici-big-data
|
4587b4e4104557e50d09d028259d6c42c44d2814
|
[
"MIT"
] | null | null | null |
msbd/metriche/__init__.py
|
mnslarcher/metodi-statistici-big-data
|
4587b4e4104557e50d09d028259d6c42c44d2814
|
[
"MIT"
] | null | null | null |
from .metriche import criterio_informazione_akaike
from .metriche import MetricheClassificazione
from .metriche import radice_errore_quadratico_medio
| 37.5
| 52
| 0.9
| 17
| 150
| 7.647059
| 0.647059
| 0.276923
| 0.415385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 150
| 3
| 53
| 50
| 0.942029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b87f05e7c7523d69c9a15fc54426113818019ff2
| 24,107
|
py
|
Python
|
nintendo/nex/authentication.py
|
azillion/DodoTrafficControl
|
9aa014f6d1ac3ad4ea5747d7ded4749ea60f7422
|
[
"MIT"
] | 209
|
2017-05-15T19:38:34.000Z
|
2020-11-30T03:31:07.000Z
|
nintendo/nex/authentication.py
|
azillion/DodoTrafficControl
|
9aa014f6d1ac3ad4ea5747d7ded4749ea60f7422
|
[
"MIT"
] | 44
|
2018-07-06T16:08:54.000Z
|
2020-11-29T20:04:32.000Z
|
nintendo/nex/authentication.py
|
azillion/DodoTrafficControl
|
9aa014f6d1ac3ad4ea5747d7ded4749ea60f7422
|
[
"MIT"
] | 34
|
2017-05-23T17:35:57.000Z
|
2020-11-29T17:37:16.000Z
|
# This file was generated automatically by generate_protocols.py
from nintendo.nex import notification, rmc, common, streams
import logging
logger = logging.getLogger(__name__)
class AuthenticationInfo(common.Data):
    """Serializable login data: a token plus protocol version fields.

    Generated code (see file header) -- do not restyle by hand.
    """
    def __init__(self):
        super().__init__()
        self.token = None        # required; validated in check_required
        self.ngs_version = 3
        self.token_type = 1
        self.server_version = 0
    def check_required(self, settings, version):
        # 'token' is the only field without a usable default
        for field in ['token']:
            if getattr(self, field) is None:
                raise ValueError("No value assigned to required field: %s" %field)
    def load(self, stream, version):
        # Field order must mirror save()
        self.token = stream.string()
        self.ngs_version = stream.u32()
        self.token_type = stream.u8()
        self.server_version = stream.u32()
    def save(self, stream, version):
        self.check_required(stream.settings, version)
        stream.string(self.token)
        stream.u32(self.ngs_version)
        stream.u8(self.token_type)
        stream.u32(self.server_version)
# Register under its class name so it can be decoded from DataHolder payloads
common.DataHolder.register(AuthenticationInfo, "AuthenticationInfo")
class RVConnectionData(common.Structure):
    """Connection endpoints extracted from login responses.

    Structure version 1 adds server_time; it is only used when the
    "nex.version" setting is >= 30500.  Generated code -- do not restyle.
    """
    def __init__(self):
        super().__init__()
        self.main_station = common.StationURL.parse("prudp:/")
        self.special_protocols = []
        self.special_station = common.StationURL.parse("prudp:/")
        self.server_time = common.DateTime(0)
    def max_version(self, settings):
        # Highest structure version supported for the configured NEX version
        version = 0
        if settings["nex.version"] >= 30500:
            version = 1
        return version
    def check_required(self, settings, version):
        # All fields have defaults; nothing to validate
        if settings["nex.version"] >= 30500:
            if version >= 1:
                pass
    def load(self, stream, version):
        self.main_station = stream.stationurl()
        self.special_protocols = stream.list(stream.u8)
        self.special_station = stream.stationurl()
        # server_time is only present from structure version 1 onwards
        if stream.settings["nex.version"] >= 30500:
            if version >= 1:
                self.server_time = stream.datetime()
    def save(self, stream, version):
        self.check_required(stream.settings, version)
        stream.stationurl(self.main_station)
        stream.list(self.special_protocols, stream.u8)
        stream.stationurl(self.special_station)
        if stream.settings["nex.version"] >= 30500:
            if version >= 1:
                stream.datetime(self.server_time)
class ValidateAndRequestTicketParam(common.Structure):
    """Input structure for validate_and_request_ticket_with_param.

    Generated code -- do not restyle by hand.
    """
    def __init__(self):
        super().__init__()
        self.platform = 3
        # The remaining fields are required and validated in check_required
        self.username = None
        self.data = None
        self.skip_version_check = False
        self.nex_version = None
        self.client_version = None
    def check_required(self, settings, version):
        for field in ['username', 'data', 'nex_version', 'client_version']:
            if getattr(self, field) is None:
                raise ValueError("No value assigned to required field: %s" %field)
    def load(self, stream, version):
        # Field order must mirror save()
        self.platform = stream.u32()
        self.username = stream.string()
        self.data = stream.anydata()
        self.skip_version_check = stream.bool()
        self.nex_version = stream.u32()
        self.client_version = stream.u32()
    def save(self, stream, version):
        self.check_required(stream.settings, version)
        stream.u32(self.platform)
        stream.string(self.username)
        stream.anydata(self.data)
        stream.bool(self.skip_version_check)
        stream.u32(self.nex_version)
        stream.u32(self.client_version)
class ValidateAndRequestTicketResult(common.Structure):
    """Result structure returned by validate_and_request_ticket_with_param.

    Generated code -- do not restyle by hand.
    """
    def __init__(self):
        super().__init__()
        # All fields are required and validated in check_required
        self.pid = None
        self.ticket = None
        self.server_url = None
        self.server_time = None
        self.server_name = None
        self.source_key = None
    def check_required(self, settings, version):
        for field in ['pid', 'ticket', 'server_url', 'server_time', 'server_name', 'source_key']:
            if getattr(self, field) is None:
                raise ValueError("No value assigned to required field: %s" %field)
    def load(self, stream, version):
        # Field order must mirror save()
        self.pid = stream.pid()
        self.ticket = stream.buffer()
        self.server_url = stream.stationurl()
        self.server_time = stream.datetime()
        self.server_name = stream.string()
        self.source_key = stream.string()
    def save(self, stream, version):
        self.check_required(stream.settings, version)
        stream.pid(self.pid)
        stream.buffer(self.ticket)
        stream.stationurl(self.server_url)
        stream.datetime(self.server_time)
        stream.string(self.server_name)
        stream.string(self.source_key)
class AuthenticationProtocol:
    """Method IDs for the standard authentication protocol (id 0xA)."""
    METHOD_LOGIN = 1
    METHOD_LOGIN_EX = 2
    METHOD_REQUEST_TICKET = 3
    METHOD_GET_PID = 4
    METHOD_GET_NAME = 5
    METHOD_LOGIN_WITH_CONTEXT = 6

    PROTOCOL_ID = 0xA
class AuthenticationProtocolNX:
    """Method IDs for the NX variant of the authentication protocol (id 0xA).

    Shares the protocol id with AuthenticationProtocol but replaces the
    login methods with validate_and_request_ticket variants.
    """
    METHOD_VALIDATE_AND_REQUEST_TICKET = 1
    METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_CUSTOM_DATA = 2
    METHOD_REQUEST_TICKET = 3
    METHOD_GET_PID = 4
    METHOD_GET_NAME = 5
    METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_PARAM = 6

    PROTOCOL_ID = 0xA
class AuthenticationClient(AuthenticationProtocol):
    """Client-side stubs for the standard authentication protocol.

    Each method serializes its arguments into a StreamOut, performs the
    remote call through the underlying client, and parses the response
    from a StreamIn.  A response with unread trailing bytes raises
    ValueError.  Generated code -- do not restyle by hand.
    """
    def __init__(self, client):
        self.settings = client.settings
        self.client = client
    async def login(self, username):
        """METHOD_LOGIN: returns result, pid, ticket, connection_data, server_name."""
        logger.info("AuthenticationClient.login()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.string(username)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_LOGIN, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.pid = stream.pid()
        obj.ticket = stream.buffer()
        obj.connection_data = stream.extract(RVConnectionData)
        obj.server_name = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClient.login -> done")
        return obj
    async def login_ex(self, username, extra_data):
        """METHOD_LOGIN_EX: like login, with an extra anydata payload."""
        logger.info("AuthenticationClient.login_ex()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.string(username)
        stream.anydata(extra_data)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_LOGIN_EX, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.pid = stream.pid()
        obj.ticket = stream.buffer()
        obj.connection_data = stream.extract(RVConnectionData)
        obj.server_name = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClient.login_ex -> done")
        return obj
    async def request_ticket(self, source, target):
        """METHOD_REQUEST_TICKET: returns result and ticket for a source/target pid pair."""
        logger.info("AuthenticationClient.request_ticket()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.pid(source)
        stream.pid(target)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_REQUEST_TICKET, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.ticket = stream.buffer()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClient.request_ticket -> done")
        return obj
    async def get_pid(self, username):
        """METHOD_GET_PID: look up the pid for a username."""
        logger.info("AuthenticationClient.get_pid()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.string(username)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_GET_PID, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        pid = stream.pid()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClient.get_pid -> done")
        return pid
    async def get_name(self, pid):
        """METHOD_GET_NAME: look up the username for a pid."""
        logger.info("AuthenticationClient.get_name()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.pid(pid)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_GET_NAME, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        name = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClient.get_name -> done")
        return name
    async def login_with_context(self, login_data):
        """METHOD_LOGIN_WITH_CONTEXT: login with an anydata payload; no server_name in the response."""
        logger.info("AuthenticationClient.login_with_context()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.anydata(login_data)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_LOGIN_WITH_CONTEXT, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.pid = stream.pid()
        obj.ticket = stream.buffer()
        obj.connection_data = stream.extract(RVConnectionData)
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClient.login_with_context -> done")
        return obj
class AuthenticationClientNX(AuthenticationProtocolNX):
    """Client-side stubs for the NX authentication protocol.

    Same request/response shape as AuthenticationClient; the login methods
    are replaced by validate_and_request_ticket variants.  Generated code
    -- do not restyle by hand.
    """
    def __init__(self, client):
        self.settings = client.settings
        self.client = client
    async def validate_and_request_ticket(self, username):
        """METHOD_VALIDATE_AND_REQUEST_TICKET: returns result, pid, ticket, connection_data, server_name."""
        logger.info("AuthenticationClientNX.validate_and_request_ticket()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.string(username)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_VALIDATE_AND_REQUEST_TICKET, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.pid = stream.pid()
        obj.ticket = stream.buffer()
        obj.connection_data = stream.extract(RVConnectionData)
        obj.server_name = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClientNX.validate_and_request_ticket -> done")
        return obj
    async def validate_and_request_ticket_with_custom_data(self, username, extra_data):
        """METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_CUSTOM_DATA: adds an anydata payload and a source_key in the response."""
        logger.info("AuthenticationClientNX.validate_and_request_ticket_with_custom_data()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.string(username)
        stream.anydata(extra_data)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_CUSTOM_DATA, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.pid = stream.pid()
        obj.ticket = stream.buffer()
        obj.connection_data = stream.extract(RVConnectionData)
        obj.server_name = stream.string()
        obj.source_key = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClientNX.validate_and_request_ticket_with_custom_data -> done")
        return obj
    async def request_ticket(self, source, target):
        """METHOD_REQUEST_TICKET: NX variant additionally returns a key string."""
        logger.info("AuthenticationClientNX.request_ticket()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.pid(source)
        stream.pid(target)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_REQUEST_TICKET, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        obj = rmc.RMCResponse()
        obj.result = stream.result()
        obj.ticket = stream.buffer()
        obj.key = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClientNX.request_ticket -> done")
        return obj
    async def get_pid(self, username):
        """METHOD_GET_PID: look up the pid for a username."""
        logger.info("AuthenticationClientNX.get_pid()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.string(username)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_GET_PID, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        pid = stream.pid()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClientNX.get_pid -> done")
        return pid
    async def get_name(self, pid):
        """METHOD_GET_NAME: look up the username for a pid."""
        logger.info("AuthenticationClientNX.get_name()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.pid(pid)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_GET_NAME, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        name = stream.string()
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClientNX.get_name -> done")
        return name
    async def validate_and_request_ticket_with_param(self, param):
        """METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_PARAM: structured request/result variant."""
        logger.info("AuthenticationClientNX.validate_and_request_ticket_with_param()")
        #--- request ---
        stream = streams.StreamOut(self.settings)
        stream.add(param)
        data = await self.client.request(self.PROTOCOL_ID, self.METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_PARAM, stream.get())
        #--- response ---
        stream = streams.StreamIn(data, self.settings)
        result = stream.extract(ValidateAndRequestTicketResult)
        if not stream.eof():
            raise ValueError("Response is bigger than expected (got %i bytes, but only %i were read)" %(stream.size(), stream.tell()))
        logger.info("AuthenticationClientNX.validate_and_request_ticket_with_param -> done")
        return result
class AuthenticationServer(AuthenticationProtocol):
    """Server-side dispatcher for the standard authentication protocol.

    handle() routes incoming method ids to the handle_* wrappers, which
    decode the request, call the corresponding overridable coroutine
    (login, login_ex, ...) and encode its response.  The overridable
    methods at the bottom raise Core::NotImplemented by default; subclass
    and override them to implement the protocol.  Generated code -- do
    not restyle by hand.
    """
    def __init__(self):
        # Method-id -> handler dispatch table
        self.methods = {
            self.METHOD_LOGIN: self.handle_login,
            self.METHOD_LOGIN_EX: self.handle_login_ex,
            self.METHOD_REQUEST_TICKET: self.handle_request_ticket,
            self.METHOD_GET_PID: self.handle_get_pid,
            self.METHOD_GET_NAME: self.handle_get_name,
            self.METHOD_LOGIN_WITH_CONTEXT: self.handle_login_with_context,
        }
    async def logout(self, client):
        # Hook invoked on client logout; no-op by default
        pass
    async def handle(self, client, method_id, input, output):
        """Dispatch one incoming call; unknown ids raise Core::NotImplemented."""
        if method_id in self.methods:
            await self.methods[method_id](client, input, output)
        else:
            logger.warning("Unknown method called on AuthenticationServer: %i", method_id)
            raise common.RMCError("Core::NotImplemented")
    async def handle_login(self, client, input, output):
        logger.info("AuthenticationServer.login()")
        #--- request ---
        username = input.string()
        response = await self.login(client, username)
        #--- response ---
        # Validate the subclass's response shape before encoding it
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'pid', 'ticket', 'connection_data', 'server_name']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.pid(response.pid)
        output.buffer(response.ticket)
        output.add(response.connection_data)
        output.string(response.server_name)
    async def handle_login_ex(self, client, input, output):
        logger.info("AuthenticationServer.login_ex()")
        #--- request ---
        username = input.string()
        extra_data = input.anydata()
        response = await self.login_ex(client, username, extra_data)
        #--- response ---
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'pid', 'ticket', 'connection_data', 'server_name']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.pid(response.pid)
        output.buffer(response.ticket)
        output.add(response.connection_data)
        output.string(response.server_name)
    async def handle_request_ticket(self, client, input, output):
        logger.info("AuthenticationServer.request_ticket()")
        #--- request ---
        source = input.pid()
        target = input.pid()
        response = await self.request_ticket(client, source, target)
        #--- response ---
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'ticket']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.buffer(response.ticket)
    async def handle_get_pid(self, client, input, output):
        logger.info("AuthenticationServer.get_pid()")
        #--- request ---
        username = input.string()
        response = await self.get_pid(client, username)
        #--- response ---
        if not isinstance(response, int):
            raise RuntimeError("Expected int, got %s" %response.__class__.__name__)
        output.pid(response)
    async def handle_get_name(self, client, input, output):
        logger.info("AuthenticationServer.get_name()")
        #--- request ---
        pid = input.pid()
        response = await self.get_name(client, pid)
        #--- response ---
        if not isinstance(response, str):
            raise RuntimeError("Expected str, got %s" %response.__class__.__name__)
        output.string(response)
    async def handle_login_with_context(self, client, input, output):
        logger.info("AuthenticationServer.login_with_context()")
        #--- request ---
        login_data = input.anydata()
        response = await self.login_with_context(client, login_data)
        #--- response ---
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'pid', 'ticket', 'connection_data']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.pid(response.pid)
        output.buffer(response.ticket)
        output.add(response.connection_data)
    # --- Override these in a subclass to implement the protocol ---
    async def login(self, *args):
        logger.warning("AuthenticationServer.login not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def login_ex(self, *args):
        logger.warning("AuthenticationServer.login_ex not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def request_ticket(self, *args):
        logger.warning("AuthenticationServer.request_ticket not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def get_pid(self, *args):
        logger.warning("AuthenticationServer.get_pid not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def get_name(self, *args):
        logger.warning("AuthenticationServer.get_name not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def login_with_context(self, *args):
        logger.warning("AuthenticationServer.login_with_context not implemented")
        raise common.RMCError("Core::NotImplemented")
class AuthenticationServerNX(AuthenticationProtocolNX):
    """Server-side dispatcher for the NX authentication protocol.

    Same structure as AuthenticationServer: handle() routes method ids to
    handle_* wrappers, which call the overridable coroutines at the bottom
    (Core::NotImplemented by default).  Generated code -- do not restyle
    by hand.
    """
    def __init__(self):
        # Method-id -> handler dispatch table
        self.methods = {
            self.METHOD_VALIDATE_AND_REQUEST_TICKET: self.handle_validate_and_request_ticket,
            self.METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_CUSTOM_DATA: self.handle_validate_and_request_ticket_with_custom_data,
            self.METHOD_REQUEST_TICKET: self.handle_request_ticket,
            self.METHOD_GET_PID: self.handle_get_pid,
            self.METHOD_GET_NAME: self.handle_get_name,
            self.METHOD_VALIDATE_AND_REQUEST_TICKET_WITH_PARAM: self.handle_validate_and_request_ticket_with_param,
        }
    async def logout(self, client):
        # Hook invoked on client logout; no-op by default
        pass
    async def handle(self, client, method_id, input, output):
        """Dispatch one incoming call; unknown ids raise Core::NotImplemented."""
        if method_id in self.methods:
            await self.methods[method_id](client, input, output)
        else:
            logger.warning("Unknown method called on AuthenticationServerNX: %i", method_id)
            raise common.RMCError("Core::NotImplemented")
    async def handle_validate_and_request_ticket(self, client, input, output):
        logger.info("AuthenticationServerNX.validate_and_request_ticket()")
        #--- request ---
        username = input.string()
        response = await self.validate_and_request_ticket(client, username)
        #--- response ---
        # Validate the subclass's response shape before encoding it
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'pid', 'ticket', 'connection_data', 'server_name']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.pid(response.pid)
        output.buffer(response.ticket)
        output.add(response.connection_data)
        output.string(response.server_name)
    async def handle_validate_and_request_ticket_with_custom_data(self, client, input, output):
        logger.info("AuthenticationServerNX.validate_and_request_ticket_with_custom_data()")
        #--- request ---
        username = input.string()
        extra_data = input.anydata()
        response = await self.validate_and_request_ticket_with_custom_data(client, username, extra_data)
        #--- response ---
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'pid', 'ticket', 'connection_data', 'server_name', 'source_key']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.pid(response.pid)
        output.buffer(response.ticket)
        output.add(response.connection_data)
        output.string(response.server_name)
        output.string(response.source_key)
    async def handle_request_ticket(self, client, input, output):
        logger.info("AuthenticationServerNX.request_ticket()")
        #--- request ---
        source = input.pid()
        target = input.pid()
        response = await self.request_ticket(client, source, target)
        #--- response ---
        if not isinstance(response, rmc.RMCResponse):
            raise RuntimeError("Expected RMCResponse, got %s" %response.__class__.__name__)
        for field in ['result', 'ticket', 'key']:
            if not hasattr(response, field):
                raise RuntimeError("Missing field in RMCResponse: %s" %field)
        output.result(response.result)
        output.buffer(response.ticket)
        output.string(response.key)
    async def handle_get_pid(self, client, input, output):
        logger.info("AuthenticationServerNX.get_pid()")
        #--- request ---
        username = input.string()
        response = await self.get_pid(client, username)
        #--- response ---
        if not isinstance(response, int):
            raise RuntimeError("Expected int, got %s" %response.__class__.__name__)
        output.pid(response)
    async def handle_get_name(self, client, input, output):
        logger.info("AuthenticationServerNX.get_name()")
        #--- request ---
        pid = input.pid()
        response = await self.get_name(client, pid)
        #--- response ---
        if not isinstance(response, str):
            raise RuntimeError("Expected str, got %s" %response.__class__.__name__)
        output.string(response)
    async def handle_validate_and_request_ticket_with_param(self, client, input, output):
        logger.info("AuthenticationServerNX.validate_and_request_ticket_with_param()")
        #--- request ---
        param = input.extract(ValidateAndRequestTicketParam)
        response = await self.validate_and_request_ticket_with_param(client, param)
        #--- response ---
        if not isinstance(response, ValidateAndRequestTicketResult):
            raise RuntimeError("Expected ValidateAndRequestTicketResult, got %s" %response.__class__.__name__)
        output.add(response)
    # --- Override these in a subclass to implement the protocol ---
    async def validate_and_request_ticket(self, *args):
        logger.warning("AuthenticationServerNX.validate_and_request_ticket not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def validate_and_request_ticket_with_custom_data(self, *args):
        logger.warning("AuthenticationServerNX.validate_and_request_ticket_with_custom_data not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def request_ticket(self, *args):
        logger.warning("AuthenticationServerNX.request_ticket not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def get_pid(self, *args):
        logger.warning("AuthenticationServerNX.get_pid not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def get_name(self, *args):
        logger.warning("AuthenticationServerNX.get_name not implemented")
        raise common.RMCError("Core::NotImplemented")
    async def validate_and_request_ticket_with_param(self, *args):
        logger.warning("AuthenticationServerNX.validate_and_request_ticket_with_param not implemented")
        raise common.RMCError("Core::NotImplemented")
| 36.525758
| 125
| 0.740988
| 3,046
| 24,107
| 5.664478
| 0.054169
| 0.045207
| 0.037557
| 0.050075
| 0.870871
| 0.835516
| 0.80022
| 0.755767
| 0.713632
| 0.682161
| 0
| 0.00325
| 0.132119
| 24,107
| 659
| 126
| 36.581184
| 0.821432
| 0.033434
| 0
| 0.629191
| 1
| 0
| 0.19218
| 0.084778
| 0
| 0
| 0.000258
| 0
| 0
| 1
| 0.04142
| false
| 0.005917
| 0.003945
| 0
| 0.118343
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8a946c37eabbb3f925c7788e91ecbd7d2262466
| 98
|
py
|
Python
|
explorer/resources/ui/__init__.py
|
shalevy1/gexplorer
|
5216a506aace8259bc84495018c4a67dda220403
|
[
"Apache-2.0"
] | null | null | null |
explorer/resources/ui/__init__.py
|
shalevy1/gexplorer
|
5216a506aace8259bc84495018c4a67dda220403
|
[
"Apache-2.0"
] | 1
|
2022-03-21T22:21:30.000Z
|
2022-03-21T22:21:30.000Z
|
explorer/resources/ui/__init__.py
|
shalevy1/gexplorer
|
5216a506aace8259bc84495018c4a67dda220403
|
[
"Apache-2.0"
] | null | null | null |
from explorer.resources.ui.index import Index
from explorer.resources.ui.template import Template
| 32.666667
| 51
| 0.857143
| 14
| 98
| 6
| 0.5
| 0.285714
| 0.5
| 0.547619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 98
| 2
| 52
| 49
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
b22c99edc1775bc837c6ae49c47b4513cb17bd87
| 7,189
|
py
|
Python
|
tests/models/boundary/test_is_point_in_boundary.py
|
EderVs/Voronoi-Diagrams
|
6e69f9b6eb516dee12d66f187cf267a7b527da5f
|
[
"MIT"
] | 3
|
2021-11-12T17:43:08.000Z
|
2022-01-03T02:47:34.000Z
|
tests/models/boundary/test_is_point_in_boundary.py
|
EderVs/Voronoi-Diagrams
|
6e69f9b6eb516dee12d66f187cf267a7b527da5f
|
[
"MIT"
] | 3
|
2021-11-19T20:12:31.000Z
|
2021-11-19T20:14:39.000Z
|
tests/models/boundary/test_is_point_in_boundary.py
|
EderVs/Voronoi-Diagrams
|
6e69f9b6eb516dee12d66f187cf267a7b527da5f
|
[
"MIT"
] | null | null | null |
"""Test is_point_in_boundary method in WeightedPointBoundary."""
# Standard
from typing import List, Any
from random import randint
# Models
from voronoi_diagrams.models import (
WeightedSite,
WeightedPointBisector,
WeightedPointBoundary,
Point,
)
# Math
from decimal import Decimal
class TestWeightedPointBoundaryIsPointInAllRegion:
"""Test is_point_in_boundary method in WeightedPointBoundary."""
def test_with_concave_to_y_boundary(self):
"""Test with a boundary that is concave to y."""
p = WeightedSite(Decimal(16), Decimal(10), Decimal(2))
# q is the one in the top.
q = WeightedSite(Decimal(40), Decimal(10), Decimal(6))
bisector = WeightedPointBisector(sites=(p, q))
boundary_plus = WeightedPointBoundary(bisector=bisector, sign=True)
boundary_minus = WeightedPointBoundary(bisector=bisector, sign=False)
# Points in boundary
# Point in event point
point = Point(Decimal("40"), Decimal("16"))
assert boundary_plus.is_point_in_boundary(point)
assert not boundary_minus.is_point_in_boundary(point)
# Point in Boundary-
point = Point(Decimal("36"), Decimal("16.17424305044159994757531098"))
assert boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("36"), Decimal("107.8257569495584071586485307"))
assert boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("45"), Decimal("215.8749217771908888306107530"))
assert boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
x = bisector.get_vertical_tangents()[0]
point = Point(x, boundary_minus.formula_y(x)[0])
assert boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
# Point in Boundary+
point = Point(Decimal("45"), Decimal("16.12507822280910540692058362"))
assert not boundary_minus.is_point_in_boundary(point)
assert boundary_plus.is_point_in_boundary(point)
# Point inside
point = Point(Decimal("45"), Decimal("25"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
# Points outside
point = Point(Decimal("50"), Decimal("10"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("70"), Decimal("17"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("31"), Decimal("17"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("0"), Decimal("40"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
x = bisector.get_vertical_tangents()[0]
point = Point(x, boundary_minus.formula_y(x)[0] + Decimal(5))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_minus.is_point_in_boundary(point)
def test_with_normal_boundary(self):
"""Test with a boundary that is not concave to y."""
p = WeightedSite(Decimal(16), Decimal(10), Decimal(2))
# q is the one in the top.
q = WeightedSite(Decimal(40), Decimal(30), Decimal(6))
bisector = WeightedPointBisector(sites=(p, q))
boundary_plus = WeightedPointBoundary(bisector=bisector, sign=True)
boundary_minus = WeightedPointBoundary(bisector=bisector, sign=False)
# Points in boundary
# Point in event point
point = Point(Decimal("40"), Decimal("36"))
assert not boundary_minus.is_point_in_boundary(point)
assert boundary_plus.is_point_in_boundary(point)
# Point in Boundary+
point = Point(Decimal("70"), Decimal("44.51646544245032821756886326"))
assert not boundary_minus.is_point_in_boundary(point)
assert boundary_plus.is_point_in_boundary(point)
# Point in Boundary-
point = Point(Decimal("24"), Decimal("50.49390153191919183928135506"))
assert boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
# Point inside
point = Point(Decimal("30"), Decimal("70"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
# Points outside
point = Point(Decimal("40"), Decimal("30"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("90"), Decimal("50"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("10"), Decimal("50"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
def test_with_stopped_boundary(self):
"""Test with a boundary that is not concave to y."""
p = WeightedSite(Decimal(16), Decimal(10), Decimal(2))
# q is the one in the top.
q = WeightedSite(Decimal(30), Decimal(14), Decimal(6))
bisector = WeightedPointBisector(sites=(p, q))
boundary_plus = WeightedPointBoundary(bisector=bisector, sign=True)
boundary_minus = WeightedPointBoundary(bisector=bisector, sign=False)
# Points in boundary
# Point in event point
point = Point(Decimal("30"), Decimal("20"))
assert not boundary_minus.is_point_in_boundary(point)
assert boundary_plus.is_point_in_boundary(point)
# Point in Boundary+
point = Point(Decimal("60"), Decimal("26.94980694980695009479400205"))
assert not boundary_minus.is_point_in_boundary(point)
assert boundary_plus.is_point_in_boundary(point)
# Point in Boundary-
point = Point(Decimal("24"), Decimal("30.28571428571428495693446374"))
assert boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
# Point inside
point = Point(Decimal("30"), Decimal("70"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
# Points outside
point = Point(Decimal("35"), Decimal("15"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("40"), Decimal("21"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
point = Point(Decimal("25"), Decimal("21"))
assert not boundary_minus.is_point_in_boundary(point)
assert not boundary_plus.is_point_in_boundary(point)
| 46.681818
| 78
| 0.696203
| 909
| 7,189
| 5.242024
| 0.10121
| 0.09255
| 0.192025
| 0.243442
| 0.881217
| 0.865687
| 0.865268
| 0.865268
| 0.828541
| 0.828541
| 0
| 0.059309
| 0.207261
| 7,189
| 153
| 79
| 46.986928
| 0.776803
| 0.093059
| 0
| 0.62963
| 0
| 0
| 0.048061
| 0.035852
| 0
| 0
| 0
| 0
| 0.481481
| 1
| 0.027778
| false
| 0
| 0.037037
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b234d0dd0544e896fe9db00a9ac48cf9a8870038
| 4,450
|
py
|
Python
|
tests/fields/test_arithmetic_exceptions.py
|
BK-Modding/galois
|
5da4db84d90083e337ebe2c1838df5c6db88fd3f
|
[
"MIT"
] | null | null | null |
tests/fields/test_arithmetic_exceptions.py
|
BK-Modding/galois
|
5da4db84d90083e337ebe2c1838df5c6db88fd3f
|
[
"MIT"
] | null | null | null |
tests/fields/test_arithmetic_exceptions.py
|
BK-Modding/galois
|
5da4db84d90083e337ebe2c1838df5c6db88fd3f
|
[
"MIT"
] | null | null | null |
"""
A pytest module to test exception raising for invalid Galois field array arithmetic.
"""
import pytest
import numpy as np
import galois
from ..helper import randint
def test_add_int_scalar(field):
x = field.Random(10)
y = int(randint(0, field.order, 1, field.dtypes[-1]))
with pytest.raises(TypeError):
z = x + y
with pytest.raises(TypeError):
z = y + x
def test_add_int_array(field):
x = field.Random(10)
y = randint(0, field.order, 10, field.dtypes[-1])
with pytest.raises(TypeError):
z = x + y
with pytest.raises(TypeError):
z = y + x
def test_right_add_int_scalar(field):
x = field.Random(10)
y = int(randint(0, field.order, 1, field.dtypes[-1]))
with pytest.raises(TypeError):
x += y
with pytest.raises(TypeError):
y += x
def test_right_add_int_array(field):
x = field.Random(10)
y = randint(0, field.order, 10, field.dtypes[-1])
with pytest.raises(TypeError):
x += y
with pytest.raises(TypeError):
y += x
def test_subtract_int_scalar(field):
x = field.Random(10)
y = int(randint(0, field.order, 1, field.dtypes[-1]))
with pytest.raises(TypeError):
z = x - y
with pytest.raises(TypeError):
z = y - x
def test_subtract_int_array(field):
x = field.Random(10)
y = randint(0, field.order, 10, field.dtypes[-1])
with pytest.raises(TypeError):
z = x - y
with pytest.raises(TypeError):
z = y - x
def test_right_subtract_int_scalar(field):
x = field.Random(10)
y = int(randint(0, field.order, 1, field.dtypes[-1]))
with pytest.raises(TypeError):
x -= y
with pytest.raises(TypeError):
y -= x
def test_right_subtract_int_array(field):
x = field.Random(10)
y = randint(0, field.order, 10, field.dtypes[-1])
with pytest.raises(TypeError):
x -= y
with pytest.raises(TypeError):
y -= x
# NOTE: Don't test multiply with integer because that is a valid operation, namely "multiple addition"
def test_divide_int_scalar(field):
x = field.Random(10, low=1)
y = int(randint(1, field.order, 1, field.dtypes[-1]))
with pytest.raises(TypeError):
z = x / y
with pytest.raises(TypeError):
z = x // y
with pytest.raises(TypeError):
z = y / x
with pytest.raises(TypeError):
z = y // x
def test_divide_int_array(field):
x = field.Random(10, low=1)
y = randint(1, field.order, 10, field.dtypes[-1])
with pytest.raises(TypeError):
z = x / y
with pytest.raises(TypeError):
z = x // y
with pytest.raises(TypeError):
z = y / x
with pytest.raises(TypeError):
z = y // x
def test_right_divide_int_scalar(field):
x = field.Random(10)
y = int(randint(1, field.order, 1, field.dtypes[-1]))
with pytest.raises(TypeError):
x /= y
with pytest.raises(TypeError):
x //= y
with pytest.raises(TypeError):
y /= x
with pytest.raises(TypeError):
y //= x
def test_right_divide_int_array(field):
x = field.Random(10)
y = randint(1, field.order, 10, field.dtypes[-1])
with pytest.raises(TypeError):
x /= y
with pytest.raises(TypeError):
x //= y
with pytest.raises(TypeError):
y /= x
with pytest.raises(TypeError):
y //= x
def test_divide_by_zero(field):
x = field.Random(10)
with pytest.raises(ZeroDivisionError):
y = field(0)
z = x / y
with pytest.raises(ZeroDivisionError):
y = field.Random(10)
y[0] = 0 # Ensure one value is zero
z = x / y
def test_multiplicative_inverse_of_zero(field):
x = field.Random(10)
x[0] = 0 # Ensure one value is zero
with pytest.raises(ZeroDivisionError):
z = x ** -1
# NOTE: Don't test power to integer because that's valid
def test_zero_to_negative_power(field):
x = field.Random(10)
x[0] = 0 # Ensure one value is zero
with pytest.raises(ZeroDivisionError):
y = -3
z = x ** y
with pytest.raises(ZeroDivisionError):
y = -3*np.ones(x.size, field.dtypes[-1])
z = x ** y
def test_log_of_zero(field):
with pytest.raises(ArithmeticError):
x = field(0)
z = np.log(x)
with pytest.raises(ArithmeticError):
x = field.Random(10)
x[0] = 0 # Ensure one value is zero
z = np.log(x)
| 24.860335
| 102
| 0.605618
| 650
| 4,450
| 4.06
| 0.107692
| 0.147783
| 0.236453
| 0.303145
| 0.853732
| 0.844259
| 0.790451
| 0.790451
| 0.720349
| 0.708223
| 0
| 0.028186
| 0.266517
| 4,450
| 178
| 103
| 25
| 0.780331
| 0.076629
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.118519
| false
| 0
| 0.02963
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b25573ea17afc6e0899bed6da89336286b7b8854
| 3,622
|
py
|
Python
|
lensit/qcinv/chain_samples.py
|
Sebastian-Belkner/LensIt
|
3e746ceeaa53b2845af31cc8372cd897e34ad53f
|
[
"MIT"
] | null | null | null |
lensit/qcinv/chain_samples.py
|
Sebastian-Belkner/LensIt
|
3e746ceeaa53b2845af31cc8372cd897e34ad53f
|
[
"MIT"
] | null | null | null |
lensit/qcinv/chain_samples.py
|
Sebastian-Belkner/LensIt
|
3e746ceeaa53b2845af31cc8372cd897e34ad53f
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from lensit.qcinv import cd_solve
import numpy as np
def get_defaultmgchain(lmax_sky, lsides, datshape, tol=1e-5, iter_max=np.inf, dense_file='', **kwargs):
# FIXME :
assert datshape[0] == datshape[1], datshape
nside_max = datshape[0]
if lmax_sky > 4000:
dense_size = 2000
if np.prod(lsides) >= (4. * np.pi) - 0.1:
lmax_dense = 64
else:
lmax_dense = np.sqrt(2. / 2. / np.pi * (2 * np.pi) ** 2 / np.prod(lsides) * dense_size)
lmax_dense = int(np.round(min(lmax_dense, 1300)))
print("chain_samples : setting lmax_dense to ", lmax_dense)
chain_descr = [
[3, ["split(dense(" + dense_file + "), %s, diag_cl)" % (int(lmax_dense))], 1400, nside_max / 4, 3, 0.,
cd_solve.tr_cg,
cd_solve.cache_mem()],
[2, ["split(stage(3), %s, diag_cl)" % 1400], 3000, nside_max / 2, 3, 0., cd_solve.tr_cg,
cd_solve.cache_mem()],
[1, ["split(stage(2), %s, diag_cl)" % 3000], 4000, nside_max / 2, 3, 0., cd_solve.tr_cg,
cd_solve.cache_mem()],
[0, ["split(stage(1), %s, diag_cl)" % 4000], lmax_sky, nside_max, iter_max, tol, cd_solve.tr_cg,
cd_solve.cache_mem()]]
elif lmax_sky > 3000:
dense_size = 2000
lmax_dense = np.sqrt(2. / 2. / np.pi * (2 * np.pi) ** 2 / np.prod(lsides) * dense_size)
lmax_dense = int(np.round(min(lmax_dense, 1300)))
print("chain_samples : setting lmax_dense to " + str(lmax_dense))
chain_descr = [
[2, ["split(dense(" + dense_file + "), %s, diag_cl)" % (int(lmax_dense))], 1400, nside_max / 4, 3, 0.,
cd_solve.tr_cg, cd_solve.cache_mem()],
[1, ["split(stage(2), %s, diag_cl)" % 1400], 3000, nside_max / 2, 3, 0., cd_solve.tr_cg,
cd_solve.cache_mem()],
[0, ["split(stage(1), %s, diag_cl)" % 3000], lmax_sky, nside_max / 2, iter_max, tol, cd_solve.tr_cg,
cd_solve.cache_mem()]]
else:
res = lambda fac: max(10, nside_max / fac)
chain_descr = [
[3, ["split(dense(" + dense_file + "), %s, diag_cl)" % 64], 256, res(16), 3, 0., cd_solve.tr_cg,
cd_solve.cache_mem()],
[2, ["split(stage(3), %s, diag_cl)" % 256], 512, res(8), 3, 0., cd_solve.tr_cg,
cd_solve.cache_mem()],
[1, ["split(stage(2), %s, diag_cl)" % 512], 1024, res(4), 3, 0., cd_solve.tr_cg,
cd_solve.cache_mem()],
[0, ["split(stage(1), %s, diag_cl)" % 1024], lmax_sky, nside_max, iter_max, tol, cd_solve.tr_cg,
cd_solve.cache_mem()]]
return chain_descr
def get_densediagchain(lsides,lmax_sky,datshape,dense_file,tol = 1e-5,iter_max = np.inf):
assert datshape[0] == datshape[1], datshape
dense_size = 2000
if np.prod(lsides) >= (4. * np.pi) - 0.1:
lmax_dense = 64
else:
lmax_dense = np.sqrt(2. / 2. / np.pi * (2 * np.pi) ** 2 / np.prod(lsides) * dense_size)
lmax_dense = int(np.round(min(lmax_dense, 1300)))
print("chain_samples : setting lmax_dense to " + str(lmax_dense))
chain_descr = [
[0, ["split(dense(" + dense_file + "), %s, diag_cl)" % (int(lmax_dense))], lmax_sky, datshape[0], iter_max,tol,cd_solve.tr_cg, cd_solve.cache_mem()]]
return chain_descr
def get_isomgchain(lmax_sky, datshape, tol=1e-5, iter_max=np.inf, **kwargs):
assert datshape[0] == datshape[1], datshape
nside_max = datshape[0]
return [[0, ["diag_cl"], lmax_sky, nside_max, iter_max, tol, cd_solve.tr_cg, cd_solve.cache_mem()]]
| 48.945946
| 157
| 0.576753
| 557
| 3,622
| 3.490126
| 0.141831
| 0.097222
| 0.060185
| 0.07356
| 0.809156
| 0.809156
| 0.792695
| 0.783436
| 0.756687
| 0.756687
| 0
| 0.066494
| 0.252623
| 3,622
| 73
| 158
| 49.616438
| 0.651644
| 0.001933
| 0
| 0.609375
| 0
| 0
| 0.125381
| 0
| 0
| 0
| 0
| 0.013699
| 0.046875
| 1
| 0.046875
| false
| 0
| 0.046875
| 0
| 0.140625
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b270d816df88163f798ae697243e694e5e4267f2
| 12,471
|
py
|
Python
|
misago/users/tests/test_user_create_api.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | 1
|
2017-07-25T03:04:36.000Z
|
2017-07-25T03:04:36.000Z
|
misago/users/tests/test_user_create_api.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
misago/users/tests/test_user_create_api.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.core import mail
from django.test import override_settings
from django.urls import reverse
from misago.conf import settings
from misago.users.models import Ban, Online
from misago.users.testutils import UserTestCase
UserModel = get_user_model()
class UserCreateTests(UserTestCase):
"""tests for new user registration (POST to /api/users/)"""
def setUp(self):
super(UserCreateTests, self).setUp()
self.api_link = '/api/users/'
def test_empty_request(self):
"""empty request errors with code 400"""
response = self.client.post(self.api_link)
self.assertEqual(response.status_code, 400)
def test_authenticated_request(self):
"""authentiated user request errors with code 403"""
self.login_user(self.get_authenticated_user())
response = self.client.post(self.api_link)
self.assertEqual(response.status_code, 403)
def test_registration_off_request(self):
"""registrations off request errors with code 403"""
settings.override_setting('account_activation', 'closed')
response = self.client.post(self.api_link)
self.assertContains(response, 'closed', status_code=403)
def test_registration_validates_ip_ban(self):
"""api validates ip ban"""
Ban.objects.create(
check_type=Ban.IP,
banned_value='127.*',
user_message="You can't register account like this.",
)
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 403)
def test_registration_validates_ip_registration_ban(self):
"""api validates ip registration-only ban"""
Ban.objects.create(
check_type=Ban.IP,
banned_value='127.*',
user_message="You can't register account like this.",
registration_only=True,
)
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'__all__': ["You can't register account like this."],
}
)
def test_registration_validates_username(self):
"""api validates usernames"""
user = self.get_authenticated_user()
response = self.client.post(
self.api_link,
data={
'username': user.username,
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'username': ["This username is not available."],
})
def test_registration_validates_username_ban(self):
"""api validates username ban"""
Ban.objects.create(
banned_value='totally*',
user_message="You can't register account like this.",
)
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'username': ["You can't register account like this."],
}
)
def test_registration_validates_username_registration_ban(self):
"""api validates username registration-only ban"""
Ban.objects.create(
banned_value='totally*',
user_message="You can't register account like this.",
registration_only=True,
)
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'username': ["You can't register account like this."],
}
)
def test_registration_validates_email(self):
"""api validates usernames"""
user = self.get_authenticated_user()
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': user.email,
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'email': ["This e-mail address is not available."],
})
def test_registration_validates_email_ban(self):
"""api validates email ban"""
Ban.objects.create(
check_type=Ban.EMAIL,
banned_value='lorem*',
user_message="You can't register account like this.",
)
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'email': ["You can't register account like this."],
})
def test_registration_validates_email_registration_ban(self):
"""api validates email registration-only ban"""
Ban.objects.create(
check_type=Ban.EMAIL,
banned_value='lorem*',
user_message="You can't register account like this.",
registration_only=True,
)
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.json(), {
'email': ["You can't register account like this."],
})
def test_registration_validates_password(self):
"""api uses django's validate_password to validate registrations"""
response = self.client.post(
self.api_link,
data={
'username': 'Bob',
'email': 'l.o.r.e.m.i.p.s.u.m@gmail.com',
'password': '123',
},
)
self.assertContains(response, "password is too short", status_code=400)
self.assertContains(response, "password is entirely numeric", status_code=400)
self.assertContains(response, "email is not allowed", status_code=400)
def test_registration_validates_password_similiarity(self):
"""api uses validate_password to validate registrations"""
response = self.client.post(
self.api_link,
data={
'username': 'BobBoberson',
'email': 'l.o.r.e.m.i.p.s.u.m@gmail.com',
'password': 'BobBoberson',
},
)
self.assertContains(response, "password is too similar to the username", status_code=400)
@override_settings(captcha_type='qa', qa_question='Test', qa_answers='Lorem\nIpsum')
def test_registration_validates_captcha(self):
"""api validates captcha"""
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
'captcha': 'dolor'
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(
response.json(), {
'captcha': ['Entered answer is incorrect.'],
}
)
# valid captcha
response = self.client.post(
self.api_link,
data={
'username': 'totallyNew',
'email': 'loremipsum@dolor.met',
'password': 'LoremP4ssword',
'captcha': 'ipSUM'
},
)
self.assertEqual(response.status_code, 200)
def test_registration_calls_validate_new_registration(self):
"""api uses validate_new_registration to validate registrations"""
response = self.client.post(
self.api_link,
data={
'username': 'Bob',
'email': 'l.o.r.e.m.i.p.s.u.m@gmail.com',
'password': 'pas123',
},
)
self.assertContains(response, "email is not allowed", status_code=400)
def test_registration_creates_active_user(self):
"""api creates active and signed in user on POST"""
settings.override_setting('account_activation', 'none')
response = self.client.post(
self.api_link,
data={
'username': 'Bob',
'email': 'bob@bob.com',
'password': 'pass123',
},
)
self.assertContains(response, 'active')
self.assertContains(response, 'Bob')
self.assertContains(response, 'bob@bob.com')
UserModel.objects.get_by_username('Bob')
test_user = UserModel.objects.get_by_email('bob@bob.com')
self.assertEqual(Online.objects.filter(user=test_user).count(), 1)
self.assertTrue(test_user.check_password('pass123'))
response = self.client.get(reverse('misago:index'))
self.assertContains(response, 'Bob')
self.assertIn('Welcome', mail.outbox[0].subject)
def test_registration_creates_inactive_user(self):
"""api creates inactive user on POST"""
settings.override_setting('account_activation', 'user')
response = self.client.post(
self.api_link,
data={
'username': 'Bob',
'email': 'bob@bob.com',
'password': 'pass123',
},
)
self.assertContains(response, 'user')
self.assertContains(response, 'Bob')
self.assertContains(response, 'bob@bob.com')
UserModel.objects.get_by_username('Bob')
UserModel.objects.get_by_email('bob@bob.com')
self.assertIn('Welcome', mail.outbox[0].subject)
def test_registration_creates_admin_activated_user(self):
"""api creates admin activated user on POST"""
settings.override_setting('account_activation', 'admin')
response = self.client.post(
self.api_link,
data={
'username': 'Bob',
'email': 'bob@bob.com',
'password': 'pass123',
},
)
self.assertContains(response, 'admin')
self.assertContains(response, 'Bob')
self.assertContains(response, 'bob@bob.com')
UserModel.objects.get_by_username('Bob')
UserModel.objects.get_by_email('bob@bob.com')
self.assertIn('Welcome', mail.outbox[0].subject)
def test_registration_creates_user_with_whitespace_password(self):
"""api creates user with spaces around password"""
settings.override_setting('account_activation', 'none')
response = self.client.post(
self.api_link,
data={
'username': 'Bob',
'email': 'bob@bob.com',
'password': ' pass123 ',
},
)
self.assertContains(response, 'active')
self.assertContains(response, 'Bob')
self.assertContains(response, 'bob@bob.com')
UserModel.objects.get_by_username('Bob')
test_user = UserModel.objects.get_by_email('bob@bob.com')
self.assertEqual(Online.objects.filter(user=test_user).count(), 1)
self.assertTrue(test_user.check_password(' pass123 '))
response = self.client.get(reverse('misago:index'))
self.assertContains(response, 'Bob')
self.assertIn('Welcome', mail.outbox[0].subject)
| 32.30829
| 97
| 0.565632
| 1,236
| 12,471
| 5.553398
| 0.127023
| 0.037733
| 0.057692
| 0.064103
| 0.827943
| 0.776078
| 0.754371
| 0.742133
| 0.71285
| 0.701777
| 0
| 0.012765
| 0.315291
| 12,471
| 385
| 98
| 32.392208
| 0.791076
| 0.06471
| 0
| 0.64966
| 0
| 0.010204
| 0.178846
| 0.007524
| 0
| 0
| 0
| 0
| 0.163265
| 1
| 0.068027
| false
| 0.085034
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b27c38908424a6ddf59f889e21b07f4a069eb638
| 71
|
py
|
Python
|
hello.py
|
gabriel-flyrlabs/hello-world-test
|
5a209215479d6e56cfea19177040fae9ca5acb24
|
[
"MIT"
] | null | null | null |
hello.py
|
gabriel-flyrlabs/hello-world-test
|
5a209215479d6e56cfea19177040fae9ca5acb24
|
[
"MIT"
] | null | null | null |
hello.py
|
gabriel-flyrlabs/hello-world-test
|
5a209215479d6e56cfea19177040fae9ca5acb24
|
[
"MIT"
] | null | null | null |
print "Hello World!"
print "Hello World again!"
print 'more more more'
| 17.75
| 26
| 0.732394
| 11
| 71
| 4.727273
| 0.454545
| 0.384615
| 0.576923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15493
| 71
| 3
| 27
| 23.666667
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0.619718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a253c8e23d8b2dc804b57b1afc90c2cb489caf1e
| 410
|
py
|
Python
|
models/utils.py
|
sgalkina/svae_spectra
|
d443e6c0839875c7f19fc9362075bb8e54cdbca8
|
[
"MIT"
] | null | null | null |
models/utils.py
|
sgalkina/svae_spectra
|
d443e6c0839875c7f19fc9362075bb8e54cdbca8
|
[
"MIT"
] | null | null | null |
models/utils.py
|
sgalkina/svae_spectra
|
d443e6c0839875c7f19fc9362075bb8e54cdbca8
|
[
"MIT"
] | 1
|
2022-03-18T12:33:15.000Z
|
2022-03-18T12:33:15.000Z
|
def unsupervised_distr(distr):
variables = {k: k + '_u' for k in distr.var + distr.cond_var if k != 'z'}
distr_unsupervised = distr.replace_var(**variables)
return distr_unsupervised, variables
def unsupervised_distr_no_var(distr):
variables = {k: k + '_u' for k in distr.var + distr.cond_var if k != 'z'}
distr_unsupervised = distr.replace_var(**variables)
return distr_unsupervised
| 37.272727
| 77
| 0.707317
| 59
| 410
| 4.677966
| 0.254237
| 0.246377
| 0.144928
| 0.115942
| 0.804348
| 0.804348
| 0.804348
| 0.804348
| 0.804348
| 0.804348
| 0
| 0
| 0.178049
| 410
| 10
| 78
| 41
| 0.818991
| 0
| 0
| 0.5
| 0
| 0
| 0.014634
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a2a5a99a77212c0d1c5154f19871a23c9caef251
| 341
|
py
|
Python
|
Voice Assitent/Detec/l.py
|
AlbertBagdos256/Voice-Assistent
|
6280e19c22da04a0135c418b96b6273357e334e4
|
[
"MIT"
] | null | null | null |
Voice Assitent/Detec/l.py
|
AlbertBagdos256/Voice-Assistent
|
6280e19c22da04a0135c418b96b6273357e334e4
|
[
"MIT"
] | null | null | null |
Voice Assitent/Detec/l.py
|
AlbertBagdos256/Voice-Assistent
|
6280e19c22da04a0135c418b96b6273357e334e4
|
[
"MIT"
] | null | null | null |
import os
import subprocess
prog_name = 'D:/Python.main/PETROVICH/Detec/obj_detec_oop.py'
arg = '--prototxt MobileNetSSD_deploy.prototxt.txt --model MobileNetSSD_deploy.caffemodel'
os.system(r'D:/Python.main/PETROVICH/Detec/obj_detec_oop.py --prototxt MobileNetSSD_deploy.prototxt.txt --model MobileNetSSD_deploy.caffemodel')
| 31
| 145
| 0.788856
| 46
| 341
| 5.652174
| 0.478261
| 0.276923
| 0.084615
| 0.153846
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0
| 0
| 0
| 0.096774
| 341
| 10
| 146
| 34.1
| 0.844156
| 0
| 0
| 0
| 0
| 0.2
| 0.792049
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
a2cc7c8d046f57686330ef825bdc65b839580b8e
| 218
|
py
|
Python
|
http_responses/models/__init__.py
|
parveenchahal/python-common
|
4e5488615db3e0f8ba7f0bfeee87304a98fee2d5
|
[
"MIT"
] | null | null | null |
http_responses/models/__init__.py
|
parveenchahal/python-common
|
4e5488615db3e0f8ba7f0bfeee87304a98fee2d5
|
[
"MIT"
] | null | null | null |
http_responses/models/__init__.py
|
parveenchahal/python-common
|
4e5488615db3e0f8ba7f0bfeee87304a98fee2d5
|
[
"MIT"
] | null | null | null |
from ._oauth2_token_response import Oath2TokenResponse
from ._message_response import MessageResponseModel
from ._error_response_model import ErrorResponseModel
from ._session_token_response import SessionTokenResponse
| 54.5
| 57
| 0.912844
| 23
| 218
| 8.173913
| 0.565217
| 0.223404
| 0.202128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009852
| 0.068807
| 218
| 4
| 57
| 54.5
| 0.916256
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a7d43861875dce212aa6f1f7535c0c09e0a3802c
| 36,508
|
py
|
Python
|
generators/generators.py
|
XingangPan/ShadeGAN
|
b151b3105e868373724f6719d7bf916d781cdec7
|
[
"MIT"
] | 96
|
2021-11-01T01:49:38.000Z
|
2022-03-23T04:31:26.000Z
|
generators/generators.py
|
XingangPan/ShadeGAN
|
b151b3105e868373724f6719d7bf916d781cdec7
|
[
"MIT"
] | 3
|
2021-11-08T03:15:40.000Z
|
2022-03-01T21:31:02.000Z
|
generators/generators.py
|
XingangPan/ShadeGAN
|
b151b3105e868373724f6719d7bf916d781cdec7
|
[
"MIT"
] | 8
|
2021-11-02T13:37:03.000Z
|
2022-03-23T04:29:33.000Z
|
"""Implicit generator for 3D volumes"""
import torch.nn as nn
import torch
from .volumetric_rendering import *
from .decoder import ResDecoder
from .utils import *
class ImplicitGenerator3d(nn.Module):
def __init__(self, siren, z_dim, shading, view_condition, light_condition, surf_track, ldist=None, **kwargs):
    """Build the implicit 3D generator.

    Args:
        siren: factory callable producing the SIREN radiance network.
        z_dim: dimensionality of the latent noise vector.
        shading: whether to apply Lambertian shading to rendered albedo.
        view_condition / light_condition: conditioning flags forwarded to the SIREN.
        surf_track: if True, also build a surface-depth prediction network.
        ldist: light-parameter sampler; defaults to an `LSampler` on the SIREN's device.
    """
    super().__init__()
    self.z_dim = z_dim
    self.shading = shading
    self.surf_track = surf_track
    self.epoch = 0
    self.step = 0
    self.siren = siren(
        output_dim=4, z_dim=z_dim, input_dim=3, shading=shading,
        view_condition=view_condition, light_condition=light_condition,
        device=None,
    )
    if surf_track:
        # Decoder that predicts a coarse surface depth map from SIREN codes.
        self.surfacenet = ResDecoder(4608, nf=16)
    if ldist is None:
        ldist = LSampler(device=self.siren.device)
    self.ldist = ldist
    self.init_cam2world()
def set_device(self, device):
self.device = device
self.siren.device = device
self.ldist.device = device
if self.surf_track:
self.surfacenet.device = device
self.generate_avg_frequencies()
self.cam2world_matrix = self.cam2world_matrix.to(device)
def init_cam2world(self):
    """Cache the canonical camera-to-world matrix.

    The reference camera sits at (0, 0, 1) and looks back along -z toward
    the origin; the matrix is reused to rotate light directions into world
    space in `lambertian_shading`.
    """
    origin = torch.zeros((1, 3))
    origin[:, 2] = 1
    self.cam2world_matrix = create_cam2world_matrix(-origin, origin)
def lambertian_shading(self, inputs, l, l_ratio=1):
normal, albedo = inputs['normal'], inputs['rgb']
b = normal.size(0)
rand_light_dxy = l[:,2:]
rand_light_d = torch.cat([rand_light_dxy, torch.ones(b, 1).to(rand_light_dxy)], 1)
rand_light_d = rand_light_d / (torch.norm(rand_light_d, dim=-1, keepdim=True) + 1e-7)
transformed_light = torch.bmm(self.cam2world_matrix.expand(b,4,4)[..., :3, :3], rand_light_d.reshape(b,1,3).permute(0,2,1)).permute(0, 2, 1).expand_as(normal)
rand_diffuse_shading = (normal * transformed_light).sum(-1, keepdim=True).clamp(min=0, max=1)
rand_diffuse_shading[torch.isnan(rand_diffuse_shading)] = 1.0
ambience = l[:,None,:1]/2+0.5
diffuse = l[:,None,1:2]/2+0.5
rand_shading = ambience + diffuse*rand_diffuse_shading
# smoothly transfer from no shading to shading
rand_shading = l_ratio * rand_shading + (1 - l_ratio)
rgb = (albedo * rand_shading).clamp(min=0, max=1)
inputs['albedo'] = albedo
inputs['rgb'] = rgb
inputs['shading'] = rand_shading
return inputs
def forward(self, z, l, img_size, fov, ray_start, ray_end, num_steps, h_stddev, v_stddev, h_mean, v_mean, hierarchical_sample,
            sample_dist=None, lock_view_dependence=False, delta=-1, pose=None, l_ratio=1, rt_normal=False, **kwargs):
    """
    Generates images from a noise vector, rendering parameters, and camera distribution.
    Uses the hierarchical sampling scheme described in NeRF.

    Args:
        z: latent noise vectors, shape (batch, z_dim).
        l: lighting code per sample — see `lambertian_shading`.
        img_size: output image resolution (square).
        fov, ray_start, ray_end: camera frustum parameters.
        num_steps: depth samples per ray (per pass).
        h_stddev, v_stddev, h_mean, v_mean: camera pose distribution parameters.
        hierarchical_sample: if True, run the NeRF fine (importance) pass.
        sample_dist: name of the camera pose sampling distribution.
        lock_view_dependence: replace ray directions with a constant vector.
        delta: if > 0 (and `self.surf_track`), sample depths in a band of
            width `delta` around the predicted surface.
        pose: optional fixed (pitch, yaw); overrides random pose sampling.
        l_ratio: shading blend factor (0 = unshaded, 1 = fully shaded).
        rt_normal: ask the SIREN to also return surface normals.

    Returns:
        dict with 'rgb' (batch, 3, img_size, img_size) scaled to [-1, 1],
        'depth', 'pose', 'depth_pred', and depending on flags 'albedo',
        'shading', 'normal', 'depth_std'.
    """
    batch_size = z.shape[0]
    with_grad = torch.is_grad_enabled()

    # Generate initial camera rays and sample points.
    with torch.no_grad():
        if pose is None:
            camera_origin, pitch, yaw = sample_camera_positions(n=batch_size, r=1, horizontal_stddev=h_stddev, vertical_stddev=v_stddev, horizontal_mean=h_mean, vertical_mean=v_mean, device=self.device, mode=sample_dist)
        else:
            pitch, yaw = pose[:, :1], pose[:, 1:2]
            camera_origin = pose2origin(self.device, pitch, yaw, batch_size, 1)
        if self.surf_track:
            # Predict a surface depth map; gradients are re-enabled so the
            # surface network can be trained through this prediction.
            with torch.set_grad_enabled(with_grad):
                freq, phase = self.siren.mapping_network(z)
                freq, phase = freq.detach(), phase.detach()
                pose_scaled = (torch.cat([pitch, yaw], -1) - math.pi/2) * 10
                freq_phase = torch.cat([freq, phase], -1)/10
                pred = self.surfacenet(freq_phase, pose_scaled)
                depth_pred = pred[:, 0, ...]  # 2nd channel not used for now
                depth_pred = ray_start + depth_pred * (ray_end - ray_start)
            if delta > 0:
                # Concentrate depth samples in a band of width `delta` around
                # the predicted surface, clamped inside the frustum.
                sample_depth = resize(depth_pred, img_size)
                delta = torch.ones_like(sample_depth) * delta
                sample_depth = torch.max(sample_depth, ray_start + delta/2)
                sample_depth = torch.min(sample_depth, ray_end - delta/2)
                points_cam, z_vals, rays_d_cam = get_rays_from_depth(batch_size, num_steps, sample_depth, delta, resolution=(img_size, img_size), device=self.device, fov=fov)  # batch_size, pixels, num_steps, 1
            else:
                points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end)  # batch_size, pixels, num_steps, 1
        else:
            depth_pred = None
            points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end)  # batch_size, pixels, num_steps, 1
        transformed_points, z_vals, transformed_ray_directions, transformed_ray_origins = transform_sampled_points(points_cam, z_vals, rays_d_cam, camera_origin, self.device)

        transformed_ray_directions_expanded = torch.unsqueeze(transformed_ray_directions, -2)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.expand(-1, -1, num_steps, -1)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.reshape(batch_size, img_size*img_size*num_steps, 3)
        transformed_points = transformed_points.reshape(batch_size, img_size*img_size*num_steps, 3)

        if lock_view_dependence:
            transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
            transformed_ray_directions_expanded[..., -1] = -1

    # Model prediction on coarse points
    coarse_output = self.siren(transformed_points, z, l, ray_directions=transformed_ray_directions_expanded, rt_normal=rt_normal)
    for k, v in coarse_output.items():
        coarse_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)

    # Re-sample fine points along camera rays, as described in NeRF
    if hierarchical_sample:
        with torch.no_grad():
            transformed_points = transformed_points.reshape(batch_size, img_size * img_size, num_steps, 3)
            course_results = fancy_integration(coarse_output, z_vals, device=self.device, clamp_mode=kwargs['clamp_mode'], noise_std=kwargs['nerf_noise'])
            weights = course_results['weights']
            weights = weights.reshape(batch_size * img_size * img_size, num_steps) + 1e-5

            #### Start new importance sampling
            z_vals = z_vals.reshape(batch_size * img_size * img_size, num_steps)
            z_vals_mid = 0.5 * (z_vals[:, :-1] + z_vals[:, 1:])
            z_vals = z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)
            fine_z_vals = sample_pdf(z_vals_mid, weights[:, 1:-1],
                                     num_steps, det=False).detach()
            fine_z_vals = fine_z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)

            fine_points = transformed_ray_origins.unsqueeze(2).contiguous() + transformed_ray_directions.unsqueeze(2).contiguous() * fine_z_vals.expand(-1, -1, -1, 3).contiguous()
            fine_points = fine_points.reshape(batch_size, img_size*img_size*num_steps, 3)

            if lock_view_dependence:
                transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
                transformed_ray_directions_expanded[..., -1] = -1
            #### end new importance sampling

        # Model prediction on re-sampled fine points
        fine_output = self.siren(fine_points, z, l, ray_directions=transformed_ray_directions_expanded, rt_normal=rt_normal)
        for k, v in fine_output.items():
            fine_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)

        # Combine coarse and fine samples, sorted by depth along each ray so
        # that integration sees samples in front-to-back order.
        all_z_vals = torch.cat([fine_z_vals, z_vals], dim=-2)
        _, indices = torch.sort(all_z_vals, dim=-2)
        all_z_vals = torch.gather(all_z_vals, -2, indices)
        all_outputs = {}
        for k, v in coarse_output.items():
            all_outputs[k] = torch.cat([fine_output[k], v], dim=-2)
            all_outputs[k] = torch.gather(all_outputs[k], -2, indices.expand(-1, -1, -1, all_outputs[k].size(-1)))
    else:
        all_outputs = coarse_output
        all_z_vals = z_vals

    # Create images with NeRF
    results = fancy_integration(all_outputs, all_z_vals, device=self.device, white_back=kwargs.get('white_back', False), last_back=kwargs.get('last_back', False), clamp_mode=kwargs['clamp_mode'], noise_std=kwargs['nerf_noise'])

    if self.shading:
        results = self.lambertian_shading(results, l, l_ratio=l_ratio)
        if hierarchical_sample:
            # BUGFIX: `course_results` only exists when the hierarchical pass
            # ran; the previously unguarded assignment raised NameError for
            # shading=True with hierarchical_sample=False.
            results['depth_std'] = course_results['depth_std']

    for k in ['rgb', 'rgb_refer']:
        if k in results:
            results[k] = results[k].reshape(batch_size, img_size, img_size, 3)
            results[k] = results[k].permute(0, 3, 1, 2).contiguous() * 2 - 1
    if 'normal' in results:
        results['normal'] = results['normal'].reshape(batch_size, img_size, img_size, 3).permute(0, 3, 1, 2).contiguous()
    for k in ['depth', 'depth_std']:
        # Guarded like the sibling methods so a missing key cannot raise.
        if k in results:
            results[k] = results[k].reshape(batch_size, img_size, img_size).contiguous()
            if depth_pred is not None:
                results[k] = resize(results[k], depth_pred.size(-1))
    results['depth'] = results['depth'].clamp(min=ray_start, max=ray_end)
    results['pose'] = torch.cat([pitch, yaw], -1)
    results['depth_pred'] = depth_pred
    return results
def generate_avg_frequencies(self):
"""Calculates average frequencies and phase shifts"""
z = torch.randn((10000, self.z_dim), device=self.siren.device)
with torch.no_grad():
frequencies, phase_shifts = self.siren.mapping_network(z)
self.avg_frequencies = frequencies.mean(0, keepdim=True)
self.avg_phase_shifts = phase_shifts.mean(0, keepdim=True)
def staged_forward(self, z, l, img_size, fov, ray_start, ray_end, num_steps, h_stddev, v_stddev, h_mean, v_mean, psi=0.8, lock_view_dependence=False, max_batch_size=50000, sample_dist=None, hierarchical_sample=False, delta=-1, pose=None, l_ratio=1, rt_normal=False, **kwargs):
    """Render images in fixed-size chunks to bound peak memory (inference path).

    Like `forward`, but runs entirely under `torch.no_grad()` and evaluates
    the SIREN on at most `max_batch_size` points at a time.  Mapped latent
    codes are truncated towards their cached averages with factor `psi`
    (psi=1 means no truncation).  Returns the same result dict as `forward`.
    """
    batch_size = z.shape[0]
    self.generate_avg_frequencies()
    with torch.no_grad():
        raw_frequencies, raw_phase_shifts = self.siren.mapping_network(z)
        # Truncation trick: pull the mapped codes towards their averages.
        truncated_frequencies = self.avg_frequencies + psi * (raw_frequencies - self.avg_frequencies)
        truncated_phase_shifts = self.avg_phase_shifts + psi * (raw_phase_shifts - self.avg_phase_shifts)
        if pose is None:
            camera_origin, pitch, yaw = sample_camera_positions(n=batch_size, r=1, horizontal_stddev=h_stddev, vertical_stddev=v_stddev, horizontal_mean=h_mean, vertical_mean=v_mean, device=self.device, mode=sample_dist)
        else:
            pitch, yaw = pose[:,:1], pose[:,1:2]
            camera_origin = pose2origin(self.device, pitch, yaw, batch_size, 1)
        if self.surf_track:
            # Surface network predicts a depth map from the truncated codes.
            pose_scaled = (torch.cat([pitch, yaw], -1) - math.pi/2) * 10
            freq_phase = torch.cat([truncated_frequencies, truncated_phase_shifts], -1)/10
            pred = self.surfacenet(freq_phase, pose_scaled)
            depth_pred = pred[:,0,...] # 2nd channel not used for now
            depth_pred = ray_start + depth_pred * (ray_end - ray_start)
            if delta > 0:
                # Sample depths in a band of width `delta` around the
                # predicted surface, clamped inside the frustum.
                sample_depth = resize(depth_pred, img_size)
                delta = torch.ones_like(sample_depth) * delta
                sample_depth = torch.max(sample_depth, ray_start+delta/2)
                sample_depth = torch.min(sample_depth, ray_end-delta/2)
                points_cam, z_vals, rays_d_cam = get_rays_from_depth(batch_size, num_steps, sample_depth, delta, resolution=(img_size, img_size), device=self.device, fov=fov) # batch_size, pixels, num_steps, 1
            else:
                points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end) # batch_size, pixels, num_steps, 1
        else:
            depth_pred = None
            points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end) # batch_size, pixels, num_steps, 1
        transformed_points, z_vals, transformed_ray_directions, transformed_ray_origins = transform_sampled_points(points_cam, z_vals, rays_d_cam, camera_origin, self.device)

        transformed_ray_directions_expanded = torch.unsqueeze(transformed_ray_directions, -2)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.expand(-1, -1, num_steps, -1)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.reshape(batch_size, img_size*img_size*num_steps, 3)
        transformed_points = transformed_points.reshape(batch_size, img_size*img_size*num_steps, 3)

        if lock_view_dependence:
            transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
            transformed_ray_directions_expanded[..., -1] = -1

        # BATCHED SAMPLE: evaluate at most max_batch_size points per call.
        coarse_output = {}
        for b in range(batch_size):
            head = 0
            while head < transformed_points.shape[1]:
                tail = head + max_batch_size
                output = self.siren.forward_with_frequencies_phase_shifts(transformed_points[b:b+1, head:tail], truncated_frequencies[b:b+1], truncated_phase_shifts[b:b+1], l=l[b:b+1],
                                                                          ray_directions=transformed_ray_directions_expanded[b:b+1, head:tail], rt_normal=rt_normal)
                for k, v in output.items():
                    if not k in coarse_output:
                        coarse_output[k] = torch.zeros((batch_size, transformed_points.shape[1], v.size(-1)), device=self.device)
                    coarse_output[k][b:b+1, head:tail] = v
                head += max_batch_size
        for k, v in coarse_output.items():
            coarse_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)
        # END BATCHED SAMPLE

        if hierarchical_sample:
            with torch.no_grad():
                transformed_points = transformed_points.reshape(batch_size, img_size * img_size, num_steps, 3)
                weights = fancy_integration(coarse_output, z_vals, device=self.device, clamp_mode=kwargs['clamp_mode'], noise_std=kwargs['nerf_noise'])['weights']
                weights = weights.reshape(batch_size * img_size * img_size, num_steps) + 1e-5
                z_vals = z_vals.reshape(batch_size * img_size * img_size, num_steps) # We squash the dimensions here. This means we importance sample for every batch for every ray
                z_vals_mid = 0.5 * (z_vals[: ,:-1] + z_vals[: ,1:]) # (N_rays, N_samples-1) interval mid points
                z_vals = z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)
                fine_z_vals = sample_pdf(z_vals_mid, weights[:, 1:-1],
                                         num_steps, det=False).detach().to(self.device) # batch_size, num_pixels**2, num_steps
                fine_z_vals = fine_z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)

                fine_points = transformed_ray_origins.unsqueeze(2).contiguous() + transformed_ray_directions.unsqueeze(2).contiguous() * fine_z_vals.expand(-1,-1,-1,3).contiguous()
                fine_points = fine_points.reshape(batch_size, img_size*img_size*num_steps, 3)
                #### end new importance sampling

                if lock_view_dependence:
                    transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
                    transformed_ray_directions_expanded[..., -1] = -1

            # BATCHED SAMPLE (fine pass, same chunking as the coarse pass)
            fine_output = {}
            for b in range(batch_size):
                head = 0
                while head < fine_points.shape[1]:
                    tail = head + max_batch_size
                    output = self.siren.forward_with_frequencies_phase_shifts(fine_points[b:b+1, head:tail], truncated_frequencies[b:b+1], truncated_phase_shifts[b:b+1], l=l[b:b+1],
                                                                              ray_directions=transformed_ray_directions_expanded[b:b+1, head:tail], rt_normal=rt_normal)
                    for k, v in output.items():
                        if not k in fine_output:
                            fine_output[k] = torch.zeros((batch_size, fine_points.shape[1], v.size(-1)), device=self.device)
                        fine_output[k][b:b+1, head:tail] = v
                    head += max_batch_size
            for k, v in fine_output.items():
                fine_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)
            # END BATCHED SAMPLE

            # Merge coarse and fine samples sorted by depth along each ray.
            all_z_vals = torch.cat([fine_z_vals, z_vals], dim = -2)
            _, indices = torch.sort(all_z_vals, dim=-2)
            all_z_vals = torch.gather(all_z_vals, -2, indices)
            all_outputs = {}
            for k, v in coarse_output.items():
                all_outputs[k] = torch.cat([fine_output[k], v], dim = -2)
                all_outputs[k] = torch.gather(all_outputs[k], -2, indices.expand(-1, -1, -1, all_outputs[k].size(-1)))
        else:
            all_outputs = coarse_output
            all_z_vals = z_vals

        results = fancy_integration(all_outputs, all_z_vals, device=self.device, white_back=kwargs.get('white_back', False), clamp_mode = kwargs['clamp_mode'], last_back=kwargs.get('last_back', False), fill_mode=kwargs.get('fill_mode', None), noise_std=kwargs['nerf_noise'])
        if self.shading:
            results = self.lambertian_shading(results, l, l_ratio=l_ratio)

        # Reshape image-like outputs to NCHW and rescale colours to [-1, 1].
        for k in ['rgb', 'rgb_refer', 'albedo']:
            if k in results:
                results[k] = results[k].reshape(batch_size, img_size, img_size, 3)
                results[k] = results[k].permute(0, 3, 1, 2).contiguous() * 2 - 1
        if 'normal' in results:
            results['normal'] = results['normal'].reshape(batch_size, img_size, img_size, 3).permute(0, 3, 1, 2).contiguous()
        for k in ['depth', 'shading']:
            if k in results:
                results[k] = results[k].reshape(batch_size, img_size, img_size).contiguous()
        results['depth'] = results['depth'].clamp(min=ray_start, max=ray_end)
        results['pose'] = torch.cat([pitch, yaw], -1)
        results['depth_pred'] = depth_pred
        return results
# Used for rendering interpolations
def staged_forward_with_frequencies(self, z, frequencies, phase_shifts, l, img_size, fov, ray_start, ray_end, num_steps, h_stddev, v_stddev, h_mean, v_mean, psi=0.8, lock_view_dependence=False, max_batch_size=50000, sample_dist=None, hierarchical_sample=False, delta=-1, pose=None, l_ratio=1, rt_normal=False, **kwargs):
    """Chunked (memory-bounded) rendering from caller-supplied SIREN codes.

    Like `staged_forward`, but the SIREN conditioning comes from the given
    `frequencies`/`phase_shifts` (e.g. for latent interpolation) instead of
    mapping `z`; `z` is still consumed by the surface-tracking branch.
    Codes are truncated towards the cached averages with factor `psi`.
    Returns the same result dict as `staged_forward`.
    """
    batch_size = z.shape[0]
    self.generate_avg_frequencies()
    with torch.no_grad():
        # Truncation trick: pull the supplied codes towards their averages.
        truncated_frequencies = self.avg_frequencies + psi * (frequencies - self.avg_frequencies)
        truncated_phase_shifts = self.avg_phase_shifts + psi * (phase_shifts - self.avg_phase_shifts)
        if pose is None:
            camera_origin, pitch, yaw = sample_camera_positions(n=batch_size, r=1, horizontal_stddev=h_stddev, vertical_stddev=v_stddev, horizontal_mean=h_mean, vertical_mean=v_mean, device=self.device, mode=sample_dist)
        else:
            pitch, yaw = pose[:,:1], pose[:,1:2]
            camera_origin = pose2origin(self.device, pitch, yaw, batch_size, 1)
        if self.surf_track:
            # Surface network predicts a depth map; rescale it to img_size.
            pred = self.surfacenet(torch.cat([z, pitch, yaw], -1))
            if pred.size(2) > img_size:
                pred = F.interpolate(pred, img_size, mode='area')
            elif pred.size(2) < img_size:
                pred = F.interpolate(pred, img_size, mode='bilinear')
            depth_pred = pred[:,0,...] # 2nd channel not used for now
            depth_pred = ray_start + depth_pred * (ray_end - ray_start)
            if delta > 0:
                # Sample depths in a band of width `delta` around the
                # predicted surface, clamped inside the frustum.
                sample_depth = resize(depth_pred, img_size)
                delta = torch.ones_like(sample_depth) * delta
                sample_depth = torch.max(sample_depth, ray_start+delta/2)
                sample_depth = torch.min(sample_depth, ray_end-delta/2)
                points_cam, z_vals, rays_d_cam = get_rays_from_depth(batch_size, num_steps, sample_depth, delta, resolution=(img_size, img_size), device=self.device, fov=fov) # batch_size, pixels, num_steps, 1
            else:
                points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end) # batch_size, pixels, num_steps, 1
        else:
            depth_pred = None
            points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end) # batch_size, pixels, num_steps, 1
        transformed_points, z_vals, transformed_ray_directions, transformed_ray_origins = transform_sampled_points(points_cam, z_vals, rays_d_cam, camera_origin, self.device)

        transformed_ray_directions_expanded = torch.unsqueeze(transformed_ray_directions, -2)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.expand(-1, -1, num_steps, -1)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.reshape(batch_size, img_size*img_size*num_steps, 3)
        transformed_points = transformed_points.reshape(batch_size, img_size*img_size*num_steps, 3)

        if lock_view_dependence:
            transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
            transformed_ray_directions_expanded[..., -1] = -1

        # BATCHED SAMPLE: evaluate at most max_batch_size points per call.
        coarse_output = {}
        for b in range(batch_size):
            head = 0
            while head < transformed_points.shape[1]:
                tail = head + max_batch_size
                output = self.siren.forward_with_frequencies_phase_shifts(transformed_points[b:b+1, head:tail], truncated_frequencies[b:b+1], truncated_phase_shifts[b:b+1], l=l[b:b+1],
                                                                          ray_directions=transformed_ray_directions_expanded[b:b+1, head:tail], rt_normal=rt_normal)
                for k, v in output.items():
                    if not k in coarse_output:
                        coarse_output[k] = torch.zeros((batch_size, transformed_points.shape[1], v.size(-1)), device=self.device)
                    coarse_output[k][b:b+1, head:tail] = v
                head += max_batch_size
        for k, v in coarse_output.items():
            coarse_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)
        # END BATCHED SAMPLE

        if hierarchical_sample:
            with torch.no_grad():
                transformed_points = transformed_points.reshape(batch_size, img_size * img_size, num_steps, 3)
                weights = fancy_integration(coarse_output, z_vals, device=self.device, clamp_mode=kwargs['clamp_mode'], noise_std=kwargs['nerf_noise'])['weights']
                weights = weights.reshape(batch_size * img_size * img_size, num_steps) + 1e-5
                z_vals = z_vals.reshape(batch_size * img_size * img_size, num_steps) # We squash the dimensions here. This means we importance sample for every batch for every ray
                z_vals_mid = 0.5 * (z_vals[: ,:-1] + z_vals[: ,1:]) # (N_rays, N_samples-1) interval mid points
                z_vals = z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)
                fine_z_vals = sample_pdf(z_vals_mid, weights[:, 1:-1],
                                         num_steps, det=False).detach().to(self.device) # batch_size, num_pixels**2, num_steps
                fine_z_vals = fine_z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)

                fine_points = transformed_ray_origins.unsqueeze(2).contiguous() + transformed_ray_directions.unsqueeze(2).contiguous() * fine_z_vals.expand(-1,-1,-1,3).contiguous()
                fine_points = fine_points.reshape(batch_size, img_size*img_size*num_steps, 3)
                #### end new importance sampling

                if lock_view_dependence:
                    transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
                    transformed_ray_directions_expanded[..., -1] = -1

            # BATCHED SAMPLE (fine pass, same chunking as the coarse pass)
            fine_output = {}
            for b in range(batch_size):
                head = 0
                while head < fine_points.shape[1]:
                    tail = head + max_batch_size
                    output = self.siren.forward_with_frequencies_phase_shifts(fine_points[b:b+1, head:tail], truncated_frequencies[b:b+1], truncated_phase_shifts[b:b+1], l=l[b:b+1],
                                                                              ray_directions=transformed_ray_directions_expanded[b:b+1, head:tail], rt_normal=rt_normal)
                    for k, v in output.items():
                        if not k in fine_output:
                            fine_output[k] = torch.zeros((batch_size, fine_points.shape[1], v.size(-1)), device=self.device)
                        fine_output[k][b:b+1, head:tail] = v
                    head += max_batch_size
            for k, v in fine_output.items():
                fine_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)
            # END BATCHED SAMPLE

            # Merge coarse and fine samples sorted by depth along each ray.
            all_z_vals = torch.cat([fine_z_vals, z_vals], dim = -2)
            _, indices = torch.sort(all_z_vals, dim=-2)
            all_z_vals = torch.gather(all_z_vals, -2, indices)
            all_outputs = {}
            for k, v in coarse_output.items():
                all_outputs[k] = torch.cat([fine_output[k], v], dim = -2)
                all_outputs[k] = torch.gather(all_outputs[k], -2, indices.expand(-1, -1, -1, all_outputs[k].size(-1)))
        else:
            all_outputs = coarse_output
            all_z_vals = z_vals

        results = fancy_integration(all_outputs, all_z_vals, device=self.device, white_back=kwargs.get('white_back', False), clamp_mode = kwargs['clamp_mode'], last_back=kwargs.get('last_back', False), fill_mode=kwargs.get('fill_mode', None), noise_std=kwargs['nerf_noise'])
        if self.shading:
            # BUGFIX: the arguments were swapped (`lambertian_shading(l, results, ...)`),
            # passing the lighting code as the render dict; the signature is
            # lambertian_shading(inputs, l, l_ratio=1), as at every other call site.
            results = self.lambertian_shading(results, l, l_ratio=l_ratio)

        # Reshape image-like outputs to NCHW and rescale colours to [-1, 1].
        for k in ['rgb', 'rgb_refer', 'albedo']:
            if k in results:
                results[k] = results[k].reshape(batch_size, img_size, img_size, 3)
                results[k] = results[k].permute(0, 3, 1, 2).contiguous() * 2 - 1
        if 'normal' in results:
            results['normal'] = results['normal'].reshape(batch_size, img_size, img_size, 3).permute(0, 3, 1, 2).contiguous()
        for k in ['depth', 'shading']:
            if k in results:
                results[k] = results[k].reshape(batch_size, img_size, img_size).contiguous()
        results['depth'] = results['depth'].clamp(min=ray_start, max=ray_end)
        results['pose'] = torch.cat([pitch, yaw], -1)
        results['depth_pred'] = depth_pred
        return results
def forward_with_frequencies(self, z, frequencies, phase_shifts, l, img_size, fov, ray_start, ray_end, num_steps, h_stddev, v_stddev, h_mean, v_mean,
                             hierarchical_sample, sample_dist=None, lock_view_dependence=False, delta=-1, l_ratio=1, pose=None, rt_normal=False, **kwargs):
    """Differentiable render from explicit SIREN frequencies and phase shifts.

    Same rendering pipeline as `forward`, but the SIREN conditioning comes
    from the given `frequencies`/`phase_shifts` instead of mapping `z`;
    `z` is still consumed by the surface-tracking branch.  Unlike the
    staged variants this path keeps gradients for the coarse/fine SIREN
    evaluations.  Returns the result dict (no 'depth_pred' entry here).
    """
    batch_size = frequencies.shape[0]
    with_grad = torch.is_grad_enabled()
    with torch.no_grad():
        if pose is None:
            camera_origin, pitch, yaw = sample_camera_positions(n=batch_size, r=1, horizontal_stddev=h_stddev, vertical_stddev=v_stddev, horizontal_mean=h_mean, vertical_mean=v_mean, device=self.device, mode=sample_dist)
        else:
            pitch, yaw = pose[:,:1], pose[:,1:2]
            camera_origin = pose2origin(self.device, pitch, yaw, batch_size, 1)
        if self.surf_track:
            # Predict a surface depth map; gradients are re-enabled so the
            # surface network can be trained through this prediction.
            with torch.set_grad_enabled(with_grad):
                freq, phase = self.siren.mapping_network(z)
                freq, phase = freq.detach(), phase.detach()
                pose_scaled = (torch.cat([pitch, yaw], -1) - math.pi/2) * 10
                freq_phase = torch.cat([freq, phase], -1)/10
                pred = self.surfacenet(freq_phase, pose_scaled)
                depth_pred = pred[:,0,...] # 2nd channel not used for now
                depth_pred = ray_start + depth_pred * (ray_end - ray_start)
            if delta > 0:
                # Sample depths in a band of width `delta` around the
                # predicted surface, clamped inside the frustum.
                sample_depth = resize(depth_pred, img_size)
                delta = torch.ones_like(sample_depth) * delta
                sample_depth = torch.max(sample_depth, ray_start+delta/2)
                sample_depth = torch.min(sample_depth, ray_end-delta/2)
                points_cam, z_vals, rays_d_cam = get_rays_from_depth(batch_size, num_steps, sample_depth, delta, resolution=(img_size, img_size), device=self.device, fov=fov) # batch_size, pixels, num_steps, 1
            else:
                points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end) # batch_size, pixels, num_steps, 1
        else:
            depth_pred = None
            points_cam, z_vals, rays_d_cam = get_initial_rays_trig(batch_size, num_steps, resolution=(img_size, img_size), device=self.device, fov=fov, ray_start=ray_start, ray_end=ray_end) # batch_size, pixels, num_steps, 1
        transformed_points, z_vals, transformed_ray_directions, transformed_ray_origins = transform_sampled_points(points_cam, z_vals, rays_d_cam, camera_origin, self.device)

        transformed_ray_directions_expanded = torch.unsqueeze(transformed_ray_directions, -2)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.expand(-1, -1, num_steps, -1)
        transformed_ray_directions_expanded = transformed_ray_directions_expanded.reshape(batch_size, img_size*img_size*num_steps, 3)
        transformed_points = transformed_points.reshape(batch_size, img_size*img_size*num_steps, 3)

        if lock_view_dependence:
            transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
            transformed_ray_directions_expanded[..., -1] = -1

    # Coarse prediction (with gradients, unlike the staged variants).
    coarse_output = self.siren.forward_with_frequencies_phase_shifts(transformed_points, frequencies, phase_shifts, l=l, ray_directions=transformed_ray_directions_expanded, rt_normal=rt_normal)
    for k, v in coarse_output.items():
        coarse_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)

    if hierarchical_sample:
        with torch.no_grad():
            transformed_points = transformed_points.reshape(batch_size, img_size * img_size, num_steps, 3)
            weights = fancy_integration(coarse_output, z_vals, device=self.device, clamp_mode=kwargs['clamp_mode'], noise_std=kwargs['nerf_noise'])['weights']
            weights = weights.reshape(batch_size * img_size * img_size, num_steps) + 1e-5

            #### Start new importance sampling
            z_vals = z_vals.reshape(batch_size * img_size * img_size, num_steps) # We squash the dimensions here. This means we importance sample for every batch for every ray
            z_vals_mid = 0.5 * (z_vals[: ,:-1] + z_vals[: ,1:]) # (N_rays, N_samples-1) interval mid points
            z_vals = z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)
            fine_z_vals = sample_pdf(z_vals_mid, weights[:, 1:-1],
                                     num_steps, det=False).detach() # batch_size, num_pixels**2, num_steps
            fine_z_vals = fine_z_vals.reshape(batch_size, img_size * img_size, num_steps, 1)

            fine_points = transformed_ray_origins.unsqueeze(2).contiguous() + transformed_ray_directions.unsqueeze(2).contiguous() * fine_z_vals.expand(-1,-1,-1,3).contiguous()
            fine_points = fine_points.reshape(batch_size, img_size*img_size*num_steps, 3)
            #### end new importance sampling

        if lock_view_dependence:
            transformed_ray_directions_expanded = torch.zeros_like(transformed_ray_directions_expanded)
            transformed_ray_directions_expanded[..., -1] = -1

        # Fine prediction on the importance-sampled points.
        fine_output = self.siren.forward_with_frequencies_phase_shifts(fine_points, frequencies, phase_shifts, l=l, ray_directions=transformed_ray_directions_expanded, rt_normal=rt_normal)
        for k, v in fine_output.items():
            fine_output[k] = v.reshape(batch_size, img_size * img_size, num_steps, -1)

        # Merge coarse and fine samples sorted by depth along each ray.
        all_z_vals = torch.cat([fine_z_vals, z_vals], dim = -2)
        _, indices = torch.sort(all_z_vals, dim=-2)
        all_z_vals = torch.gather(all_z_vals, -2, indices)
        all_outputs = {}
        for k, v in coarse_output.items():
            all_outputs[k] = torch.cat([fine_output[k], v], dim = -2)
            all_outputs[k] = torch.gather(all_outputs[k], -2, indices.expand(-1, -1, -1, all_outputs[k].size(-1)))
    else:
        all_outputs = coarse_output
        all_z_vals = z_vals

    results = fancy_integration(all_outputs, all_z_vals, device=self.device, white_back=kwargs.get('white_back', False), last_back=kwargs.get('last_back', False), clamp_mode=kwargs['clamp_mode'], noise_std=kwargs['nerf_noise'])
    if self.shading:
        results = self.lambertian_shading(results, l, l_ratio=l_ratio)

    # Reshape image-like outputs to NCHW and rescale colours to [-1, 1].
    for k in ['rgb', 'rgb_refer']:
        if k in results:
            results[k] = results[k].reshape(batch_size, img_size, img_size, 3)
            results[k] = results[k].permute(0, 3, 1, 2).contiguous() * 2 - 1
    if 'normal' in results:
        results['normal'] = results['normal'].reshape(batch_size, img_size, img_size, 3).permute(0, 3, 1, 2).contiguous()
    for k in ['depth', 'shading']:
        if k in results:
            results[k] = results[k].reshape(batch_size, img_size, img_size).contiguous()
    results['depth'] = results['depth'].clamp(min=ray_start, max=ray_end)
    results['pose'] = torch.cat([pitch, yaw], -1)
    return results
| 66.137681
| 324
| 0.624603
| 4,837
| 36,508
| 4.405003
| 0.054786
| 0.045994
| 0.059886
| 0.042052
| 0.877552
| 0.875862
| 0.867978
| 0.864692
| 0.861689
| 0.861689
| 0
| 0.016886
| 0.27005
| 36,508
| 551
| 325
| 66.257713
| 0.782656
| 0.05859
| 0
| 0.817978
| 0
| 0
| 0.01766
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020225
| false
| 0
| 0.011236
| 0
| 0.044944
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac260519216470963df403693d0e432ebea528b2
| 57
|
py
|
Python
|
passwordgenerator/__init__.py
|
aminbeigi/Password-Generator-Rest-API
|
b64159cd14cecabb6481be2ae10780e1435fb6a7
|
[
"MIT"
] | null | null | null |
passwordgenerator/__init__.py
|
aminbeigi/Password-Generator-Rest-API
|
b64159cd14cecabb6481be2ae10780e1435fb6a7
|
[
"MIT"
] | null | null | null |
passwordgenerator/__init__.py
|
aminbeigi/Password-Generator-Rest-API
|
b64159cd14cecabb6481be2ae10780e1435fb6a7
|
[
"MIT"
] | null | null | null |
from .app import API_RESPONSE_LIMIT
from .app import app
| 28.5
| 36
| 0.824561
| 10
| 57
| 4.5
| 0.6
| 0.311111
| 0.577778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 57
| 2
| 37
| 28.5
| 0.918367
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac46ecacef572c8618aded349c3ea24560fff672
| 147
|
py
|
Python
|
streamvbyte/__init__.py
|
iiSeymour/pystreamvbyte
|
7110399ac2dbcb98433185700828fee2d9b79a28
|
[
"Apache-2.0"
] | 3
|
2020-12-28T02:15:35.000Z
|
2022-02-22T19:59:33.000Z
|
streamvbyte/__init__.py
|
iiSeymour/pystreamvbyte
|
7110399ac2dbcb98433185700828fee2d9b79a28
|
[
"Apache-2.0"
] | 4
|
2019-05-05T22:25:31.000Z
|
2021-07-04T16:56:29.000Z
|
streamvbyte/__init__.py
|
iiSeymour/pystreamvbyte
|
7110399ac2dbcb98433185700828fee2d9b79a28
|
[
"Apache-2.0"
] | 1
|
2021-07-01T19:06:23.000Z
|
2021-07-01T19:06:23.000Z
|
__version__ = '0.4.1'
from streamvbyte.lib import encode, encode_0124, encode_delta
from streamvbyte.lib import decode, decode_0124, decode_delta
| 29.4
| 61
| 0.816327
| 22
| 147
| 5.090909
| 0.545455
| 0.267857
| 0.321429
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083969
| 0.108844
| 147
| 4
| 62
| 36.75
| 0.770992
| 0
| 0
| 0
| 0
| 0
| 0.034014
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac619334ac18ce8de78877de38c168ac683f17ce
| 5,542
|
py
|
Python
|
gymnasiums/tests/tests_gymnasium_update_view.py
|
hbuyse/dj-gymnasiums
|
39f590dc703eec01c753ea54d7f4afd06f81a582
|
[
"MIT"
] | null | null | null |
gymnasiums/tests/tests_gymnasium_update_view.py
|
hbuyse/dj-gymnasiums
|
39f590dc703eec01c753ea54d7f4afd06f81a582
|
[
"MIT"
] | null | null | null |
gymnasiums/tests/tests_gymnasium_update_view.py
|
hbuyse/dj-gymnasiums
|
39f590dc703eec01c753ea54d7f4afd06f81a582
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# coding=utf-8
"""Tests the views."""
# Django
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
# Current django project
from gymnasiums.models import Gymnasium
class TestGymnasiumUpdateViewAsAnonymous(TestCase):
    """Gymnasium update view behaviour for an anonymous visitor."""

    def setUp(self):
        """Create one gymnasium shared by all the tests of this class."""
        self.gymnasium_data = {
            'name': 'Watteau',
            'address': '37 rue Lequesne',
            'city': 'Nogent-Sur-Marne',
            'zip_code': '94130',
            'phone': '0100000000',
            'surface': '123',
            'capacity': '456',
        }
        self.gymnasium = Gymnasium.objects.create(**self.gymnasium_data)

    def test_get(self):
        """A GET on the update view is forbidden (403) when not logged in."""
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)

    def test_post(self):
        """A POST on the update view is forbidden (403) when not logged in."""
        self.gymnasium_data['name'] = 'Watteau2'
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.post(url, self.gymnasium_data)
        self.assertEqual(response.status_code, 403)
class TestGymnasiumUpdateViewAsLogged(TestCase):
    """Gymnasium update view behaviour for an authenticated non-staff user."""

    def setUp(self):
        """Create a plain user and one gymnasium for the tests below."""
        self.dict = {
            'username': "hbuyse",
            'password': "usermodel",
            'first_name': "Henri",
            'last_name': "Buyse"
        }
        get_user_model().objects.create_user(**self.dict)
        self.gymnasium_data = {
            'name': 'Watteau',
            'address': '37 rue Lequesne',
            'city': 'Nogent-Sur-Marne',
            'zip_code': '94130',
            'phone': '0100000000',
            'surface': '123',
            'capacity': '456',
        }
        self.gymnasium = Gymnasium.objects.create(**self.gymnasium_data)

    def _login(self):
        """Authenticate the test client as the user created in setUp."""
        logged_in = self.client.login(username=self.dict['username'],
                                      password=self.dict['password'])
        self.assertTrue(logged_in)

    def test_get(self):
        """A GET on the update view is denied (403) for a plain user."""
        self._login()
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)

    def test_post(self):
        """A POST on the update view is denied (403) for a plain user."""
        self.gymnasium_data['name'] = 'Watteau2'
        self._login()
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.post(url, self.gymnasium_data)
        self.assertEqual(response.status_code, 403)
class TestGymnasiumUpdateViewAsStaff(TestCase):
    """Gymnasium update view behaviour for a staff user."""

    def setUp(self):
        """Create a staff user and one gymnasium for the tests below."""
        self.dict = {
            'username': "hbuyse",
            'password': "usermodel",
            'first_name': "Henri",
            'last_name': "Buyse",
            'is_staff': True
        }
        get_user_model().objects.create_user(**self.dict)
        self.gymnasium_data = {
            'name': 'Watteau',
            'address': '37 rue Lequesne',
            'city': 'Nogent-Sur-Marne',
            'zip_code': '94130',
            'phone': '0100000000',
            'surface': '123',
            'capacity': '456',
        }
        self.gymnasium = Gymnasium.objects.create(**self.gymnasium_data)

    def _login(self):
        """Authenticate the test client as the staff user from setUp."""
        logged_in = self.client.login(username=self.dict['username'],
                                      password=self.dict['password'])
        self.assertTrue(logged_in)

    def test_get(self):
        """A staff user can GET the update view (200)."""
        self._login()
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_post(self):
        """A staff POST succeeds and redirects (302) to the gymnasium page."""
        self.gymnasium_data['name'] = 'Watteau2'
        self._login()
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.post(url, self.gymnasium_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/{}".format(self.gymnasium.id))
class TestGymnasiumUpdateViewAsSuperuser(TestCase):
    """Gymnasium update view behaviour for a superuser."""

    def setUp(self):
        """Create a superuser and one gymnasium for the tests below."""
        self.dict = {
            'username': "hbuyse",
            'password': "usermodel",
            'first_name': "Henri",
            'last_name': "Buyse",
            'email': 'henri.buyse@gmail.com'
        }
        get_user_model().objects.create_superuser(**self.dict)
        self.gymnasium_data = {
            'name': 'Watteau',
            'address': '37 rue Lequesne',
            'city': 'Nogent-Sur-Marne',
            'zip_code': '94130',
            'phone': '0100000000',
            'surface': '123',
            'capacity': '456',
        }
        self.gymnasium = Gymnasium.objects.create(**self.gymnasium_data)

    def _login(self):
        """Authenticate the test client as the superuser from setUp."""
        logged_in = self.client.login(username=self.dict['username'],
                                      password=self.dict['password'])
        self.assertTrue(logged_in)

    def test_get(self):
        """A superuser can GET the update view (200)."""
        self._login()
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

    def test_post(self):
        """A superuser POST succeeds and redirects (302) to the gymnasium page."""
        self.gymnasium_data['name'] = 'Watteau2'
        self._login()
        url = reverse('gymnasiums:update', kwargs={'pk': self.gymnasium.id})
        response = self.client.post(url, self.gymnasium_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/{}".format(self.gymnasium.id))
| 32.409357
| 113
| 0.579935
| 587
| 5,542
| 5.383305
| 0.158433
| 0.123418
| 0.086076
| 0.053165
| 0.881329
| 0.873418
| 0.873418
| 0.873418
| 0.873418
| 0.873418
| 0
| 0.029319
| 0.255323
| 5,542
| 170
| 114
| 32.6
| 0.73637
| 0.054313
| 0
| 0.828829
| 0
| 0
| 0.179348
| 0.004076
| 0
| 0
| 0
| 0
| 0.144144
| 1
| 0.108108
| false
| 0.081081
| 0.036036
| 0
| 0.18018
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
3bc1d0358b5d3b8925f0da8fcc5e6a7bc84ce430
| 28,195
|
py
|
Python
|
test/functional-tests-legacy/PfwTestCase/Domains/tDomain_Configuration.py
|
mgaio/parameter-framework
|
320b4c11211706810c9f38d7599cac37dde54888
|
[
"BSD-3-Clause"
] | 40
|
2015-01-29T16:00:41.000Z
|
2017-10-25T22:00:23.000Z
|
test/functional-tests-legacy/PfwTestCase/Domains/tDomain_Configuration.py
|
mgaio/parameter-framework
|
320b4c11211706810c9f38d7599cac37dde54888
|
[
"BSD-3-Clause"
] | 365
|
2015-01-02T14:33:40.000Z
|
2017-10-13T00:49:58.000Z
|
test/functional-tests-legacy/PfwTestCase/Domains/tDomain_Configuration.py
|
mgaio/parameter-framework
|
320b4c11211706810c9f38d7599cac37dde54888
|
[
"BSD-3-Clause"
] | 43
|
2015-01-22T10:54:58.000Z
|
2017-07-15T12:26:43.000Z
|
# -*-coding:utf-8 -*
# Copyright (c) 2011-2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Creation, renaming and deletion configuration testcases
List of tested functions :
--------------------------
- [listConfigurations] function
- [createConfiguration] function
- [deleteConfiguration] function
- [renameConfiguration] function
Test cases :
------------
- Testing configuration creation error
- Testing configuration renaming error
- Testing configuration deletion error
- Testing nominal case
"""
import os
from Util.PfwUnitTestLib import PfwTestCase
from Util import ACTLogging
log=ACTLogging.Logger()
# Test of Domains - Rename
class TestCases(PfwTestCase):
    """Configuration creation, renaming and deletion test cases.

    Every test creates its own domain, populates it with
    ``new_conf_number`` configurations, runs its scenario, checks that the
    configuration listing is in the expected state and finally deletes the
    domain so that tests remain independent of each other.

    Fixes a copy-paste bug in ``test_Conf_Deletion_Error`` which logged the
    docstring of ``test_Conf_Renaming_Error`` through ``print`` instead of
    its own docstring through ``log.D``. Listing comparisons are now done
    in memory instead of round-tripping through temporary files, so no
    scratch files are left behind if an assertion fires mid-test.
    """

    def setUp(self):
        # Tuning mode must be on to create/modify domains and configurations.
        self.pfw.sendCmd("setTuningMode", "on")
        self.domain_name = "domain_test"
        self.conf_test = "conf_white"
        self.conf_test_renamed = "conf_black"
        self.new_conf_number = 5

    def tearDown(self):
        self.pfw.sendCmd("setTuningMode", "off")

    # ------------------------------------------------------------------
    # Private helpers shared by the test cases below
    # ------------------------------------------------------------------

    def _conf_name(self, base, index):
        """Return the generated configuration name for *base* and *index*."""
        return "".join([base, "_", str(index)])

    def _create_test_domain(self):
        """Create the temporary domain used by the running test."""
        log.I("New domain creation for testing purpose : %s" % (self.domain_name))
        log.I("command [createDomain]")
        out, err = self.pfw.sendCmd("createDomain", self.domain_name, "")
        assert out == "Done", out
        assert err is None, "ERROR : command [createDomain] - Error while creating domain %s" % (self.domain_name)
        log.I("command [createDomain] correctly executed")
        log.I("Domain %s created" % (self.domain_name))

    def _delete_test_domain(self):
        """Delete the temporary domain created by the running test."""
        log.I("End of test, new domain deletion")
        log.I("command [deleteDomain]")
        out, err = self.pfw.sendCmd("deleteDomain", self.domain_name, "")
        assert out == "Done", "ERROR : %s" % (out)
        assert err is None, "ERROR : command [deleteDomain] - Error while deleting domain %s" % (self.domain_name)
        log.I("command [deleteDomain] correctly executed")

    def _create_test_configurations(self):
        """Create ``new_conf_number`` configurations on the test domain."""
        for iteration in range(self.new_conf_number):
            new_conf_name = self._conf_name(self.conf_test, iteration)
            log.I("New configuration %s creation for domain %s" % (new_conf_name, self.domain_name))
            log.I("command [createConfiguration]")
            out, err = self.pfw.sendCmd("createConfiguration", self.domain_name, new_conf_name)
            assert out == "Done", out
            assert err is None, "ERROR : command [createConfiguration] - Error while creating configuration %s" % (new_conf_name)
            log.I("command [createConfiguration] correctly executed")
            log.I("Configuration %s created for domain %s" % (new_conf_name, self.domain_name))

    def _list_test_configurations(self):
        """Return the raw [listConfigurations] output for the test domain."""
        log.I("Configurations listing for domain %s" % (self.domain_name))
        log.I("command [listConfigurations]")
        out, err = self.pfw.sendCmd("listConfigurations", self.domain_name, "")
        assert err is None, "ERROR : command [listConfigurations] - Error while listing configurations for domain %s" % (self.domain_name)
        log.I("command [listConfigurations] correctly executed")
        return out

    def _check_configurations_unchanged(self, backup):
        """Assert the current listing still matches the *backup* listing."""
        log.I("Configurations listing conformity check")
        listed_conf_backup = backup.splitlines()
        listed_conf = self._list_test_configurations().splitlines()
        assert listed_conf == listed_conf_backup, "ERROR : Error while listing configuration %s (found %s)" % (listed_conf_backup, listed_conf)
        log.I("No change detected, listed configurations names conform to expected values")

    # ------------------------------------------------------------------
    # Test cases
    # ------------------------------------------------------------------

    def test_Conf_Creation_Error(self):
        """
        Testing configuration creation error
        ------------------------------------
            Test case description :
            ~~~~~~~~~~~~~~~~~~~~~~~
                - Create an already existent configuration
                - Create a configuration with no name specified
                - Create a configuration on a wrong domain name
            Tested commands :
            ~~~~~~~~~~~~~~~~~
                - [createConfiguration] function
                - [createDomain] function
                - [listConfigurations] function
                - [deleteConfiguration] function
                - [deleteDomain] function
            Expected result :
            ~~~~~~~~~~~~~~~~~
                - no configuration created
                - existent configurations not affected by error
        """
        log.D(self.test_Conf_Creation_Error.__doc__)
        self._create_test_domain()
        self._create_test_configurations()
        # Backup of the listing, to verify later that the error scenarios
        # left the existing configurations untouched.
        configurations_backup = self._list_test_configurations()
        # Scenario 1: creating already existent configuration names must fail.
        log.I("Creating an already existent configurations names")
        for iteration in range(self.new_conf_number):
            new_conf_name = self._conf_name(self.conf_test, iteration)
            log.I("Trying to create already existent %s configuration for domain %s" % (new_conf_name, self.domain_name))
            log.I("command [createConfiguration]")
            out, err = self.pfw.sendCmd("createConfiguration", self.domain_name, new_conf_name, expectSuccess=False)
            assert out != "Done", "ERROR : command [createConfiguration] - Error not detected while creating already existent configuration %s" % (new_conf_name)
            assert err is None, "ERROR : command [createConfiguration] - Error while creating configuration %s" % (new_conf_name)
            log.I("command [createConfiguration] correctly executed")
            log.I("error correctly detected, no configuration created")
        # Scenario 2: creating a configuration without a name must fail.
        log.I("Creating a configuration without specifying a name")
        out, err = self.pfw.sendCmd("createConfiguration", self.domain_name, expectSuccess=False)
        assert out != "Done", "ERROR : command [createConfiguration] - Error not detected while creating a configuration without specifying a name"
        assert err is None, "ERROR : command [createConfiguration] - Error while creating configuration"
        log.I("error correctly detected")
        # Scenario 3: creating a configuration on an unknown domain must fail.
        log.I("Creating a configuration on a wrong domain name")
        new_conf_name = "new_conf"
        out, err = self.pfw.sendCmd("createConfiguration", "wrong_domain_name", new_conf_name, expectSuccess=False)
        assert out != "Done", "ERROR : command [createConfiguration] - Error not detected while creating a configuration on a wrong domain name"
        assert err is None, "ERROR : command [createConfiguration] - Error while creating configuration"
        log.I("error correctly detected")
        # Existing configurations must not have been affected.
        self._check_configurations_unchanged(configurations_backup)
        self._delete_test_domain()

    def test_Conf_Renaming_Error(self):
        """
        Testing configuration renaming error
        ------------------------------------
            Test case description :
            ~~~~~~~~~~~~~~~~~~~~~~~
                - Rename a configuration with an already used name
                - Rename a configuration with no name specified
                - Rename a configuration on a wrong domain name
            Tested commands :
            ~~~~~~~~~~~~~~~~~
                - [renameConfiguration] function
                - [createDomain] function
                - [listConfigurations] function
                - [createConfiguration] function
                - [deleteConfiguration] function
                - [deleteDomain] function
            Expected result :
            ~~~~~~~~~~~~~~~~~
                - error detected
                - no configuration created
                - existent configurations not affected by error
        """
        log.D(self.test_Conf_Renaming_Error.__doc__)
        self._create_test_domain()
        self._create_test_configurations()
        configurations_backup = self._list_test_configurations()
        # Scenario 1: renaming onto an already used name must fail.
        log.I("renaming a configuration with an already used name")
        for iteration in range(self.new_conf_number - 1):
            conf_name = self._conf_name(self.conf_test, iteration)
            new_conf_name = self._conf_name(self.conf_test, iteration + 1)
            log.I("Trying to rename %s on domain %s with an already used name : %s" % (conf_name, self.domain_name, new_conf_name))
            log.I("command [renameConfiguration]")
            out, err = self.pfw.sendCmd("renameConfiguration", self.domain_name, conf_name, new_conf_name, expectSuccess=False)
            assert out != "Done", "ERROR : command [renameConfiguration] - Error not detected while renaming configuration %s with an already used name" % (new_conf_name)
            assert err is None, "ERROR : command [renameConfiguration] - Error while renaming configuration %s" % (new_conf_name)
            log.I("command [renameConfiguration] correctly executed")
            log.I("error correctly detected, no configuration renamed")
        # Scenario 2: renaming without providing a new name must fail.
        log.I("renaming a configuration without specifying a new name")
        out, err = self.pfw.sendCmd("renameConfiguration", self.domain_name, new_conf_name, expectSuccess=False)
        assert out != "Done", "ERROR : command [renameConfiguration] - Error not detected while renaming a configuration without specifying a new name"
        assert err is None, "ERROR : command [renameConfiguration] - Error while renaming configuration"
        log.I("error correctly detected, no configuration renamed")
        # Scenario 3: renaming on an unknown domain must fail.
        log.I("renaming a configuration on a wrong domain name")
        new_conf_name = "new_conf"
        out, err = self.pfw.sendCmd("renameConfiguration", "wrong_domain_name", new_conf_name, "Configuration", expectSuccess=False)
        assert out != "Done", "ERROR : command [renameConfiguration] - Error not detected while renaming a configuration on a wrong domain name"
        assert err is None, "ERROR : command [renameConfiguration] - Error while renaming configuration"
        log.I("error correctly detected, no configuration renamed")
        # Existing configurations must not have been affected.
        self._check_configurations_unchanged(configurations_backup)
        self._delete_test_domain()

    def test_Conf_Deletion_Error(self):
        """
        Testing configuration deletion error
        ------------------------------------
            Test case description :
            ~~~~~~~~~~~~~~~~~~~~~~~
                - Delete a configuration with a non existent name
                - Delete a configuration with no name specified
                - Delete a configuration on a wrong domain name
            Tested commands :
            ~~~~~~~~~~~~~~~~~
                - [deleteConfiguration] function
                - [createDomain] function
                - [listConfigurations] function
                - [createConfiguration] function
                - [deleteDomain] function
            Expected result :
            ~~~~~~~~~~~~~~~~~
                - error detected
                - no configuration created
                - existent configurations not affected by error
        """
        # BUGFIX: this test used to print the docstring of
        # test_Conf_Renaming_Error through print(); log its own instead.
        log.D(self.test_Conf_Deletion_Error.__doc__)
        self._create_test_domain()
        self._create_test_configurations()
        configurations_backup = self._list_test_configurations()
        # Scenario 1: deleting a non existent configuration name must fail.
        log.I("Trying various deletions error test cases")
        log.I("Trying to delete a wrong configuration name on domain %s" % (self.domain_name))
        log.I("command [deleteConfiguration]")
        out, err = self.pfw.sendCmd("deleteConfiguration", self.domain_name, "wrong_configuration_name", expectSuccess=False)
        assert out != "Done", "ERROR : command [deleteConfiguration] - Error not detected while deleting non existent configuration name"
        assert err is None, "ERROR : command [deleteConfiguration] - Error while deleting configuration"
        log.I("command [deleteConfiguration] correctly executed")
        log.I("error correctly detected, no configuration deleted")
        # Scenario 2: deleting without providing a name must fail.
        log.I("deleting a configuration with no name specified")
        out, err = self.pfw.sendCmd("deleteConfiguration", self.domain_name, expectSuccess=False)
        assert out != "Done", "ERROR : command [deleteConfiguration] - Error not detected while deleting a configuration without specifying a name"
        assert err is None, "ERROR : command [deleteConfiguration] - Error while deleting configuration"
        log.I("error correctly detected, no configuration deleted")
        # Scenario 3: deleting on an unknown domain must fail. The original
        # code reused the last name generated by the creation loop.
        log.I("deleting a configuration on a wrong domain name")
        new_conf_name = self._conf_name(self.conf_test, self.new_conf_number - 1)
        out, err = self.pfw.sendCmd("deleteConfiguration", "wrong_domain_name", new_conf_name, expectSuccess=False)
        assert out != "Done", "ERROR : command [deleteConfiguration] - Error not detected while deleting a configuration on a wrong domain name"
        assert err is None, "ERROR : command [deleteConfiguration] - Error while deleting configuration"
        log.I("error correctly detected, no configuration deleted")
        # Existing configurations must not have been affected.
        self._check_configurations_unchanged(configurations_backup)
        self._delete_test_domain()

    def test_Nominal_Case(self):
        """
        Testing nominal cases
        ---------------------
            Test case description :
            ~~~~~~~~~~~~~~~~~~~~~~~
                - Create new configurations
                - List domain configurations
                - Rename configurations
                - Delete configurations
            Tested commands :
            ~~~~~~~~~~~~~~~~~
                - [listConfigurations] function
                - [createConfiguration] function
                - [renameConfiguration] function
                - [deleteConfiguration] function
                - [createDomain] function
                - [deleteDomain] function
            Expected result :
            ~~~~~~~~~~~~~~~~~
                - all operations succeed
        """
        log.D(self.test_Nominal_Case.__doc__)
        self._create_test_domain()
        self._create_test_configurations()
        # The listing must exactly match the names just created.
        expected_names = [self._conf_name(self.conf_test, iteration)
                          for iteration in range(self.new_conf_number)]
        log.I("Configurations listing conformity check")
        listed_conf = self._list_test_configurations().strip('\r\n').splitlines()
        assert listed_conf == expected_names, "ERROR : Error while listing configuration, expected '%s', found '%s'" % (expected_names, listed_conf)
        log.I("Listed configurations names conform to expected values")
        # Rename every configuration from conf_test_* to conf_test_renamed_*.
        log.I("Configurations renaming")
        for iteration in range(self.new_conf_number):
            conf_name = self._conf_name(self.conf_test, iteration)
            new_conf_name = self._conf_name(self.conf_test_renamed, iteration)
            log.I("Configuration %s renamed to %s in domain %s" % (conf_name, new_conf_name, self.domain_name))
            log.I("command [renameConfiguration]")
            out, err = self.pfw.sendCmd("renameConfiguration", self.domain_name, conf_name, new_conf_name)
            assert out == "Done", out
            assert err is None, "ERROR : command [renameConfiguration] - Error while renaming configuration %s to %s" % (conf_name, new_conf_name)
            log.I("command [renameConfiguration] correctly executed")
            log.I("Configuration %s renamed to %s for domain %s" % (conf_name, new_conf_name, self.domain_name))
        # The listing must now match the renamed names.
        log.I("Configurations listing to check configurations renaming")
        expected_names = [self._conf_name(self.conf_test_renamed, iteration)
                          for iteration in range(self.new_conf_number)]
        log.I("Configurations listing conformity check")
        listed_conf = self._list_test_configurations().strip('\r\n').splitlines()
        assert listed_conf == expected_names, "ERROR : Error while renaming configuration, expected '%s', found '%s'" % (expected_names, listed_conf)
        log.I("Listed configurations names conform to expected values, renaming successfull")
        self._delete_test_domain()
| 56.844758
| 170
| 0.655648
| 3,118
| 28,195
| 5.801475
| 0.0805
| 0.024324
| 0.055724
| 0.034828
| 0.844491
| 0.829786
| 0.805794
| 0.780861
| 0.769528
| 0.748853
| 0
| 0.000695
| 0.234439
| 28,195
| 495
| 171
| 56.959596
| 0.837341
| 0.212839
| 0
| 0.747405
| 0
| 0
| 0.429064
| 0.048953
| 0
| 0
| 0
| 0
| 0.197232
| 1
| 0.020761
| false
| 0
| 0.010381
| 0
| 0.034602
| 0.00346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cbec485fe80449617e1ed8b487832a45797677e0
| 16,582
|
py
|
Python
|
phyto_photo_utils/_fitting.py
|
tjryankeogh/phytophotoutils
|
48c1747bca837f1d4c73ff48d7c232840eca8352
|
[
"MIT"
] | null | null | null |
phyto_photo_utils/_fitting.py
|
tjryankeogh/phytophotoutils
|
48c1747bca837f1d4c73ff48d7c232840eca8352
|
[
"MIT"
] | null | null | null |
phyto_photo_utils/_fitting.py
|
tjryankeogh/phytophotoutils
|
48c1747bca837f1d4c73ff48d7c232840eca8352
|
[
"MIT"
] | 1
|
2021-12-16T11:42:03.000Z
|
2021-12-16T11:42:03.000Z
|
#!/usr/bin/env python
from numpy import count_nonzero, isnan, inf, linalg, arange, repeat, nan
from scipy.optimize import least_squares
from sklearn import linear_model
import warnings
warnings.filterwarnings("ignore", category=RuntimeWarning)
from ._equations import __fit_kolber_nop__, __fit_kolber_p__, __fit_single_relaxation__, __fit_triple_relaxation__, __calculate_residual_saturation_p__, __calculate_residual_saturation_nop__, __calculate_residual_saturation_fixedp__, __calculate_residual_single_relaxation__, __calculate_residual_triple_relaxation__, __calculate_bias__, __calculate_rmse__, __calculate_nrmse__, __calculate_fit_errors__
def __fit_fixed_p_model__(pfd, flevel, ro, bounds=False, sig_lims=None, method='trf', loss='soft_l1', f_scale=0.1, max_nfev=None, xtol=1e-9):
    """Fit the Kolber saturation model with a fixed connectivity factor ro.

    Parameters
    ----------
    pfd, flevel : array-like
        Photon flux density and fluorescence level per flashlet (NaNs dropped).
    ro : float
        Fixed connectivity factor passed to the residual function.
    bounds, sig_lims : bool, sequence
        When bounds is True, sigma is constrained to sig_lims and Fo/Fm to
        +/-10% of their initial estimates.
    method, loss, f_scale, max_nfev, xtol :
        Passed through to scipy.optimize.least_squares.

    Returns
    -------
    tuple
        (fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err,
        nfl, nfev, flag, success). On failure the fitted values are NaN and
        flag encodes the reason: -1 fit error, -2 bad Fo/Fm estimate,
        -3 singular Jacobian.
    """
    # Count number of flashlets excluding NaNs (reported even when the fit fails)
    nfl = count_nonzero(~isnan(flevel))
    m = ~isnan(flevel)
    flevel = flevel[m]
    pfd = pfd[m]
    # Estimates of saturation parameters via robust regression on the
    # first 8 / last 24 flashlets; fall back to simple means on failure.
    model = linear_model.HuberRegressor()
    try:
        y = flevel[:8]
        x = arange(1, 9)[:, None]
        fo_model = model.fit(x, y)
        fo = fo_model.intercept_
    except Exception:
        fo = flevel[:3].mean()
    try:
        y = flevel[-24:]
        x = arange(1, 25)[:, None]
        fm_model = model.fit(x, y)
        fm = fm_model.intercept_
    except Exception:
        fm = flevel[-3:].mean()
    if (fo > fm) | (fo <= 0):
        print('Fo greater than Fm - skipping fit.')
        # BUG FIX: original unpacked repeat(nan, 12) into 11 names (ValueError)
        # and then returned an undefined `nrmse` (NameError). Also keep the
        # real flashlet count in nfl instead of overwriting it with NaN.
        fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfev = repeat(nan, 11)
        flag = -2
        success = 'False'
        return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
    else:
        fo10 = fo * 0.1
        fm10 = fm * 0.1
        sig = 500
        x0 = [fo, fm, sig]
        bds = [-inf, inf]
        if bounds:
            bds = [fo - fo10, fm - fm10, sig_lims[0]], [fo + fo10, fm + fm10, sig_lims[1]]
            if (bds[0][0] > bds[1][0]) | (bds[0][1] > bds[1][1]) | (bds[0][2] > bds[1][2]):
                print('Lower bounds greater than upper bounds - fitting with no bounds.')
                bds = [-inf, inf]
        if max_nfev is None:
            opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'xtol': xtol}
        else:
            opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'max_nfev': max_nfev, 'xtol': xtol}
        try:
            popt = least_squares(__calculate_residual_saturation_fixedp__, x0, bounds=bds, args=(pfd, flevel, ro), **opts)
            fo = popt.x[0]
            fm = popt.x[1]
            sigma = popt.x[2]
            # Calculate curve fitting statistical metrics
            sol = __fit_kolber_p__(pfd, *popt.x, ro)
            bias = __calculate_bias__(sol, flevel)
            rmse = __calculate_rmse__(popt.fun, flevel)
            nrmse = __calculate_nrmse__(popt.fun, flevel)
            perr = __calculate_fit_errors__(popt.jac, popt.fun)
            fo_err = (perr[0] / fo) * 100
            fm_err = (perr[1] / fm) * 100
            sigma_err = perr[2]
            nfev = popt.nfev if max_nfev is None else max_nfev
            flag = popt.status
            success = popt.success
            return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
        except linalg.LinAlgError as err:
            # Only a singular Jacobian is handled; other LinAlgErrors propagate
            # as in the original (falls through returning None).
            if str(err) == 'Singular matrix':
                print('Unable to calculate fitting errors, skipping sequence.')
                # BUG FIX: same 11-name / repeat(nan, 12) mismatch and
                # undefined-nrmse return as the estimate-check branch above.
                fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfev = repeat(nan, 11)
                flag = -3
                success = 'False'
                return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
        except Exception:
            print('Unable to calculate fit, skipping sequence.')
            fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfev = repeat(nan, 11)
            flag = -1
            success = 'False'
            return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
def __fit_calc_p_model__(pfd, flevel, bounds=False, sig_lims=None, ro_lims=None, method='trf', loss='soft_l1', f_scale=0.1, max_nfev=None, xtol=1e-9):
    """Fit the Kolber saturation model, solving for the connectivity factor ro.

    Parameters
    ----------
    pfd, flevel : array-like
        Photon flux density and fluorescence level per flashlet (NaNs dropped).
    bounds, sig_lims, ro_lims : bool, sequence, sequence
        When bounds is True, sigma and ro are constrained to their limit pairs
        and Fo/Fm to +/-10% of their initial estimates.
    method, loss, f_scale, max_nfev, xtol :
        Passed through to scipy.optimize.least_squares.

    Returns
    -------
    tuple
        (fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err,
        ro_err, nfl, nfev, flag, success). Failures return NaNs with flag
        -1 (fit error), -2 (bad Fo/Fm estimate) or -3 (singular Jacobian).
    """
    # Count number of flashlets excluding NaNs (reported even when the fit fails)
    nfl = count_nonzero(~isnan(flevel))
    m = ~isnan(flevel)
    flevel = flevel[m]
    pfd = pfd[m]
    # Estimates of saturation parameters
    model = linear_model.HuberRegressor()
    try:
        y = flevel[:8]
        # NOTE(review): this uses arange(0, 8) while __fit_fixed_p_model__ uses
        # arange(1, 9) for the same estimate — confirm which x-offset is intended.
        x = arange(0, 8)[:, None]
        fo_model = model.fit(x, y)
        fo = fo_model.intercept_
    except Exception:
        fo = flevel[:3].mean()
    try:
        y = flevel[-24:]
        x = arange(0, 24)[:, None]
        fm_model = model.fit(x, y)
        fm = fm_model.intercept_
    except Exception:
        fm = flevel[-3:].mean()
    if (fo > fm) | (fo <= 0):
        print('Fo greater than Fm - skipping fit.')
        # Keep the real flashlet count in nfl rather than overwriting it with NaN.
        fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfev = repeat(nan, 12)
        flag = -2
        success = 'False'
        return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfl, nfev, flag, success
    else:
        fo10 = fo * 0.1
        fm10 = fm * 0.1
        sig = 500
        ro = 0.3
        x0 = [fo, fm, sig, ro]
        bds = [-inf, inf]
        if bounds:
            bds = [fo - fo10, fm - fm10, sig_lims[0], ro_lims[0]], [fo + fo10, fm + fm10, sig_lims[1], ro_lims[1]]
            if (bds[0][0] > bds[1][0]) | (bds[0][1] > bds[1][1]) | (bds[0][2] > bds[1][2]) | (bds[0][3] > bds[1][3]):
                print('Lower bounds greater than upper bounds - fitting with no bounds.')
                bds = [-inf, inf]
        if max_nfev is None:
            opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'xtol': xtol}
        else:
            opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'max_nfev': max_nfev, 'xtol': xtol}
        try:
            popt = least_squares(__calculate_residual_saturation_p__, x0, bounds=bds, args=(pfd, flevel), **opts)
            fo = popt.x[0]
            fm = popt.x[1]
            sigma = popt.x[2]
            ro = popt.x[3]
            # Calculate curve fitting statistical metrics
            sol = __fit_kolber_p__(pfd, *popt.x)
            bias = __calculate_bias__(sol, flevel)
            rmse = __calculate_rmse__(popt.fun, flevel)
            nrmse = __calculate_nrmse__(popt.fun, flevel)
            perr = __calculate_fit_errors__(popt.jac, popt.fun)
            fo_err = (perr[0] / fo) * 100
            fm_err = (perr[1] / fm) * 100
            sigma_err = perr[2]
            ro_err = perr[3]
            nfev = popt.nfev if max_nfev is None else max_nfev
            flag = popt.status
            # CONSISTENCY FIX: this was named `status` here but `success`
            # everywhere else in the module; the returned value is unchanged.
            success = popt.success
            return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfl, nfev, flag, success
        except linalg.LinAlgError as err:
            if str(err) == 'Singular matrix':
                print('Unable to calculate fitting errors, skipping sequence.')
                fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfev = repeat(nan, 12)
                flag = -3
                success = 'False'
                return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfl, nfev, flag, success
        except Exception:
            print('Unable to calculate fit, skipping sequence.')
            fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfev = repeat(nan, 12)
            flag = -1
            success = 'False'
            return fo, fm, sigma, ro, bias, rmse, nrmse, fo_err, fm_err, sigma_err, ro_err, nfl, nfev, flag, success
def __fit_no_p_model__(pfd, flevel, ro=None, bounds=False, sig_lims=None, method='trf', loss='soft_l1', f_scale=0.1, max_nfev=None, xtol=1e-9):
    """Fit the Kolber saturation model without connectivity (ro is unused).

    Parameters
    ----------
    pfd, flevel : array-like
        Photon flux density and fluorescence level per flashlet (NaNs dropped).
    ro : None
        Unused; kept for signature compatibility with the other fit helpers.
    bounds, sig_lims : bool, sequence
        When bounds is True, sigma is constrained to sig_lims and Fo/Fm to
        +/-10% of their initial estimates.
    method, loss, f_scale, max_nfev, xtol :
        Passed through to scipy.optimize.least_squares.

    Returns
    -------
    tuple
        (fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl,
        nfev, flag, success). Failures return NaNs with flag -1 (fit error),
        -2 (bad Fo/Fm estimate) or -3 (singular Jacobian).
    """
    # Count number of flashlets excluding NaNs (reported even when the fit fails)
    nfl = count_nonzero(~isnan(flevel))
    m = ~isnan(flevel)
    flevel = flevel[m]
    pfd = pfd[m]
    # Estimates of saturation parameters
    model = linear_model.HuberRegressor()
    try:
        y = flevel[:8]
        x = arange(0, 8)[:, None]
        fo_model = model.fit(x, y)
        fo = fo_model.intercept_
    except Exception:
        fo = flevel[:3].mean()
    try:
        y = flevel[-24:]
        x = arange(0, 24)[:, None]
        fm_model = model.fit(x, y)
        fm = fm_model.intercept_
    except Exception:
        fm = flevel[-3:].mean()
    if (fo > fm) | (fo <= 0):
        print('Fo greater than Fm - skipping fit.')
        fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfev = repeat(nan, 10)
        flag = -2
        # CONSISTENCY FIX: this branch returned the bool False while every
        # other failure branch in the module returns the string 'False'.
        success = 'False'
        return fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
    else:
        fo10 = fo * 0.1
        fm10 = fm * 0.1
        sig = 500
        x0 = [fo, fm, sig]
        bds = [-inf, inf]
        if bounds:
            bds = [fo - fo10, fm - fm10, sig_lims[0]], [fo + fo10, fm + fm10, sig_lims[1]]
            if (bds[0][0] > bds[1][0]) | (bds[0][1] > bds[1][1]) | (bds[0][2] > bds[1][2]):
                print('Lower bounds greater than upper bounds - fitting with no bounds.')
                bds = [-inf, inf]
        if max_nfev is None:
            opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'xtol': xtol}
        else:
            opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'max_nfev': max_nfev, 'xtol': xtol}
        try:
            popt = least_squares(__calculate_residual_saturation_nop__, x0, bounds=bds, args=(pfd, flevel), **opts)
            fo = popt.x[0]
            fm = popt.x[1]
            sigma = popt.x[2]
            # Calculate curve fitting statistical metrics
            sol = __fit_kolber_nop__(pfd, *popt.x)
            bias = __calculate_bias__(sol, flevel)
            rmse = __calculate_rmse__(popt.fun, flevel)
            nrmse = __calculate_nrmse__(popt.fun, flevel)
            perr = __calculate_fit_errors__(popt.jac, popt.fun)
            fo_err = (perr[0] / fo) * 100
            fm_err = (perr[1] / fm) * 100
            sigma_err = perr[2]
            nfev = popt.nfev if max_nfev is None else max_nfev
            flag = popt.status
            success = popt.success
            return fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
        except linalg.LinAlgError as err:
            if str(err) == 'Singular matrix':
                print('Unable to calculate fitting errors, skipping sequence.')
                # Keep the real nfl instead of NaN-ing it.
                fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfev = repeat(nan, 10)
                flag = -3
                success = 'False'
                # BUG FIX: this return previously omitted nrmse, yielding a
                # 12-tuple where every other path returns 13 values.
                return fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
        except Exception:
            print('Unable to calculate fit, skipping sequence.')
            fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfev = repeat(nan, 10)
            flag = -1
            success = 'False'
            return fo, fm, sigma, bias, rmse, nrmse, fo_err, fm_err, sigma_err, nfl, nfev, flag, success
def __fit_single_decay__(seq_time, flevel, sat_flashlets=None, bounds=False, single_lims=None, method='trf', loss='soft_l1', f_scale=0.1, max_nfev=None, xtol=1e-9):
    """Fit a single-exponential relaxation phase to a fluorescence decay.

    Returns (fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err,
    nfl, nfev, flag, success); failed fits return NaNs with flag -1 (fit
    error), -2 (Fo_relax exceeds Fm_relax) or -3 (singular Jacobian).
    """
    # Flashlet count before masking, so the reported nfl reflects valid points.
    nfl = count_nonzero(~isnan(flevel))
    valid = ~isnan(flevel)
    flevel = flevel[valid]
    seq_time = seq_time[valid]
    # Initial end-member estimates: tail mean for Fo_relax, head mean
    # (optionally extended by sat_flashlets) for Fm_relax.
    fo_relax = flevel[-3:].mean()
    head = 3 if sat_flashlets is None else 3 + sat_flashlets
    fm_relax = flevel[:head].mean()
    if fo_relax > fm_relax:
        print('Fo_relax greater than Fm_relax - skipping fit.')
        fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfev = repeat(nan, 10)
        return fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfl, nfev, -2, 'False'
    fo10 = fo_relax * 0.1
    fm10 = fm_relax * 0.1
    tau = 4000
    x0 = [fo_relax, fm_relax, tau]
    bds = [-inf, inf]
    if bounds:
        lower = [fo_relax - fo10, fm_relax - fm10, single_lims[0]]
        upper = [fo_relax + fo10, fm_relax + fm10, single_lims[1]]
        bds = lower, upper
        if any(lo > hi for lo, hi in zip(lower, upper)):
            print('Lower bounds greater than upper bounds - fitting with no bounds.')
            bds = [-inf, inf]
    opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'xtol': xtol}
    if max_nfev is not None:
        opts['max_nfev'] = max_nfev
    try:
        popt = least_squares(__calculate_residual_single_relaxation__, x0, bounds=bds, args=(seq_time, flevel), **opts)
        fo_r, fm_r, tau = popt.x
        # Goodness-of-fit metrics against the fitted relaxation curve
        sol = __fit_single_relaxation__(seq_time, *popt.x)
        bias = __calculate_bias__(sol, flevel)
        rmse = __calculate_rmse__(popt.fun, flevel)
        nrmse = __calculate_nrmse__(popt.fun, flevel)
        perr = __calculate_fit_errors__(popt.jac, popt.fun)
        fo_err = (perr[0] / fo_r) * 100
        fm_err = (perr[1] / fm_r) * 100
        tau_err = perr[2]
        nfev = popt.nfev if max_nfev is None else max_nfev
        return fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfl, nfev, popt.status, popt.success
    except linalg.LinAlgError as err:
        # Only the singular-Jacobian case is handled; other LinAlgErrors
        # fall through (returning None), matching the original behaviour.
        if str(err) == 'Singular matrix':
            print('Unable to calculate fitting errors, skipping sequence.')
            fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfev = repeat(nan, 10)
            return fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfl, nfev, -3, 'False'
    except Exception:
        print('Unable to calculate fit, skipping sequence.')
        fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfev = repeat(nan, 10)
        return fo_r, fm_r, tau, bias, rmse, nrmse, fo_err, fm_err, tau_err, nfl, nfev, -1, 'False'
def __fit_triple_decay__(seq_time, flevel, sat_flashlets=None, bounds=False, tau1_lims=None, tau2_lims=None, tau3_lims=None, method='trf', loss='soft_l1', f_scale=0.1, max_nfev=None, xtol=1e-9):
    """Fit a triple-exponential relaxation phase to a fluorescence decay.

    Parameters
    ----------
    seq_time, flevel : array-like
        Sequence time and fluorescence level per flashlet (NaNs dropped).
    sat_flashlets : int, optional
        Extra leading flashlets included in the Fm_relax estimate.
    bounds, tau1_lims, tau2_lims, tau3_lims :
        When bounds is True, each tau is constrained to its limit pair,
        amplitudes to [0.1, 1] and Fo/Fm to +/-10% of their estimates.
    method, loss, f_scale, max_nfev, xtol :
        Passed through to scipy.optimize.least_squares.

    Returns
    -------
    tuple
        (fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err,
        fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfl, nfev,
        flag, success). Failures return NaNs with flag -1 (fit error),
        -2 (Fo_relax > Fm_relax) or -3 (singular Jacobian).
    """
    # Count number of flashlets excluding NaNs (reported even when the fit fails)
    nfl = count_nonzero(~isnan(flevel))
    m = ~isnan(flevel)
    flevel = flevel[m]
    seq_time = seq_time[m]
    # Estimates of relaxation parameters
    fo_relax = flevel[-3:].mean()
    if sat_flashlets is None:
        fm_relax = flevel[:3].mean()
    else:
        fm_relax = flevel[:3 + sat_flashlets].mean()
    if fo_relax > fm_relax:
        print('Fo_relax greater than Fm_relax - skipping fit.')
        # Keep the real flashlet count in nfl rather than overwriting it with NaN.
        fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfev = repeat(nan, 20)
        flag = -2
        success = 'False'
        return fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfl, nfev, flag, success
    fo10 = fo_relax * 0.1
    fm10 = fm_relax * 0.1
    # Starting guesses: equal amplitudes, taus spanning fast/medium/slow phases
    alpha1 = 0.3
    tau1 = 600
    alpha2 = 0.3
    tau2 = 2000
    alpha3 = 0.3
    tau3 = 30000
    x0 = [fo_relax, fm_relax, alpha1, tau1, alpha2, tau2, alpha3, tau3]
    bds = [-inf, inf]
    if bounds:
        bds = [fo_relax - fo10, fm_relax - fm10, 0.1, tau1_lims[0], 0.1, tau2_lims[0], 0.1, tau3_lims[0]], [fo_relax + fo10, fm_relax + fm10, 1, tau1_lims[1], 1, tau2_lims[1], 1, tau3_lims[1]]
        if (bds[0][0] > bds[1][0]) | (bds[0][1] > bds[1][1]) | (bds[0][2] > bds[1][2]) | (bds[0][3] > bds[1][3]) | (bds[0][4] > bds[1][4]) | (bds[0][5] > bds[1][5]) | (bds[0][6] > bds[1][6]) | (bds[0][7] > bds[1][7]):
            print('Lower bounds greater than upper bounds - fitting with no bounds.')
            bds = [-inf, inf]
    if max_nfev is None:
        opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'xtol': xtol}
    else:
        opts = {'method': method, 'loss': loss, 'f_scale': f_scale, 'max_nfev': max_nfev, 'xtol': xtol}
    try:
        popt = least_squares(__calculate_residual_triple_relaxation__, x0, bounds=bds, args=(seq_time, flevel), **opts)
        fo_r = popt.x[0]
        fm_r = popt.x[1]
        a1 = popt.x[2]
        t1 = popt.x[3]
        a2 = popt.x[4]
        t2 = popt.x[5]
        a3 = popt.x[6]
        t3 = popt.x[7]
        # BUG FIX: the metrics were computed with __fit_single_relaxation__,
        # a 3-parameter model, despite fitting the 8-parameter triple model;
        # use the matching __fit_triple_relaxation__ (imported, previously unused).
        sol = __fit_triple_relaxation__(seq_time, *popt.x)
        bias = __calculate_bias__(sol, flevel)
        rmse = __calculate_rmse__(popt.fun, flevel)
        nrmse = __calculate_nrmse__(popt.fun, flevel)
        perr = __calculate_fit_errors__(popt.jac, popt.fun)
        fo_err = (perr[0] / fo_r) * 100
        fm_err = (perr[1] / fm_r) * 100
        a1_err = perr[2]
        t1_err = perr[3]
        a2_err = perr[4]
        t2_err = perr[5]
        a3_err = perr[6]
        t3_err = perr[7]
        nfev = popt.nfev if max_nfev is None else max_nfev
        flag = popt.status
        success = popt.success
        return fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfl, nfev, flag, success
    except linalg.LinAlgError as err:
        if str(err) == 'Singular matrix':
            print('Unable to calculate fitting errors, skipping sequence.')
            fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfev = repeat(nan, 20)
            flag = -3
            success = 'False'
            return fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfl, nfev, flag, success
    except Exception:
        print('Unable to calculate fit, skipping sequence.')
        fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfev = repeat(nan, 20)
        flag = -1
        success = 'False'
        return fo_r, fm_r, a1, t1, a2, t2, a3, t3, bias, rmse, nrmse, fo_err, fm_err, a1_err, t1_err, a2_err, t2_err, a3_err, t3_err, nfl, nfev, flag, success
| 34.40249
| 403
| 0.656193
| 2,744
| 16,582
| 3.72777
| 0.06086
| 0.019552
| 0.023952
| 0.034216
| 0.907127
| 0.898524
| 0.896178
| 0.896178
| 0.885717
| 0.873986
| 0
| 0.040021
| 0.192317
| 16,582
| 481
| 404
| 34.474012
| 0.723736
| 0.039682
| 0
| 0.847545
| 0
| 0
| 0.091166
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01292
| false
| 0.03876
| 0.01292
| 0
| 0.077519
| 0.05168
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0203c710abd08d0de7ac594a38c11e3c2abcc5d4
| 207
|
py
|
Python
|
Company-Project/src/main/pages/__init__.py
|
joshuadavidthomas/Wagtail-Pipit
|
1cd057590194c570c8c1674a58326a2abbd3b75c
|
[
"MIT"
] | 124
|
2019-04-30T19:51:01.000Z
|
2022-03-25T17:10:52.000Z
|
{{cookiecutter.project_name}}/src/main/pages/__init__.py
|
albertfougy/Wagtail-Pipit
|
e82991c76bb3c79804971a33d30b9e098bfb4ea9
|
[
"MIT"
] | 713
|
2019-05-20T12:10:22.000Z
|
2022-03-30T04:15:10.000Z
|
{{cookiecutter.project_name}}/src/main/pages/__init__.py
|
albertfougy/Wagtail-Pipit
|
e82991c76bb3c79804971a33d30b9e098bfb4ea9
|
[
"MIT"
] | 18
|
2019-09-11T00:38:42.000Z
|
2022-02-07T16:00:48.000Z
|
from .base import * # NOQA
from .base_serializer import * # NOQA
from .home import * # NOQA
from .home_serializer import * # NOQA
from .article import * # NOQA
from .article_serializer import * # NOQA
| 29.571429
| 41
| 0.710145
| 27
| 207
| 5.333333
| 0.259259
| 0.416667
| 0.486111
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202899
| 207
| 6
| 42
| 34.5
| 0.872727
| 0.140097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
02170db23fdf11082f1e7c10728793c3030e05b4
| 210
|
py
|
Python
|
vit_pytorch/CvT/__init__.py
|
khawar512/OPVT
|
690e540e7f54e43751d28a046009993e3e325291
|
[
"MIT"
] | null | null | null |
vit_pytorch/CvT/__init__.py
|
khawar512/OPVT
|
690e540e7f54e43751d28a046009993e3e325291
|
[
"MIT"
] | null | null | null |
vit_pytorch/CvT/__init__.py
|
khawar512/OPVT
|
690e540e7f54e43751d28a046009993e3e325291
|
[
"MIT"
] | null | null | null |
from vit_pytorch.face_losses import CosFace, ArcFace, SFaceLoss, Softmax
from vit_pytorch.CvT.cvt import CvT,ConvAttention
from vit_pytorch.CvT.module import ConvAttention,SepConv2d,FeedForward,Residual,PreNorm
| 70
| 87
| 0.871429
| 29
| 210
| 6.172414
| 0.586207
| 0.117318
| 0.234637
| 0.189944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005102
| 0.066667
| 210
| 3
| 87
| 70
| 0.908163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
02182e695786ca8bb2a560027a9744dcbe20ac94
| 44,741
|
py
|
Python
|
monk/pytorch/models/layers.py
|
gstearmit/monk_v1
|
89184ae27dc6d134620034d5b12aa86473ea47ba
|
[
"Apache-2.0"
] | null | null | null |
monk/pytorch/models/layers.py
|
gstearmit/monk_v1
|
89184ae27dc6d134620034d5b12aa86473ea47ba
|
[
"Apache-2.0"
] | null | null | null |
monk/pytorch/models/layers.py
|
gstearmit/monk_v1
|
89184ae27dc6d134620034d5b12aa86473ea47ba
|
[
"Apache-2.0"
] | 1
|
2020-10-07T12:57:44.000Z
|
2020-10-07T12:57:44.000Z
|
from pytorch.models.imports import *
from system.imports import *
@accepts(dict, int, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=True)
def get_layer(network_layer, num_ftrs):
    """Instantiate the torch.nn layer described by a layer-spec dict.

    Args:
        network_layer (dict): spec with keys "name" and "params".
        num_ftrs (int): current feature count; updated by "linear" layers.

    Returns:
        tuple: (layer, num_ftrs) where layer is the constructed nn module.

    Raises:
        ValueError: if the layer name is not recognised.
    """
    layer_name = network_layer["name"]
    layer_params = network_layer["params"]
    if layer_name == "linear":
        layer = nn.Linear(num_ftrs, layer_params["out_features"])
        num_ftrs = layer_params["out_features"]
    elif layer_name == "dropout":
        layer = nn.Dropout(p=layer_params["p"])
    elif layer_name == "elu":
        layer = nn.ELU(alpha=layer_params["alpha"])
    elif layer_name == "hardshrink":
        layer = nn.Hardshrink(lambd=layer_params["lambd"])
    elif layer_name == "hardtanh":
        layer = nn.Hardtanh(min_val=layer_params["min_val"], max_val=layer_params["max_val"])
    elif layer_name == "leakyrelu":
        layer = nn.LeakyReLU(negative_slope=layer_params["negative_slope"])
    elif layer_name == "logsigmoid":
        layer = nn.LogSigmoid()
    elif layer_name == "prelu":
        layer = nn.PReLU(num_parameters=layer_params["num_parameters"], init=layer_params["init"])
    elif layer_name == "relu":
        layer = nn.ReLU()
    elif layer_name == "relu6":
        layer = nn.ReLU6()
    elif layer_name == "rrelu":
        layer = nn.RReLU(lower=layer_params["lower"], upper=layer_params["upper"])
    elif layer_name == "selu":
        layer = nn.SELU()
    elif layer_name == "celu":
        layer = nn.CELU(alpha=layer_params["alpha"])
    elif layer_name == "sigmoid":
        layer = nn.Sigmoid()
    elif layer_name == "softplus":
        layer = nn.Softplus(beta=layer_params["beta"], threshold=layer_params["threshold"])
    elif layer_name == "softshrink":
        layer = nn.Softshrink(lambd=layer_params["lambd"])
    elif layer_name == "softsign":
        layer = nn.Softsign()
    elif layer_name == "tanh":
        layer = nn.Tanh()
    elif layer_name == "tanhshrink":
        layer = nn.Tanhshrink()
    elif layer_name == "threshold":
        layer = nn.Threshold(threshold=layer_params["threshold"], value=layer_params["value"])
    elif layer_name == "softmin":
        layer = nn.Softmin()
    elif layer_name == "softmax":
        layer = nn.Softmax()
    elif layer_name == "logsoftmax":
        layer = nn.LogSoftmax()
    else:
        # BUG FIX: an unrecognised name previously fell through to
        # `return layer` and raised a confusing NameError; fail loudly instead.
        raise ValueError("Unknown layer name: {}".format(layer_name))
    return layer, num_ftrs
@accepts(dict, num_neurons=int, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def layer_linear(system_dict, num_neurons=512, final_layer=False):
    """Append a fully-connected (linear) layer spec to the custom network."""
    entry = {"name": "linear", "params": {"out_features": num_neurons}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, probability=float, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def layer_dropout(system_dict, probability=0.5, final_layer=False):
    """Append a dropout layer spec to the custom network."""
    entry = {"name": "dropout", "params": {"p": probability}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, alpha=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_elu(system_dict, alpha=1.0, final_layer=False):
    """Append an ELU activation spec to the custom network."""
    entry = {"name": "elu", "params": {"alpha": alpha}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, lambd=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_hardshrink(system_dict, lambd=0.5, final_layer=False):
    """Append a Hardshrink activation spec to the custom network."""
    entry = {"name": "hardshrink", "params": {"lambd": lambd}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, min_val=[int, float], max_val=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_hardtanh(system_dict, min_val=-1.0, max_val=1.0, final_layer=False):
    """Append a Hardtanh activation spec to the custom network."""
    entry = {"name": "hardtanh", "params": {"min_val": min_val, "max_val": max_val}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, negative_slope=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_leakyrelu(system_dict, negative_slope=0.01, final_layer=False):
    """Append a LeakyReLU activation spec to the custom network."""
    entry = {"name": "leakyrelu", "params": {"negative_slope": negative_slope}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_logsigmoid(system_dict, final_layer=False):
    """Append a LogSigmoid activation spec to the custom network."""
    entry = {"name": "logsigmoid", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, num_parameters=int, init=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_prelu(system_dict, num_parameters=1, init=0.25, final_layer=False):
    """Append a PReLU activation spec to the custom network."""
    entry = {"name": "prelu", "params": {"num_parameters": num_parameters, "init": init}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_relu(system_dict, final_layer=False):
    """Append a ReLU activation spec to the custom network."""
    entry = {"name": "relu", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_relu6(system_dict, final_layer=False):
    """Append a ReLU6 activation spec to the custom network."""
    entry = {"name": "relu6", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, lower=[int, float], upper=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_rrelu(system_dict, lower=0.125, upper=0.333, final_layer=False):
    """Append an RReLU activation spec to the custom network."""
    entry = {"name": "rrelu", "params": {"lower": lower, "upper": upper}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_selu(system_dict, final_layer=False):
    """Append a SELU activation spec to the custom network."""
    entry = {"name": "selu", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, alpha=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_celu(system_dict, alpha=1.0, final_layer=False):
    """Append a CELU activation spec to the custom network."""
    entry = {"name": "celu", "params": {"alpha": alpha}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_sigmoid(system_dict, final_layer=False):
    """Append a Sigmoid activation spec to the custom network."""
    entry = {"name": "sigmoid", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, beta=[int, float], threshold=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_softplus(system_dict, beta=1, threshold=20, final_layer=False):
    """Append a Softplus activation spec to the custom network."""
    entry = {"name": "softplus", "params": {"beta": beta, "threshold": threshold}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, lambd=[int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_softshrink(system_dict, lambd=0.5, final_layer=False):
    """Append a Softshrink activation spec to the custom network."""
    entry = {"name": "softshrink", "params": {"lambd": lambd}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_softsign(system_dict, final_layer=False):
    """Append a Softsign activation spec to the custom network."""
    entry = {"name": "softsign", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_tanh(system_dict, final_layer=False):
    """Append a Tanh activation spec to the custom network."""
    entry = {"name": "tanh", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_tanhshrink(system_dict, final_layer=False):
    """Append a Tanhshrink activation spec to the custom network."""
    entry = {"name": "tanhshrink", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, [int, float], [int, float], final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_threshold(system_dict, threshold, value, final_layer=False):
    """Append a Threshold activation spec to the custom network."""
    # "value" is inserted before "threshold" to preserve the original key order.
    entry = {"name": "threshold", "params": {"value": value, "threshold": threshold}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_softmin(system_dict, final_layer=False):
    """Append a Softmin activation spec to the custom network."""
    entry = {"name": "softmin", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_softmax(system_dict, final_layer=False):
    """Append a Softmax activation spec to the custom network."""
    entry = {"name": "softmax", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, final_layer=bool, post_trace=True)
@TraceFunction(trace_args=False, trace_rv=False)
def activation_logsoftmax(system_dict, final_layer=False):
    """Append a LogSoftmax activation spec to the custom network."""
    entry = {"name": "logsoftmax", "params": {}}
    system_dict["model"]["custom_network"].append(entry)
    system_dict["model"]["final_layer"] = final_layer
    return system_dict
@accepts(dict, [int, tuple], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=True)
def custom_model_get_layer(network_layer, current_in_shape):
    """Instantiate the pytorch layer described by one custom-network entry.

    Replaces a 45-branch if/elif chain with a name -> builder dispatch
    table; every branch did exactly the same call with a different builder.

    Args:
        network_layer (dict): entry with keys "name" (layer type string)
            and "params" (constructor parameters for that layer type).
        current_in_shape (int or tuple): input shape flowing into this
            layer, channels-first, without the batch dimension.

    Returns:
        tuple: (layer, new_in_shape) as produced by the matching builder.

    Raises:
        ValueError: for an unrecognised layer name. (The original chain
            silently fell off the end and returned None, deferring the
            crash to the caller's tuple unpack.)
    """
    builders = {
        "convolution1d": custom_model_layer_convolution1d,
        "convolution2d": custom_model_layer_convolution2d,
        "convolution3d": custom_model_layer_convolution3d,
        "transposed_convolution1d": custom_model_layer_transposed_convolution1d,
        "transposed_convolution2d": custom_model_layer_transposed_convolution2d,
        "transposed_convolution3d": custom_model_layer_transposed_convolution3d,
        "max_pooling1d": custom_model_layer_max_pooling1d,
        "max_pooling2d": custom_model_layer_max_pooling2d,
        "max_pooling3d": custom_model_layer_max_pooling3d,
        "average_pooling1d": custom_model_layer_average_pooling1d,
        "average_pooling2d": custom_model_layer_average_pooling2d,
        "average_pooling3d": custom_model_layer_average_pooling3d,
        "global_max_pooling1d": custom_model_layer_global_max_pooling1d,
        "global_max_pooling2d": custom_model_layer_global_max_pooling2d,
        "global_max_pooling3d": custom_model_layer_global_max_pooling3d,
        "global_average_pooling1d": custom_model_layer_global_average_pooling1d,
        "global_average_pooling2d": custom_model_layer_global_average_pooling2d,
        "global_average_pooling3d": custom_model_layer_global_average_pooling3d,
        "fully_connected": custom_model_layer_fully_connected,
        "flatten": custom_model_layer_flatten,
        "dropout": custom_model_layer_dropout,
        "identity": custom_model_layer_identity,
        "batch_normalization": custom_model_layer_batch_normalization,
        "instance_normalization": custom_model_layer_instance_normalization,
        "layer_normalization": custom_model_layer_layer_normalization,
        "relu": custom_model_activation_relu,
        "sigmoid": custom_model_activation_sigmoid,
        "tanh": custom_model_activation_tanh,
        "softplus": custom_model_activation_softplus,
        "softsign": custom_model_activation_softsign,
        "elu": custom_model_activation_elu,
        "leaky_relu": custom_model_activation_leaky_relu,
        "prelu": custom_model_activation_prelu,
        "selu": custom_model_activation_selu,
        "hardshrink": custom_model_activation_hardshrink,
        "hardtanh": custom_model_activation_hardtanh,
        "logsigmoid": custom_model_activation_logsigmoid,
        "relu6": custom_model_activation_relu6,
        "rrelu": custom_model_activation_rrelu,
        "celu": custom_model_activation_celu,
        "softshrink": custom_model_activation_softshrink,
        "tanhshrink": custom_model_activation_tanhshrink,
        "threshold": custom_model_activation_threshold,
        "softmin": custom_model_activation_softmin,
        "softmax": custom_model_activation_softmax,
        "logsoftmax": custom_model_activation_logsoftmax,
    }
    layer_name = network_layer["name"]
    layer_params = network_layer["params"]
    try:
        builder = builders[layer_name]
    except KeyError:
        raise ValueError("Unknown custom model layer type: {}".format(layer_name))
    layer, current_in_shape = builder(layer_params, current_in_shape)
    return layer, current_in_shape
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_convolution1d(params, current_in_shape):
    """Create an nn.Conv1d from *params* and infer its output shape.

    ``padding == "in_eq_out"`` requests same-length output; it is only
    honoured when stride is 1, otherwise padding falls back to 0.
    """
    if params["padding"] == "in_eq_out":
        if params["stride"] == 1:
            params["padding"] = (params["dilation"] * (params["kernel_size"] - 1) - params["stride"] + 1) // 2
        else:
            params["padding"] = 0
    layer = nn.Conv1d(current_in_shape[0],
                      params["output_channels"],
                      params["kernel_size"],
                      stride=params["stride"],
                      padding=params["padding"],
                      dilation=params["dilation"],
                      groups=params["groups"],
                      bias=params["use_bias"])
    # Probe with a dummy batch of one to discover the output shape.
    channels, width = current_in_shape
    probe = layer(torch.randn(1, channels, width))
    return layer, tuple(probe.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_convolution2d(params, current_in_shape):
    """Create an nn.Conv2d from *params* and infer its output shape.

    ``padding == "in_eq_out"`` requests same-size output; it is only
    honoured when stride is 1, otherwise padding falls back to 0.
    """
    if params["padding"] == "in_eq_out":
        if params["stride"] == 1:
            params["padding"] = (params["dilation"] * (params["kernel_size"] - 1) - params["stride"] + 1) // 2
        else:
            params["padding"] = 0
    layer = nn.Conv2d(current_in_shape[0],
                      params["output_channels"],
                      params["kernel_size"],
                      stride=params["stride"],
                      padding=params["padding"],
                      dilation=params["dilation"],
                      groups=params["groups"],
                      bias=params["use_bias"])
    # Probe with a dummy batch of one to discover the output shape.
    channels, height, width = current_in_shape
    probe = layer(torch.randn(1, channels, height, width))
    return layer, tuple(probe.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_convolution3d(params, current_in_shape):
    """Create an nn.Conv3d from *params* and infer its output shape.

    ``padding == "in_eq_out"`` requests same-size output; it is only
    honoured when stride is 1, otherwise padding falls back to 0.
    """
    if params["padding"] == "in_eq_out":
        if params["stride"] == 1:
            params["padding"] = (params["dilation"] * (params["kernel_size"] - 1) - params["stride"] + 1) // 2
        else:
            params["padding"] = 0
    layer = nn.Conv3d(current_in_shape[0],
                      params["output_channels"],
                      params["kernel_size"],
                      stride=params["stride"],
                      padding=params["padding"],
                      dilation=params["dilation"],
                      groups=params["groups"],
                      bias=params["use_bias"])
    # Probe with a dummy batch of one to discover the output shape.
    channels, depth, height, width = current_in_shape
    probe = layer(torch.randn(1, channels, depth, height, width))
    return layer, tuple(probe.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_transposed_convolution1d(params, current_in_shape):
    """Create an nn.ConvTranspose1d from *params* and infer its output shape.

    ``padding == "in_eq_out"`` requests same-length output; it is only
    honoured when stride is 1, otherwise padding falls back to 0.
    """
    if params["padding"] == "in_eq_out":
        if params["stride"] == 1:
            params["padding"] = (params["kernel_size"] + params["output_padding"]) // 2
        else:
            params["padding"] = 0
    layer = nn.ConvTranspose1d(current_in_shape[0],
                               params["output_channels"],
                               params["kernel_size"],
                               stride=params["stride"],
                               padding=params["padding"],
                               dilation=params["dilation"],
                               groups=params["groups"],
                               output_padding=params["output_padding"],
                               bias=params["use_bias"])
    # Probe with a dummy batch of one to discover the output shape.
    channels, width = current_in_shape
    probe = layer(torch.randn(1, channels, width))
    return layer, tuple(probe.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_transposed_convolution2d(params, current_in_shape):
    """Create an nn.ConvTranspose2d from *params* and infer its output shape.

    ``padding == "in_eq_out"`` requests same-size output; it is only
    honoured when stride is 1, otherwise padding falls back to 0.
    """
    if params["padding"] == "in_eq_out":
        if params["stride"] == 1:
            params["padding"] = (params["kernel_size"] + params["output_padding"]) // 2
        else:
            params["padding"] = 0
    layer = nn.ConvTranspose2d(current_in_shape[0],
                               params["output_channels"],
                               params["kernel_size"],
                               stride=params["stride"],
                               padding=params["padding"],
                               dilation=params["dilation"],
                               groups=params["groups"],
                               output_padding=params["output_padding"],
                               bias=params["use_bias"])
    # Probe with a dummy batch of one to discover the output shape.
    channels, height, width = current_in_shape
    probe = layer(torch.randn(1, channels, height, width))
    return layer, tuple(probe.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_transposed_convolution3d(params, current_in_shape):
    """Create an nn.ConvTranspose3d from *params* and infer its output shape.

    ``padding == "in_eq_out"`` requests same-size output; it is only
    honoured when stride is 1, otherwise padding falls back to 0.
    """
    if params["padding"] == "in_eq_out":
        if params["stride"] == 1:
            params["padding"] = (params["kernel_size"] + params["output_padding"]) // 2
        else:
            params["padding"] = 0
    layer = nn.ConvTranspose3d(current_in_shape[0],
                               params["output_channels"],
                               params["kernel_size"],
                               stride=params["stride"],
                               padding=params["padding"],
                               dilation=params["dilation"],
                               groups=params["groups"],
                               output_padding=params["output_padding"],
                               bias=params["use_bias"])
    # Probe with a dummy batch of one to discover the output shape.
    channels, depth, height, width = current_in_shape
    probe = layer(torch.randn(1, channels, depth, height, width))
    return layer, tuple(probe.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_max_pooling1d(params, current_in_shape):
    """Create an nn.MaxPool1d and infer its output shape.

    Fix: removed the unused ``in_channels`` local.

    NOTE(review): the shape probe assumes ``return_indices`` is False;
    with True the layer returns a (values, indices) tuple — confirm.
    """
    layer = nn.MaxPool1d(params["kernel_size"],
                         stride=params["stride"],
                         padding=params["padding"],
                         ceil_mode=params["ceil_mode"],
                         return_indices=params["return_indices"])
    # Probe with a dummy batch of one to discover the output shape.
    c, w = current_in_shape
    y = layer(torch.randn(1, c, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_max_pooling2d(params, current_in_shape):
    """Create an nn.MaxPool2d and infer its output shape.

    Fix: removed the unused ``in_channels`` local.

    NOTE(review): the shape probe assumes ``return_indices`` is False;
    with True the layer returns a (values, indices) tuple — confirm.
    """
    layer = nn.MaxPool2d(params["kernel_size"],
                         stride=params["stride"],
                         padding=params["padding"],
                         ceil_mode=params["ceil_mode"],
                         return_indices=params["return_indices"])
    # Probe with a dummy batch of one to discover the output shape.
    c, h, w = current_in_shape
    y = layer(torch.randn(1, c, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_max_pooling3d(params, current_in_shape):
    """Create an nn.MaxPool3d and infer its output shape.

    Fix: removed the unused ``in_channels`` local.

    NOTE(review): the shape probe assumes ``return_indices`` is False;
    with True the layer returns a (values, indices) tuple — confirm.
    """
    layer = nn.MaxPool3d(params["kernel_size"],
                         stride=params["stride"],
                         padding=params["padding"],
                         ceil_mode=params["ceil_mode"],
                         return_indices=params["return_indices"])
    # Probe with a dummy batch of one to discover the output shape.
    c, d, h, w = current_in_shape
    y = layer(torch.randn(1, c, d, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_average_pooling1d(params, current_in_shape):
    """Create an nn.AvgPool1d and infer its output shape.

    Fix: removed the unused ``in_channels`` local.
    """
    layer = nn.AvgPool1d(params["kernel_size"],
                         stride=params["stride"],
                         padding=params["padding"])
    # Probe with a dummy batch of one to discover the output shape.
    c, w = current_in_shape
    y = layer(torch.randn(1, c, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_average_pooling2d(params, current_in_shape):
    """Create an nn.AvgPool2d and infer its output shape.

    Fix: removed the unused ``in_channels`` local.
    """
    layer = nn.AvgPool2d(params["kernel_size"],
                         stride=params["stride"],
                         padding=params["padding"])
    # Probe with a dummy batch of one to discover the output shape.
    c, h, w = current_in_shape
    y = layer(torch.randn(1, c, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_average_pooling3d(params, current_in_shape):
    """Create an nn.AvgPool3d and infer its output shape.

    Fix: removed the unused ``in_channels`` local.
    """
    layer = nn.AvgPool3d(params["kernel_size"],
                         stride=params["stride"],
                         padding=params["padding"])
    # Probe with a dummy batch of one to discover the output shape.
    c, d, h, w = current_in_shape
    y = layer(torch.randn(1, c, d, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_global_max_pooling1d(params, current_in_shape):
    """Create a global max pool (AdaptiveMaxPool1d to size 1) and infer shape.

    Fix: removed the unused ``in_channels`` local. *params* is unused but
    kept for a uniform builder signature.
    """
    layer = nn.AdaptiveMaxPool1d(output_size=1)
    # Probe with a dummy batch of one to discover the output shape.
    c, w = current_in_shape
    y = layer(torch.randn(1, c, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_global_max_pooling2d(params, current_in_shape):
    """Create a global max pool (AdaptiveMaxPool2d to size 1) and infer shape.

    Fix: removed the unused ``in_channels`` local. *params* is unused but
    kept for a uniform builder signature.
    """
    layer = nn.AdaptiveMaxPool2d(output_size=1)
    # Probe with a dummy batch of one to discover the output shape.
    c, h, w = current_in_shape
    y = layer(torch.randn(1, c, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_global_max_pooling3d(params, current_in_shape):
    """Create a global max pool (AdaptiveMaxPool3d to size 1) and infer shape.

    Fix: removed the unused ``in_channels`` local. *params* is unused but
    kept for a uniform builder signature.
    """
    layer = nn.AdaptiveMaxPool3d(output_size=1)
    # Probe with a dummy batch of one to discover the output shape.
    c, d, h, w = current_in_shape
    y = layer(torch.randn(1, c, d, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_global_average_pooling1d(params, current_in_shape):
    """Create a global average pool (AdaptiveAvgPool1d to size 1) and infer shape.

    Fix: removed the unused ``in_channels`` local. *params* is unused but
    kept for a uniform builder signature.
    """
    layer = nn.AdaptiveAvgPool1d(output_size=1)
    # Probe with a dummy batch of one to discover the output shape.
    c, w = current_in_shape
    y = layer(torch.randn(1, c, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_global_average_pooling2d(params, current_in_shape):
    """Create a global average pool (AdaptiveAvgPool2d to size 1) and infer shape.

    Fix: removed the unused ``in_channels`` local. *params* is unused but
    kept for a uniform builder signature.
    """
    layer = nn.AdaptiveAvgPool2d(output_size=1)
    # Probe with a dummy batch of one to discover the output shape.
    c, h, w = current_in_shape
    y = layer(torch.randn(1, c, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_global_average_pooling3d(params, current_in_shape):
    """Create a global average pool (AdaptiveAvgPool3d to size 1) and infer shape.

    Fix: removed the unused ``in_channels`` local. *params* is unused but
    kept for a uniform builder signature.
    """
    layer = nn.AdaptiveAvgPool3d(output_size=1)
    # Probe with a dummy batch of one to discover the output shape.
    c, d, h, w = current_in_shape
    y = layer(torch.randn(1, c, d, h, w))
    return layer, tuple(y.shape[1:])
@accepts(dict, tuple, post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_flatten(params, current_in_shape):
    """Create an nn.Flatten and compute the flattened feature count.

    Fix: the original duplicated an identical probe in three branches
    (one per input rank) and carried an unused ``in_channels`` local;
    ``torch.randn(1, *shape)`` covers every rank uniformly.

    Returns:
        tuple: (layer, feature_count) — feature_count is an int, matching
        the original's ``(y.shape[1])`` (parens without a comma, so not a
        tuple); downstream fully_connected accepts both int and tuple.
    """
    layer = nn.Flatten()
    # Probe with a dummy batch of one to discover the flattened width.
    y = layer(torch.randn(1, *current_in_shape))
    return layer, y.shape[1]
@accepts(dict, [int, tuple], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_fully_connected(params, current_in_shape):
    """Create an nn.Linear and report its output feature count.

    *current_in_shape* may be a bare int (e.g. after flatten) or a tuple
    whose first entry is the feature count.
    """
    if type(current_in_shape) == tuple:
        in_features = current_in_shape[0]
    elif type(current_in_shape) == int:
        in_features = current_in_shape
    layer = nn.Linear(in_features,
                      params["units"],
                      bias=params["use_bias"])
    # Probe with a dummy batch of one to confirm the output width.
    out = layer(torch.randn(1, in_features))
    return layer, out.shape[1]
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_dropout(params, current_in_shape):
    """Create an nn.Dropout; the input shape passes through unchanged."""
    return nn.Dropout(p=params["drop_probability"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_identity(params, current_in_shape):
    """Create an nn.Identity; the input shape passes through unchanged."""
    return nn.Identity(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_batch_normalization(params, current_in_shape):
    """Create a BatchNorm1d/2d/3d matching the rank of the input shape.

    Rank 2 (C, W) -> BatchNorm1d, rank 3 (C, H, W) -> BatchNorm2d,
    rank 4 (C, D, H, W) -> BatchNorm3d. Shape passes through unchanged.

    NOTE(review): ``track_running_stats`` is driven by the same flag as
    ``affine`` ("use_trainable_parameters") — confirm that coupling is
    intended.
    """
    common = dict(eps=params["epsilon"],
                  momentum=params["moving_average_momentum"],
                  affine=params["use_trainable_parameters"],
                  track_running_stats=params["use_trainable_parameters"])
    channels = current_in_shape[0]
    rank = len(current_in_shape)
    if rank == 2:
        layer = nn.BatchNorm1d(channels, **common)
    elif rank == 3:
        layer = nn.BatchNorm2d(channels, **common)
    elif rank == 4:
        layer = nn.BatchNorm3d(channels, **common)
    return layer, current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_instance_normalization(params, current_in_shape):
    """Create an InstanceNorm1d/2d/3d matching the rank of the input shape.

    Rank 2 (C, W) -> InstanceNorm1d, rank 3 (C, H, W) -> InstanceNorm2d,
    rank 4 (C, D, H, W) -> InstanceNorm3d. Shape passes through unchanged.

    NOTE(review): ``track_running_stats`` is driven by the same flag as
    ``affine`` ("use_trainable_parameters") — confirm that coupling is
    intended.
    """
    common = dict(eps=params["epsilon"],
                  momentum=params["moving_average_momentum"],
                  affine=params["use_trainable_parameters"],
                  track_running_stats=params["use_trainable_parameters"])
    channels = current_in_shape[0]
    rank = len(current_in_shape)
    if rank == 2:
        layer = nn.InstanceNorm1d(channels, **common)
    elif rank == 3:
        layer = nn.InstanceNorm2d(channels, **common)
    elif rank == 4:
        layer = nn.InstanceNorm3d(channels, **common)
    return layer, current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_layer_layer_normalization(params, current_in_shape):
    """Create an nn.LayerNorm normalising over the entire current shape.

    Fix: the decorator declares that *current_in_shape* may be a bare int
    (e.g. after flatten), but ``list(int)`` raised TypeError; a bare int
    is now wrapped into a one-element shape. Tuple inputs behave exactly
    as before. Shape passes through unchanged.
    """
    if isinstance(current_in_shape, int):
        normalized_shape = [current_in_shape]
    else:
        normalized_shape = list(current_in_shape)
    layer = nn.LayerNorm(normalized_shape,
                         eps=params["epsilon"],
                         elementwise_affine=params["use_trainable_parameters"])
    return layer, current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_relu(params, current_in_shape):
    """Create an nn.ReLU; the input shape passes through unchanged."""
    return nn.ReLU(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_sigmoid(params, current_in_shape):
    """Create an nn.Sigmoid; the input shape passes through unchanged."""
    return nn.Sigmoid(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_tanh(params, current_in_shape):
    """Create an nn.Tanh; the input shape passes through unchanged."""
    return nn.Tanh(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_softplus(params, current_in_shape):
    """Create an nn.Softplus from params["beta"] and params["threshold"]."""
    return nn.Softplus(beta=params["beta"], threshold=params["threshold"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_softsign(params, current_in_shape):
    """Create an nn.Softsign; the input shape passes through unchanged."""
    return nn.Softsign(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_elu(params, current_in_shape):
    """Create an nn.ELU from params["alpha"]."""
    return nn.ELU(alpha=params["alpha"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_leaky_relu(params, current_in_shape):
    """Create an nn.LeakyReLU; params["alpha"] is the negative slope."""
    return nn.LeakyReLU(negative_slope=params["alpha"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_prelu(params, current_in_shape):
    """Create an nn.PReLU; the input shape passes through unchanged."""
    return nn.PReLU(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_selu(params, current_in_shape):
    """Create an nn.SELU; the input shape passes through unchanged."""
    return nn.SELU(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_hardshrink(params, current_in_shape):
    """Create an nn.Hardshrink; params["threshold"] supplies lambd."""
    return nn.Hardshrink(lambd=params["threshold"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_hardtanh(params, current_in_shape):
    """Create an nn.Hardtanh clipped to [min_val, max_val]."""
    return nn.Hardtanh(min_val=params["min_val"], max_val=params["max_val"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_logsigmoid(params, current_in_shape):
    """Create an nn.LogSigmoid; the input shape passes through unchanged."""
    return nn.LogSigmoid(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_relu6(params, current_in_shape):
    """Create an nn.ReLU6; the input shape passes through unchanged."""
    return nn.ReLU6(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_rrelu(params, current_in_shape):
    """Create an nn.RReLU with default bounds."""
    return nn.RReLU(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_celu(params, current_in_shape):
    """Create an nn.CELU from params["alpha"]."""
    return nn.CELU(alpha=params["alpha"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_softshrink(params, current_in_shape):
    """Create an nn.Softshrink; params["threshold"] supplies lambd."""
    return nn.Softshrink(lambd=params["threshold"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_tanhshrink(params, current_in_shape):
    """Create an nn.Tanhshrink; the input shape passes through unchanged."""
    return nn.Tanhshrink(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_threshold(params, current_in_shape):
    """Create an nn.Threshold from params["threshold"] and params["value"]."""
    return nn.Threshold(params["threshold"], params["value"]), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_softmin(params, current_in_shape):
    """Create an nn.Softmin.

    NOTE(review): no ``dim`` is passed, so torch infers one from the
    input rank (with a deprecation warning) — confirm that is intended.
    """
    return nn.Softmin(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_softmax(params, current_in_shape):
    """Create an nn.Softmax.

    NOTE(review): no ``dim`` is passed, so torch infers one from the
    input rank (with a deprecation warning) — confirm that is intended.
    """
    return nn.Softmax(), current_in_shape
@accepts(dict, [tuple, int], post_trace=True)
@TraceFunction(trace_args=True, trace_rv=False)
def custom_model_activation_logsoftmax(params, current_in_shape):
    """Create an nn.LogSoftmax.

    NOTE(review): no ``dim`` is passed, so torch infers one from the
    input rank (with a deprecation warning) — confirm that is intended.
    """
    return nn.LogSoftmax(), current_in_shape
class Net_Add(nn.Module):
    """Parallel-branch module whose forward pass sums all branch outputs.

    Each element of *branches* is a list of layers that is wrapped in an
    nn.Sequential named "body<i>"; forward(x) feeds x through every branch
    and returns the elementwise sum of the results.

    Fix: the original wrote submodules into ``vars(self)`` (bypassing
    nn.Module registration) and then re-registered them with a second
    setattr loop; a single setattr per branch registers them directly.
    """
    def __init__(self, branches):
        super().__init__()
        self.child_names = []
        for i, branch in enumerate(branches):
            body = nn.Sequential()
            for j, sub_layer in enumerate(branch):
                body.add_module("br_{}_{}".format(i, j), sub_layer)
            name = "body" + str(i)
            setattr(self, name, body)  # nn.Module.__setattr__ registers it
            self.child_names.append(name)

    def forward(self, x):
        total = None
        for name in self.child_names:
            out = getattr(self, name)(x)
            total = out if total is None else total + out
        return total
class Net_Concat(nn.Module):
    """Parallel-branch module that concatenates branch outputs on dim 1.

    Each element of *branches* is a list of layers that is wrapped in an
    nn.Sequential named "body<i>"; forward(x) feeds x through every branch
    and concatenates the results along the channel dimension.

    Fix: the original wrote submodules into ``vars(self)`` (bypassing
    nn.Module registration) and then re-registered them with a second
    setattr loop; a single setattr per branch registers them directly.
    """
    def __init__(self, branches):
        super().__init__()
        self.child_names = []
        for i, branch in enumerate(branches):
            body = nn.Sequential()
            for j, sub_layer in enumerate(branch):
                body.add_module("br_{}_{}".format(i, j), sub_layer)
            name = "body" + str(i)
            setattr(self, name, body)  # nn.Module.__setattr__ registers it
            self.child_names.append(name)

    def forward(self, x):
        outs = [getattr(self, name)(x) for name in self.child_names]
        return torch.cat(tuple(outs), 1)
| 35.256895
| 110
| 0.660356
| 5,714
| 44,741
| 4.855268
| 0.035177
| 0.099917
| 0.155427
| 0.095195
| 0.901885
| 0.868435
| 0.83722
| 0.797643
| 0.761994
| 0.741665
| 0
| 0.007683
| 0.205851
| 44,741
| 1,269
| 111
| 35.256895
| 0.773128
| 0
| 0
| 0.719656
| 0
| 0
| 0.091547
| 0.013768
| 0
| 0
| 0
| 0
| 0
| 1
| 0.080559
| false
| 0
| 0.002148
| 0
| 0.2116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0220447b9e033ed90d50eb6f8552e493ee18d0b0
| 3,915
|
py
|
Python
|
examples/src/Charts/AnimatingSeriesElements.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
examples/src/Charts/AnimatingSeriesElements.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
examples/src/Charts/AnimatingSeriesElements.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
import aspose.slides as slides
def charts_animating_series_elements():
    """Animate the elements of an existing chart series-by-series.

    Loads a presentation with a chart, fades the whole chart in, then
    reveals every element of each series individually, and saves the
    result. Fix: the 12 copy-pasted add_effect calls (3 series x 4
    elements) are replaced by an equivalent nested loop.
    """
    #ExStart:AnimatingSeriesElements
    # The path to the documents directory.
    dataDir = "./examples/data/"
    outDir = "./examples/out/"
    # Load a presentation
    with slides.Presentation(dataDir + "charts_existing_chart.pptx") as presentation:
        # Get reference of the chart object
        slide = presentation.slides[0]
        shapes = slide.shapes
        chart = shapes[0]
        # Fade in the whole chart first
        slide.timeline.main_sequence.add_effect(chart, slides.animation.EffectType.FADE, slides.animation.EffectSubtype.NONE, slides.animation.EffectTriggerType.AFTER_PREVIOUS)
        # Reveal each element of each series (3 series x 4 elements,
        # same order as the original unrolled calls)
        for series_index in range(3):
            for element_index in range(4):
                slide.timeline.main_sequence.add_effect(
                    chart,
                    slides.animation.EffectChartMinorGroupingType.BY_ELEMENT_IN_SERIES,
                    series_index,
                    element_index,
                    slides.animation.EffectType.APPEAR,
                    slides.animation.EffectSubtype.NONE,
                    slides.animation.EffectTriggerType.AFTER_PREVIOUS)
        # Write the presentation file to disk
        presentation.save(outDir + "charts_animating_series_elements_out.pptx", slides.export.SaveFormat.PPTX)
    #ExEnd:AnimatingSeriesElements
| 103.026316
| 252
| 0.821967
| 446
| 3,915
| 7.029148
| 0.14574
| 0.244019
| 0.070494
| 0.103668
| 0.841786
| 0.841786
| 0.841786
| 0.841786
| 0.841786
| 0.824561
| 0
| 0.007287
| 0.088633
| 3,915
| 37
| 253
| 105.810811
| 0.871076
| 0.054151
| 0
| 0
| 0
| 0
| 0.026522
| 0.018133
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.045455
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
024dbaef9c26bc71d3bd020d037693481faf5542
| 139
|
py
|
Python
|
requests_oembed/__init__.py
|
dangayle/requests-oembed
|
7f3267ed2b67e313aea8ec72c274fb5d4ea11978
|
[
"MIT"
] | null | null | null |
requests_oembed/__init__.py
|
dangayle/requests-oembed
|
7f3267ed2b67e313aea8ec72c274fb5d4ea11978
|
[
"MIT"
] | null | null | null |
requests_oembed/__init__.py
|
dangayle/requests-oembed
|
7f3267ed2b67e313aea8ec72c274fb5d4ea11978
|
[
"MIT"
] | null | null | null |
# Re-export the package's public helpers at the top level.
from requests_oembed import (endpoints, get_endpoint, get_oembed, gist, oembed)

# __all__ must contain the *names* as strings, not the objects themselves.
# Listing the objects makes `from requests_oembed import *` raise
# "TypeError: Item in __all__ must be str".
__all__ = ["endpoints", "get_endpoint", "get_oembed", "gist", "oembed"]
| 46.333333
| 79
| 0.820144
| 19
| 139
| 5.526316
| 0.473684
| 0.228571
| 0.380952
| 0.438095
| 0.742857
| 0.742857
| 0.742857
| 0
| 0
| 0
| 0
| 0
| 0.086331
| 139
| 3
| 80
| 46.333333
| 0.826772
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
0267e9a93e2d77fcf4b1bd660ccd418625571f22
| 236
|
py
|
Python
|
cblib/scripts/filters/psdcones.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-06-13T06:57:31.000Z
|
2020-06-18T09:58:11.000Z
|
cblib/scripts/filters/psdcones.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 1
|
2019-04-27T18:28:57.000Z
|
2019-04-30T17:16:53.000Z
|
cblib/scripts/filters/psdcones.py
|
HFriberg/cblib-base
|
164a00eb73ef3ac61f5b54f30492209cc69b854b
|
[
"Zlib"
] | 3
|
2019-04-30T11:19:34.000Z
|
2019-05-31T13:12:17.000Z
|
import psdvarcones
import psdmapcones
def keyquery(cdim=None):
    """Return the union of the variable-cone and map-cone key queries."""
    varkeys = psdvarcones.keyquery(cdim)
    mapkeys = psdmapcones.keyquery(cdim)
    return varkeys | mapkeys
def getval(prob, cdim=None):
    """Return the sum of the variable-cone and map-cone values for *prob*."""
    varval = psdvarcones.getval(prob, cdim)
    mapval = psdmapcones.getval(prob, cdim)
    return varval + mapval
| 26.222222
| 73
| 0.771186
| 29
| 236
| 6.275862
| 0.344828
| 0.197802
| 0.230769
| 0.274725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105932
| 236
| 8
| 74
| 29.5
| 0.862559
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
028846f18bed975dba61de4144147c4ace3ac7fd
| 44,043
|
py
|
Python
|
alf/networks/encoding_networks.py
|
jesbu1/alf
|
def59fe39bdbca70a6c80e9b8f2c7c785cb59ea7
|
[
"Apache-2.0"
] | null | null | null |
alf/networks/encoding_networks.py
|
jesbu1/alf
|
def59fe39bdbca70a6c80e9b8f2c7c785cb59ea7
|
[
"Apache-2.0"
] | null | null | null |
alf/networks/encoding_networks.py
|
jesbu1/alf
|
def59fe39bdbca70a6c80e9b8f2c7c785cb59ea7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 Horizon Robotics. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import copy
import functools
import gin
import numpy as np
import torch
import torch.nn as nn
from .network import Network
from .preprocessor_networks import PreprocessorNetwork
import alf
import alf.layers as layers
from alf.initializers import variance_scaling_init
from alf.tensor_specs import TensorSpec
from alf.utils import common, math_ops
@gin.configurable
class ImageEncodingNetwork(Network):
    """
    A general template class for creating convolutional encoding networks.
    """

    def __init__(self,
                 input_channels,
                 input_size,
                 conv_layer_params,
                 same_padding=False,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 flatten_output=False,
                 name="ImageEncodingNetwork"):
        """
        Initialize the layers for encoding an image into a latent vector.
        Currently there seems no need for this class to handle nested inputs;
        If necessary, extend the argument list to support it in the future.

        How to calculate the output size:
        `<https://pytorch.org/docs/stable/nn.html#torch.nn.Conv2d>`_::

            H = (H1 - HF + 2P) // strides + 1

        where H = output size, H1 = input size, HF = size of kernel, P = padding.

        Regarding padding: in the previous TF version, we have two padding modes:
        ``valid`` and ``same``. For the former, we always have no padding (P=0); for
        the latter, it's also called "half padding" (P=(HF-1)//2 when strides=1
        and HF is an odd number the output has the same size with the input.
        Currently, PyTorch don't support different left and right paddings and
        P is always (HF-1)//2. So if HF is an even number, the output size will
        decrease by 1 when strides=1).

        Args:
            input_channels (int): number of channels in the input image
            input_size (int or tuple): the input image size (height, width)
            conv_layer_params (tuple[tuple]): a non-empty tuple of
                tuple (num_filters, kernel_size, strides, padding), where
                padding is optional
            same_padding (bool): similar to TF's conv2d ``same`` padding mode. If
                True, the user provided paddings in `conv_layer_params` will be
                replaced by automatically calculated ones; if False, it
                corresponds to TF's ``valid`` padding mode (the user can still
                provide custom paddings though)
            activation (torch.nn.functional): activation for all the layers
            kernel_initializer (Callable): initializer for all the layers.
            flatten_output (bool): If False, the output will be an image
                structure of shape ``BxCxHxW``; otherwise the output will be
                flattened into a feature of shape ``BxN``.
        """
        input_size = common.tuplify2d(input_size)
        super().__init__(
            input_tensor_spec=TensorSpec((input_channels, ) + input_size),
            name=name)
        assert isinstance(conv_layer_params, tuple)
        assert len(conv_layer_params) > 0
        self._flatten_output = flatten_output
        self._conv_layer_params = conv_layer_params
        self._conv_layers = nn.ModuleList()
        for paras in conv_layer_params:
            filters, kernel_size, strides = paras[:3]
            # The 4th entry (padding) is optional; default to no padding.
            padding = paras[3] if len(paras) > 3 else 0
            if same_padding:  # overwrite paddings with "half padding" (HF-1)//2
                kernel_size = common.tuplify2d(kernel_size)
                padding = ((kernel_size[0] - 1) // 2,
                           (kernel_size[1] - 1) // 2)
            self._conv_layers.append(
                layers.Conv2D(
                    input_channels,
                    filters,
                    kernel_size,
                    activation=activation,
                    kernel_initializer=kernel_initializer,
                    strides=strides,
                    padding=padding))
            # The next layer consumes this layer's output channels.
            input_channels = filters

    def forward(self, inputs, state=()):
        """The empty state just keeps the interface same with other networks."""
        z = inputs
        for conv_l in self._conv_layers:
            z = conv_l(z)
        if self._flatten_output:
            # Collapse (C, H, W) into a single feature dimension per batch item.
            z = torch.reshape(z, (z.size()[0], -1))
        return z, state
@gin.configurable
class ParallelImageEncodingNetwork(Network):
    """
    A Parallel Image Encoding Network that can be used to perform n
    independent image encodings in parallel.
    """

    def __init__(self,
                 input_channels,
                 input_size,
                 n,
                 conv_layer_params,
                 same_padding=False,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 flatten_output=False,
                 name="ParallelImageEncodingNetwork"):
        """
        Args:
            input_channels (int): number of channels in the input image
            input_size (int or tuple): the input image size (height, width)
            n (int): number of parallel networks
            conv_layer_params (tuple[tuple]): a non-empty tuple of
                tuple (num_filters, kernel_size, strides, padding), where
                padding is optional
            same_padding (bool): similar to TF's conv2d ``same`` padding mode. If
                True, the user provided paddings in `conv_layer_params` will be
                replaced by automatically calculated ones; if False, it
                corresponds to TF's ``valid`` padding mode (the user can still
                provide custom paddings though)
            activation (torch.nn.functional): activation for all the layers
            kernel_initializer (Callable): initializer for all the layers.
            flatten_output (bool): If False, the output will be an image
                structure of shape ``(B, n, C, H, W)``; otherwise the output
                will be flattened into a feature of shape ``(B, n, C*H*W)``.
        """
        input_size = common.tuplify2d(input_size)
        super().__init__(
            input_tensor_spec=TensorSpec((input_channels, ) + input_size),
            name=name)
        assert isinstance(conv_layer_params, tuple)
        assert len(conv_layer_params) > 0
        self._flatten_output = flatten_output
        self._conv_layer_params = conv_layer_params
        self._conv_layers = nn.ModuleList()
        for paras in conv_layer_params:
            filters, kernel_size, strides = paras[:3]
            # The 4th entry (padding) is optional; default to no padding.
            padding = paras[3] if len(paras) > 3 else 0
            if same_padding:  # overwrite paddings with "half padding" (HF-1)//2
                kernel_size = common.tuplify2d(kernel_size)
                padding = ((kernel_size[0] - 1) // 2,
                           (kernel_size[1] - 1) // 2)
            self._conv_layers.append(
                layers.ParallelConv2D(
                    input_channels,
                    filters,
                    kernel_size,
                    n,
                    activation=activation,
                    kernel_initializer=kernel_initializer,
                    strides=strides,
                    padding=padding))
            # The next layer consumes this layer's output channels.
            input_channels = filters

    def forward(self, inputs, state=()):
        """Forward

        Args:
            inputs (torch.Tensor): with shape ``[B, C, H, W]``
                                or ``[B, n, C, H, W]``
                where the meaning of the symbols are:

                - ``B``: batch size
                - ``n``: number of replicas
                - ``C``: number of channels
                - ``H``: image height
                - ``W``: image width.

                When the shape of inputs is ``[B, C, H, W]``, the same input is
                shared among all the n replicas.
                When the shape of img is ``[B, n, C, H, W]``, each replica
                will have its own data by slicing inputs.
            state: an empty state just keeps the interface same with other
                networks.

        Returns:
            - a tensor of shape ``(B, n, C, H, W)`` if ``flatten_output=False``
              ``(B, n, C*H*W)`` if ``flatten_output=True``
            - the empty state just to keep the interface same with other networks
        """
        z = inputs
        for conv_l in self._conv_layers:
            z = conv_l(z)
        if self._flatten_output:
            # Keep the (B, n) leading dims, flatten the rest into one feature dim.
            z = torch.reshape(z, (*z.size()[:2], -1))
        return z, state
@gin.configurable
class ImageDecodingNetwork(Network):
    """
    A general template class for creating transposed convolutional decoding networks.
    """

    def __init__(self,
                 input_size,
                 transconv_layer_params,
                 start_decoding_size,
                 start_decoding_channels,
                 same_padding=False,
                 preprocess_fc_layer_params=None,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 output_activation=torch.tanh,
                 name="ImageDecodingNetwork"):
        """
        Initialize the layers for decoding a latent vector into an image.
        Currently there seems no need for this class to handle nested inputs;
        If necessary, extend the argument list to support it in the future.

        How to calculate the output size:
        `<https://pytorch.org/docs/stable/nn.html#torch.nn.ConvTranspose2d>`_::

            H = (H1-1) * strides + HF - 2P + OP

        where H = output size, H1 = input size, HF = size of kernel, P = padding,
        OP = output_padding (currently hardcoded to be 0 for this class).

        Regarding padding: in the previous TF version, we have two padding modes:
        ``valid`` and ``same``. For the former, we always have no padding (P=0); for
        the latter, it's also called ``half padding`` (P=(HF-1)//2 when strides=1
        and HF is an odd number the output has the same size with the input.
        Currently, PyTorch doesn't support different left and right paddings and
        P is always (HF-1)//2. So if HF is an even number, the output size will
        increase by 1 when strides=1).

        Args:
            input_size (int): the size of the input latent vector
            transconv_layer_params (tuple[tuple]): a non-empty
                tuple of tuple (num_filters, kernel_size, strides, padding),
                where ``padding`` is optional.
            start_decoding_size (int or tuple): the initial height and width
                we'd like to have for the feature map
            start_decoding_channels (int): the initial number of channels we'd
                like to have for the feature map. Note that we always first
                project an input latent vector into a vector of an appropriate
                length so that it can be reshaped into (``start_decoding_channels``,
                ``start_decoding_height``, ``start_decoding_width``).
            same_padding (bool): similar to TF's conv2d ``same`` padding mode. If
                True, the user provided paddings in ``transconv_layer_params`` will
                be replaced by automatically calculated ones; if False, it
                corresponds to TF's ``valid`` padding mode (the user can still
                provide custom paddings though).
            preprocess_fc_layer_params (tuple[int]): a tuple of fc
                layer units. These fc layers are used for preprocessing the
                latent vector before transposed convolutions.
            activation (nn.functional): activation for hidden layers
            kernel_initializer (Callable): initializer for all the layers.
            output_activation (nn.functional): activation for the output layer.
                Usually our image inputs are normalized to [0, 1] or [-1, 1],
                so this function should be ``torch.sigmoid`` or
                ``torch.tanh``.
            name (str):
        """
        super().__init__(
            input_tensor_spec=TensorSpec((input_size, )), name=name)
        assert isinstance(transconv_layer_params, tuple)
        assert len(transconv_layer_params) > 0
        self._preprocess_fc_layers = nn.ModuleList()
        if preprocess_fc_layer_params is not None:
            for size in preprocess_fc_layer_params:
                self._preprocess_fc_layers.append(
                    layers.FC(
                        input_size,
                        size,
                        activation=activation,
                        kernel_initializer=kernel_initializer))
                input_size = size
        start_decoding_size = common.tuplify2d(start_decoding_size)
        # pytorch assumes "channels_first" !
        self._start_decoding_shape = [
            start_decoding_channels, start_decoding_size[0],
            start_decoding_size[1]
        ]
        # Final FC projects the latent vector to exactly the number of
        # elements needed to reshape into the start decoding feature map.
        self._preprocess_fc_layers.append(
            layers.FC(
                input_size,
                np.prod(self._start_decoding_shape),
                activation=activation,
                kernel_initializer=kernel_initializer))
        self._transconv_layer_params = transconv_layer_params
        self._transconv_layers = nn.ModuleList()
        in_channels = start_decoding_channels
        for i, paras in enumerate(transconv_layer_params):
            filters, kernel_size, strides = paras[:3]
            # The 4th entry (padding) is optional; default to no padding.
            padding = paras[3] if len(paras) > 3 else 0
            if same_padding:  # overwrite paddings with "half padding" (HF-1)//2
                kernel_size = common.tuplify2d(kernel_size)
                padding = ((kernel_size[0] - 1) // 2,
                           (kernel_size[1] - 1) // 2)
            act = activation
            # The last transposed conv uses the output activation instead.
            if i == len(transconv_layer_params) - 1:
                act = output_activation
            self._transconv_layers.append(
                layers.ConvTranspose2D(
                    in_channels,
                    filters,
                    kernel_size,
                    activation=act,
                    kernel_initializer=kernel_initializer,
                    strides=strides,
                    padding=padding))
            in_channels = filters

    def forward(self, inputs, state=()):
        """Returns an image of shape ``(B,C,H,W)``. The empty state just keeps the
        interface same with other networks.
        """
        z = inputs
        for fc_l in self._preprocess_fc_layers:
            z = fc_l(z)
        # Reshape the flat feature into the initial (C, H, W) feature map.
        z = torch.reshape(z, [-1] + self._start_decoding_shape)
        for deconv_l in self._transconv_layers:
            z = deconv_l(z)
        return z, state
@gin.configurable
class ParallelImageDecodingNetwork(Network):
    """
    A Parallel Image Decoding Network that can be used to perform n
    independent image decodings in parallel.
    """

    def __init__(self,
                 input_size,
                 n,
                 transconv_layer_params,
                 start_decoding_size,
                 start_decoding_channels,
                 same_padding=False,
                 preprocess_fc_layer_params=None,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 output_activation=torch.tanh,
                 name="ParallelImageDecodingNetwork"):
        """
        Args:
            input_size (int): the size of the input latent vector
            n (int): number of parallel networks
            transconv_layer_params (tuple[tuple]): a non-empty
                tuple of tuple (num_filters, kernel_size, strides, padding),
                where ``padding`` is optional.
            start_decoding_size (int or tuple): the initial height and width
                we'd like to have for the feature map
            start_decoding_channels (int): the initial number of channels we'd
                like to have for the feature map. Note that we always first
                project an input latent vector into a vector of an appropriate
                length so that it can be reshaped into (``start_decoding_channels``,
                ``start_decoding_height``, ``start_decoding_width``).
            same_padding (bool): similar to TF's conv2d ``same`` padding mode. If
                True, the user provided paddings in ``transconv_layer_params`` will
                be replaced by automatically calculated ones; if False, it
                corresponds to TF's ``valid`` padding mode (the user can still
                provide custom paddings though).
            preprocess_fc_layer_params (tuple[int]): a tuple of fc
                layer units. These fc layers are used for preprocessing the
                latent vector before transposed convolutions.
            activation (nn.functional): activation for hidden layers
            kernel_initializer (Callable): initializer for all the layers.
            output_activation (nn.functional): activation for the output layer.
                Usually our image inputs are normalized to [0, 1] or [-1, 1],
                so this function should be ``torch.sigmoid`` or
                ``torch.tanh``.
            name (str): the default was "ImageDecodingNetwork", an apparent
                copy-paste slip from the non-parallel class; every other
                network in this file defaults to its own class name, so this
                now defaults to "ParallelImageDecodingNetwork".
        """
        super().__init__(
            input_tensor_spec=TensorSpec((input_size, )), name=name)
        assert isinstance(transconv_layer_params, tuple)
        assert len(transconv_layer_params) > 0
        self._preprocess_fc_layers = nn.ModuleList()
        if preprocess_fc_layer_params is not None:
            for size in preprocess_fc_layer_params:
                self._preprocess_fc_layers.append(
                    layers.ParallelFC(
                        input_size,
                        size,
                        n,
                        activation=activation,
                        kernel_initializer=kernel_initializer))
                input_size = size
        start_decoding_size = common.tuplify2d(start_decoding_size)
        # pytorch assumes "channels_first" !
        self._start_decoding_shape = [
            start_decoding_channels, start_decoding_size[0],
            start_decoding_size[1]
        ]
        # Final FC projects the latent vector to exactly the number of
        # elements needed to reshape into the start decoding feature map.
        self._preprocess_fc_layers.append(
            layers.ParallelFC(
                input_size,
                np.prod(self._start_decoding_shape),
                n,
                activation=activation,
                kernel_initializer=kernel_initializer))
        self._transconv_layer_params = transconv_layer_params
        self._transconv_layers = nn.ModuleList()
        in_channels = start_decoding_channels
        for i, paras in enumerate(transconv_layer_params):
            filters, kernel_size, strides = paras[:3]
            # The 4th entry (padding) is optional; default to no padding.
            padding = paras[3] if len(paras) > 3 else 0
            if same_padding:  # overwrite paddings with "half padding" (HF-1)//2
                kernel_size = common.tuplify2d(kernel_size)
                padding = ((kernel_size[0] - 1) // 2,
                           (kernel_size[1] - 1) // 2)
            act = activation
            # The last transposed conv uses the output activation instead.
            if i == len(transconv_layer_params) - 1:
                act = output_activation
            self._transconv_layers.append(
                layers.ParallelConvTranspose2D(
                    in_channels,
                    filters,
                    kernel_size,
                    n,
                    activation=act,
                    kernel_initializer=kernel_initializer,
                    strides=strides,
                    padding=padding))
            in_channels = filters
        self._n = n

    def forward(self, inputs, state=()):
        """Forward

        Args:
            inputs (torch.Tensor): with shape ``[B, N]``
                                or ``[B, n, N]``
                where the meaning of the symbols are:

                - ``B``: batch size
                - ``n``: number of replicas
                - ``N``: dimension of the feature vector to be decoded.

                When the shape of inputs is ``[B, N]``, the same input is
                shared among all the n replicas.
                When the shape of img is ``[B, n, N]``, each replica
                will have its own data by slicing inputs.
            state: an empty state just keeps the interface same with other
                networks.

        Returns:
            - an image of shape ``(B, n, C, H, W)``
            - the empty state just to keep the interface same with other networks
        """
        z = inputs
        for fc_l in self._preprocess_fc_layers:
            z = fc_l(z)
        # Reshape each replica's flat feature into its (C, H, W) feature map.
        z = torch.reshape(z, [-1, self._n] + self._start_decoding_shape)
        for deconv_l in self._transconv_layers:
            z = deconv_l(z)
        return z, state
@gin.configurable
class EncodingNetwork(PreprocessorNetwork):
    """Feed Forward network with CNN and FC layers which allows the last layer
    to have different settings from the other layers.
    """

    def __init__(self,
                 input_tensor_spec,
                 input_preprocessors=None,
                 preprocessing_combiner=None,
                 conv_layer_params=None,
                 fc_layer_params=None,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 last_layer_size=None,
                 last_activation=None,
                 last_kernel_initializer=None,
                 name="EncodingNetwork"):
        """
        Args:
            input_tensor_spec (nested TensorSpec): the (nested) tensor spec of
                the input. If nested, then ``preprocessing_combiner`` must not be
                None.
            input_preprocessors (nested InputPreprocessor): a nest of
                ``InputPreprocessor``, each of which will be applied to the
                corresponding input. If not None, then it must have the same
                structure with ``input_tensor_spec``. This arg is helpful if you
                want to have separate preprocessings for different inputs by
                configuring a gin file without changing the code. For example,
                embedding a discrete input before concatenating it to another
                continuous vector.
            preprocessing_combiner (NestCombiner): preprocessing called on
                complex inputs. Note that this combiner must also accept
                ``input_tensor_spec`` as the input to compute the processed
                tensor spec. For example, see ``alf.nest.utils.NestConcat``. This
                arg is helpful if you want to combine inputs by configuring a
                gin file without changing the code.
            conv_layer_params (tuple[tuple]): a tuple of tuples where each
                tuple takes a format ``(filters, kernel_size, strides, padding)``,
                where ``padding`` is optional.
            fc_layer_params (tuple[int]): a tuple of integers
                representing FC layer sizes.
            activation (nn.functional): activation used for all the layers but
                the last layer.
            kernel_initializer (Callable): initializer for all the layers but
                the last layer. If None, a variance_scaling_initializer will be
                used.
            last_layer_size (int): an optional size of an additional layer
                appended at the very end. Note that if ``last_activation`` is
                specified, ``last_layer_size`` has to be specified explicitly.
            last_activation (nn.functional): activation function of the
                additional layer specified by ``last_layer_size``. Note that if
                ``last_layer_size`` is not None, ``last_activation`` has to be
                specified explicitly.
            last_kernel_initializer (Callable): initializer for the
                additional layer specified by ``last_layer_size``.
                If None, it will be the same with ``kernel_initializer``. If
                ``last_layer_size`` is None, ``last_kernel_initializer`` will
                not be used.
            name (str):
        """
        super().__init__(
            input_tensor_spec,
            input_preprocessors,
            preprocessing_combiner,
            name=name)
        if kernel_initializer is None:
            kernel_initializer = functools.partial(
                variance_scaling_init,
                mode='fan_in',
                distribution='truncated_normal',
                nonlinearity=activation)
        self._img_encoding_net = None
        if conv_layer_params:
            assert isinstance(conv_layer_params, tuple), \
                "The input params {} should be tuple".format(conv_layer_params)
            assert len(self._processed_input_tensor_spec.shape) == 3, \
                "The input shape {} should be like (C,H,W)!".format(
                    self._processed_input_tensor_spec.shape)
            input_channels, height, width = self._processed_input_tensor_spec.shape
            self._img_encoding_net = ImageEncodingNetwork(
                input_channels, (height, width),
                conv_layer_params,
                activation=activation,
                kernel_initializer=kernel_initializer,
                flatten_output=True)
            # FC layers consume the flattened conv output.
            input_size = self._img_encoding_net.output_spec.shape[0]
        else:
            assert self._processed_input_tensor_spec.ndim == 1, \
                "The input shape {} should be like (N,)!".format(
                    self._processed_input_tensor_spec.shape)
            input_size = self._processed_input_tensor_spec.shape[0]
        self._fc_layers = nn.ModuleList()
        if fc_layer_params is None:
            fc_layer_params = []
        else:
            assert isinstance(fc_layer_params, tuple)
            fc_layer_params = list(fc_layer_params)
        for size in fc_layer_params:
            self._fc_layers.append(
                layers.FC(
                    input_size,
                    size,
                    activation=activation,
                    kernel_initializer=kernel_initializer))
            input_size = size
        if last_layer_size is not None or last_activation is not None:
            assert last_layer_size is not None and last_activation is not None, \
                "Both last_layer_size and last_activation need to be specified!"
            if last_kernel_initializer is None:
                common.warning_once(
                    "last_kernel_initializer is not specified "
                    "for the last layer of size {}.".format(last_layer_size))
                last_kernel_initializer = kernel_initializer
            self._fc_layers.append(
                layers.FC(
                    input_size,
                    last_layer_size,
                    activation=last_activation,
                    kernel_initializer=last_kernel_initializer))
            input_size = last_layer_size
        self._output_spec = TensorSpec(
            (input_size, ), dtype=self._processed_input_tensor_spec.dtype)

    def forward(self, inputs, state=()):
        """
        Args:
            inputs (nested Tensor):
        """
        # call super to preprocess inputs
        z, state = super().forward(inputs, state)
        if self._img_encoding_net is not None:
            z, _ = self._img_encoding_net(z)
        for fc in self._fc_layers:
            z = fc(z)
        return z, state

    def make_parallel(self, n):
        """Make a parallelized version of this network.

        A parallel network has ``n`` copies of network with the same structure but
        different independently initialized parameters.

        For supported network structures (currently, networks with only FC layers)
        it will create ``ParallelCriticNetwork`` (PCN). Otherwise, it will
        create a ``NaiveParallelNetwork`` (NPN). However, PCN is not always
        faster than NPN. Especially for small ``n`` and large batch_size. See
        ``test_make_parallel()`` in critic_networks_test.py for detail.

        Returns:
            Network: A parallel network
        """
        # Only take the fast path when there are no input preprocessors and
        # the combiner is one ParallelEncodingNetwork knows how to handle.
        if (self.saved_args.get('input_preprocessors') is None and
            (self._preprocessing_combiner == math_ops.identity or isinstance(
                self._preprocessing_combiner,
                (alf.nest.utils.NestSum, alf.nest.utils.NestConcat)))):
            parallel_enc_net_args = dict(**self.saved_args)
            parallel_enc_net_args.update(n=n, name="parallel_" + self.name)
            return ParallelEncodingNetwork(**parallel_enc_net_args)
        else:
            # Fall back to the generic (naive) parallelization.
            return super().make_parallel(n)
@gin.configurable
class ParallelEncodingNetwork(PreprocessorNetwork):
    """Parallel feed-forward network with FC layers which allows the last layer
    to have different settings from the other layers.
    """

    def __init__(self,
                 input_tensor_spec,
                 n,
                 input_preprocessors=None,
                 preprocessing_combiner=None,
                 conv_layer_params=None,
                 fc_layer_params=None,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 last_layer_size=None,
                 last_activation=None,
                 last_kernel_initializer=None,
                 name="ParallelEncodingNetwork"):
        """
        Args:
            input_tensor_spec (nested TensorSpec): the (nested) tensor spec of
                the input. If nested, then ``preprocessing_combiner`` must not be
                None.
            n (int): number of parallel networks
            input_preprocessors (None): must be ``None``.
            preprocessing_combiner (NestCombiner): preprocessing called on
                complex inputs. Note that this combiner must also accept
                ``input_tensor_spec`` as the input to compute the processed
                tensor spec. For example, see ``alf.nest.utils.NestConcat``. This
                arg is helpful if you want to combine inputs by configuring a
                gin file without changing the code.
            conv_layer_params (tuple[tuple]): a tuple of tuples where each
                tuple takes a format ``(filters, kernel_size, strides, padding)``,
                where ``padding`` is optional.
            fc_layer_params (tuple[int]): a tuple of integers
                representing FC layer sizes.
            activation (nn.functional): activation used for all the layers but
                the last layer.
            kernel_initializer (Callable): initializer for all the layers but
                the last layer. If None, a variance_scaling_initializer will be
                used.
            last_layer_size (int): an optional size of an additional layer
                appended at the very end. Note that if ``last_activation`` is
                specified, ``last_layer_size`` has to be specified explicitly.
            last_activation (nn.functional): activation function of the
                additional layer specified by ``last_layer_size``. Note that if
                ``last_layer_size`` is not None, ``last_activation`` has to be
                specified explicitly.
            last_kernel_initializer (Callable): initializer for the
                additional layer specified by ``last_layer_size``.
                If None, it will be the same with ``kernel_initializer``. If
                ``last_layer_size`` is None, ``last_kernel_initializer`` will
                not be used.
            name (str):
        """
        super().__init__(
            input_tensor_spec,
            input_preprocessors=None,
            preprocessing_combiner=preprocessing_combiner,
            name=name)
        # TODO: handle input_preprocessors
        assert input_preprocessors is None
        if kernel_initializer is None:
            kernel_initializer = functools.partial(
                variance_scaling_init,
                mode='fan_in',
                distribution='truncated_normal',
                nonlinearity=activation)
        self._img_encoding_net = None
        if conv_layer_params:
            assert isinstance(conv_layer_params, tuple), \
                "The input params {} should be tuple".format(conv_layer_params)
            assert len(self._processed_input_tensor_spec.shape) == 3, \
                "The input shape {} should be like (C,H,W)!".format(
                    self._processed_input_tensor_spec.shape)
            input_channels, height, width = self._processed_input_tensor_spec.shape
            self._img_encoding_net = ParallelImageEncodingNetwork(
                input_channels, (height, width),
                n,
                conv_layer_params,
                activation=activation,
                kernel_initializer=kernel_initializer,
                flatten_output=True)
            # Output spec is (n, features); FC layers consume the feature dim.
            input_size = self._img_encoding_net.output_spec.shape[1]
        else:
            assert self._processed_input_tensor_spec.ndim == 1, \
                "The input shape {} should be like (N,)!".format(
                    self._processed_input_tensor_spec.shape)
            input_size = self._processed_input_tensor_spec.shape[0]
        self._fc_layers = nn.ModuleList()
        if fc_layer_params is None:
            fc_layer_params = []
        else:
            assert isinstance(fc_layer_params, tuple)
            fc_layer_params = list(fc_layer_params)
        for size in fc_layer_params:
            self._fc_layers.append(
                layers.ParallelFC(
                    input_size,
                    size,
                    n,
                    activation=activation,
                    kernel_initializer=kernel_initializer))
            input_size = size
        if last_layer_size is not None or last_activation is not None:
            assert last_layer_size is not None and last_activation is not None, \
                "Both last_layer_size and last_activation need to be specified!"
            if last_kernel_initializer is None:
                common.warning_once(
                    "last_kernel_initializer is not specified "
                    "for the last layer of size {}.".format(last_layer_size))
                last_kernel_initializer = kernel_initializer
            self._fc_layers.append(
                layers.ParallelFC(
                    input_size,
                    last_layer_size,
                    n,
                    activation=last_activation,
                    kernel_initializer=last_kernel_initializer))
            input_size = last_layer_size
        self._output_spec = TensorSpec(
            (n, input_size), dtype=self._processed_input_tensor_spec.dtype)
        self._n = n

    def forward(self, inputs, state=()):
        """
        Args:
            inputs (nested Tensor):
        """
        # call super to preprocess inputs
        z, state = super().forward(inputs, state, max_outer_rank=2)
        if self._img_encoding_net is None and len(self._fc_layers) == 0:
            # Identity network: if the input has no replica dim (ndim == 2),
            # replicate it across the n parallel copies explicitly.
            if inputs.ndim == 2:
                z = z.unsqueeze(1).expand(-1, self._n, *z.shape[1:])
        else:
            # Parallel layers handle replica broadcasting themselves.
            if self._img_encoding_net is not None:
                z, _ = self._img_encoding_net(z)
            for fc in self._fc_layers:
                z = fc(z)
        return z, state
@gin.configurable
class LSTMEncodingNetwork(Network):
    """LSTM cells followed by an encoding network."""

    def __init__(self,
                 input_tensor_spec,
                 input_preprocessors=None,
                 preprocessing_combiner=None,
                 conv_layer_params=None,
                 pre_fc_layer_params=None,
                 hidden_size=(100, ),
                 lstm_output_layers=-1,
                 post_fc_layer_params=None,
                 activation=torch.relu_,
                 kernel_initializer=None,
                 last_layer_size=None,
                 last_activation=None,
                 last_kernel_initializer=None,
                 name="LSTMEncodingNetwork"):
        """
        Args:
            input_tensor_spec (nested TensorSpec): the (nested) tensor spec of
                the input. If nested, then ``preprocessing_combiner`` must not be
                None.
            input_preprocessors (nested InputPreprocessor): a nest of
                ``InputPreprocessor``, each of which will be applied to the
                corresponding input. If not None, then it must have the same
                structure with ``input_tensor_spec``. This arg is helpful if you
                want to have separate preprocessings for different inputs by
                configuring a gin file without changing the code. For example,
                embedding a discrete input before concatenating it to another
                continuous vector.
            preprocessing_combiner (NestCombiner): preprocessing called on
                complex inputs. Note that this combiner must also accept
                ``input_tensor_spec`` as the input to compute the processed
                tensor spec. For example, see ``alf.nest.utils.NestConcat``. This
                arg is helpful if you want to combine inputs by configuring a
                gin file without changing the code.
            conv_layer_params (tuple[tuple]): a tuple of tuples where each
                tuple takes a format ``(filters, kernel_size, strides, padding)``,
                where ``padding`` is optional.
            pre_fc_layer_params (tuple[int]): a tuple of integers
                representing FC layers that are applied before the LSTM cells.
            hidden_size (int or tuple[int]): the hidden size(s) of
                the lstm cell(s). Each size corresponds to a cell. If there are
                multiple sizes, then lstm cells are stacked.
            lstm_output_layers (None|int|list[int]): -1 means the output from
                the last lstm layer. ``None`` means all lstm layers.
            post_fc_layer_params (tuple[int]): an optional tuple of
                integers representing hidden FC layers that are applied after
                the LSTM cells.
            activation (nn.functional): activation for all the layers but the
                last layer.
            kernel_initializer (Callable): initializer for all the layers but
                the last layer.
            last_layer_size (int): an optional size of an additional layer
                appended at the very end. Note that if ``last_activation`` is
                specified, ``last_layer_size`` has to be specified explicitly.
            last_activation (nn.functional): activation function of the
                additional layer specified by ``last_layer_size``. Note that if
                ``last_layer_size`` is not None, ``last_activation`` has to be
                specified explicitly.
            last_kernel_initializer (Callable): initializer for the the
                additional layer specified by ``last_layer_size``.
                If None, it will be the same with ``kernel_initializer``. If
                ``last_layer_size`` is None, ``last_kernel_initializer`` will
                not be used.
        """
        super().__init__(input_tensor_spec, name=name)

        if (input_preprocessors or preprocessing_combiner or conv_layer_params
                or pre_fc_layer_params):
            self._pre_encoding_net = EncodingNetwork(
                input_tensor_spec=input_tensor_spec,
                input_preprocessors=input_preprocessors,
                preprocessing_combiner=preprocessing_combiner,
                conv_layer_params=conv_layer_params,
                fc_layer_params=pre_fc_layer_params,
                activation=activation,
                kernel_initializer=kernel_initializer)
            input_size = self._pre_encoding_net.output_spec.shape[0]
        else:
            # No pre-processing requested: identity "network" with empty state.
            self._pre_encoding_net = lambda x: (x, ())
            input_size = input_tensor_spec.shape[0]

        if isinstance(hidden_size, int):
            hidden_size = [hidden_size]
        else:
            # Accept a list as well as a tuple (backward compatible).
            assert isinstance(hidden_size, (tuple, list)), \
                "hidden_size should be an int or a tuple/list of ints!"

        self._cells = nn.ModuleList()
        self._state_spec = []
        for hs in hidden_size:
            self._cells.append(
                torch.nn.LSTMCell(input_size=input_size, hidden_size=hs))
            self._state_spec.append(self._create_lstm_cell_state_spec(hs))
            input_size = hs

        if lstm_output_layers is None:
            lstm_output_layers = list(range(len(hidden_size)))
        elif isinstance(lstm_output_layers, int):
            lstm_output_layers = [lstm_output_layers]
        # Keep a private copy so later mutation of the caller's list cannot
        # change this network's behavior. (The original code assigned the
        # attribute twice; only the copied assignment is kept.)
        self._lstm_output_layers = copy.copy(lstm_output_layers)

        # Output width is the concatenation of the selected lstm layers.
        input_size = sum(hidden_size[i] for i in lstm_output_layers)

        if post_fc_layer_params is None and last_layer_size is None:
            self._post_encoding_net = lambda x: (x, ())
            self._output_spec = TensorSpec((input_size, ))
        else:
            self._post_encoding_net = EncodingNetwork(
                input_tensor_spec=TensorSpec((input_size, )),
                fc_layer_params=post_fc_layer_params,
                activation=activation,
                kernel_initializer=kernel_initializer,
                last_layer_size=last_layer_size,
                last_activation=last_activation,
                last_kernel_initializer=last_kernel_initializer)
            self._output_spec = self._post_encoding_net.output_spec

    def _create_lstm_cell_state_spec(self, hidden_size, dtype=torch.float32):
        """Create LSTMCell state specs given the hidden size and dtype, according to
        PyTorch `LSTMCell doc <https://pytorch.org/docs/stable/nn.html#torch.nn.LSTMCell>`_.

        Each LSTMCell has two states: h and c with the same shape.

        Args:
            hidden_size (int): the number of units in the hidden state
            dtype (torch.dtype): dtype of the specs
        Returns:
            specs (tuple[TensorSpec]):
        """
        state_spec = TensorSpec(shape=(hidden_size, ), dtype=dtype)
        # h and c have identical shapes, so one spec serves for both.
        return (state_spec, state_spec)

    def forward(self, inputs, state):
        """
        Args:
            inputs (nested torch.Tensor):
            state (list[tuple]): a list of tuples, where each tuple is a pair
                of ``h_state`` and ``c_state``.
        Returns:
            tuple:
            - output (torch.Tensor): output of the network
            - new_state (list[tuple]): the updated states
        """
        assert isinstance(state, list)
        for s in state:
            assert isinstance(s, tuple) and len(s) == 2, \
                "Each LSTMCell state should be a tuple of (h,c)!"
        assert len(self._cells) == len(state)

        new_state = []
        h_state, _ = self._pre_encoding_net(inputs)
        for cell, s in zip(self._cells, state):
            h_state, c_state = cell(h_state, s)
            new_state.append((h_state, c_state))

        # Fix: feed the *selected* layer's output to the post net. The
        # original left ``h_state`` as the last cell's output when a single
        # (possibly non-last) layer was selected.
        if len(self._lstm_output_layers) == 1:
            h_state = new_state[self._lstm_output_layers[0]][0]
        else:
            h_state = torch.cat(
                [new_state[l][0] for l in self._lstm_output_layers], -1)
        output, _ = self._post_encoding_net(h_state)
        return output, new_state

    @property
    def state_spec(self):
        return self._state_spec
| 44.577935
| 92
| 0.589152
| 5,054
| 44,043
| 4.922636
| 0.089434
| 0.037582
| 0.021705
| 0.021866
| 0.804775
| 0.785321
| 0.768922
| 0.753165
| 0.744403
| 0.732505
| 0
| 0.004952
| 0.339736
| 44,043
| 987
| 93
| 44.6231
| 0.850585
| 0.418477
| 0
| 0.759615
| 0
| 0
| 0.033374
| 0.004248
| 0
| 0
| 0
| 0.001013
| 0.044231
| 1
| 0.032692
| false
| 0
| 0.026923
| 0.001923
| 0.094231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a59255e6e651fbf080f0a9f574308f0bf255027
| 10,810
|
py
|
Python
|
cfgov/ask_cfpb/migrations/0045_remove_category_sidebar.py
|
adebisi-aden/consumerfinance.gov
|
8c0f5afac341823c59f73b0c6bd60592e0f5eaca
|
[
"CC0-1.0"
] | 37
|
2020-08-18T19:52:39.000Z
|
2022-03-23T08:08:41.000Z
|
cfgov/ask_cfpb/migrations/0045_remove_category_sidebar.py
|
adebisi-aden/consumerfinance.gov
|
8c0f5afac341823c59f73b0c6bd60592e0f5eaca
|
[
"CC0-1.0"
] | 338
|
2020-08-14T20:46:36.000Z
|
2022-03-31T20:49:32.000Z
|
cfgov/ask_cfpb/migrations/0045_remove_category_sidebar.py
|
adebisi-aden/consumerfinance.gov
|
8c0f5afac341823c59f73b0c6bd60592e0f5eaca
|
[
"CC0-1.0"
] | 14
|
2020-10-21T15:27:03.000Z
|
2022-03-17T03:16:36.000Z
|
# Generated by Django 2.2.24 on 2021-08-03 13:53
from django.db import migrations
import v1.atomic_elements.molecules
import v1.blocks
import v1.models.snippets
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.snippets.blocks
class Migration(migrations.Migration):
dependencies = [
('ask_cfpb', '0044_add_aria_label_to_hyperlinks'),
]
operations = [
migrations.AlterField(
model_name='answerpage',
name='sidebar',
field=wagtail.core.fields.StreamField([('call_to_action', wagtail.core.blocks.StructBlock([('slug_text', wagtail.core.blocks.CharBlock(required=False)), ('paragraph_text', wagtail.core.blocks.RichTextBlock(required=False)), ('button', wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(required=False)), ('aria_label', wagtail.core.blocks.CharBlock(help_text='Add an ARIA label if the link text does not describe the destination of the link (e.g. has ambiguous text like "Learn more" that is not descriptive on its own).', required=False)), ('url', wagtail.core.blocks.CharBlock(default='/', required=False)), ('size', wagtail.core.blocks.ChoiceBlock(choices=[('regular', 'Regular'), ('large', 'Large Primary')]))]))])), ('related_links', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)), ('links', wagtail.core.blocks.ListBlock(wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(required=False)), ('aria_label', wagtail.core.blocks.CharBlock(help_text='Add an ARIA label if the link text does not describe the destination of the link (e.g. 
has ambiguous text like "Learn more" that is not descriptive on its own).', required=False)), ('url', wagtail.core.blocks.CharBlock(default='/', required=False))])))])), ('related_metadata', wagtail.core.blocks.StructBlock([('slug', wagtail.core.blocks.CharBlock(max_length=100)), ('content', wagtail.core.blocks.StreamBlock([('text', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(max_length=100)), ('blob', wagtail.core.blocks.RichTextBlock())], icon='pilcrow')), ('list', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(max_length=100)), ('links', wagtail.core.blocks.ListBlock(wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(required=False)), ('aria_label', wagtail.core.blocks.CharBlock(help_text='Add an ARIA label if the link text does not describe the destination of the link (e.g. has ambiguous text like "Learn more" that is not descriptive on its own).', required=False)), ('url', wagtail.core.blocks.CharBlock(default='/', required=False))])))], icon='list-ul')), ('date', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(max_length=100)), ('date', wagtail.core.blocks.DateBlock())], icon='date')), ('topics', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(default='Topics', max_length=100)), ('show_topics', wagtail.core.blocks.BooleanBlock(default=True, required=False))], icon='tag'))])), ('is_half_width', wagtail.core.blocks.BooleanBlock(default=False, required=False))])), ('email_signup', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(default='Stay informed', required=False)), ('default_heading', wagtail.core.blocks.BooleanBlock(default=True, help_text='If selected, heading will be styled as an H5 with green top rule. 
Deselect to style header as H3.', label='Default heading style', required=False)), ('text', wagtail.core.blocks.CharBlock(help_text='Write a sentence or two about what kinds of emails the user is signing up for, how frequently they will be sent, etc.', required=False)), ('gd_code', wagtail.core.blocks.CharBlock(help_text='Code for the topic (i.e., mailing list) you want people who submit this form to subscribe to. Format: USCFPB_###', label='GovDelivery code', required=False)), ('disclaimer_page', wagtail.core.blocks.PageChooserBlock(help_text='Choose the page that the "See Privacy Act statement" link should go to. If in doubt, use "Generic Email Sign-Up Privacy Act Statement".', label='Privacy Act statement', required=False))])), ('sidebar_contact', wagtail.core.blocks.StructBlock([('contact', wagtail.snippets.blocks.SnippetChooserBlock('v1.Contact')), ('has_top_rule_line', wagtail.core.blocks.BooleanBlock(default=False, help_text='Add a horizontal rule line to top of contact block.', required=False))])), ('rss_feed', v1.atomic_elements.molecules.RSSFeed()), ('social_media', wagtail.core.blocks.StructBlock([('is_share_view', wagtail.core.blocks.BooleanBlock(default=True, help_text='If unchecked, social media icons will link users to official CFPB accounts. Do not fill in any further fields.', label='Desired action: share this page', required=False)), ('blurb', wagtail.core.blocks.CharBlock(default="Look what I found on the CFPB's site!", help_text='Sets the tweet text, email subject line, and LinkedIn post text.', required=False)), ('twitter_text', wagtail.core.blocks.CharBlock(help_text='(Optional) Custom text for Twitter shares. If blank, will default to value of blurb field above.', max_length=100, required=False)), ('twitter_related', wagtail.core.blocks.CharBlock(help_text='(Optional) A comma-separated list of accounts related to the content of the shared URL. Do not enter the @ symbol. 
If blank, it will default to just "cfpb".', required=False)), ('twitter_hashtags', wagtail.core.blocks.CharBlock(help_text='(Optional) A comma-separated list of hashtags to be appended to default tweet text.', required=False)), ('twitter_lang', wagtail.core.blocks.CharBlock(help_text='(Optional) Loads text components in the specified language, if other than English. E.g., use "es" for Spanish. See https://dev.twitter.com/web/overview/languages for a list of supported language codes.', required=False)), ('email_title', wagtail.core.blocks.CharBlock(help_text='(Optional) Custom subject for email shares. If blank, will default to value of blurb field above.', required=False)), ('email_text', wagtail.core.blocks.CharBlock(help_text='(Optional) Custom text for email shares. If blank, will default to "Check out this page from the CFPB".', required=False)), ('email_signature', wagtail.core.blocks.CharBlock(help_text='(Optional) Adds a custom signature line to email shares. ', required=False)), ('linkedin_title', wagtail.core.blocks.CharBlock(help_text='(Optional) Custom title for LinkedIn shares. If blank, will default to value of blurb field above.', required=False)), ('linkedin_text', wagtail.core.blocks.CharBlock(help_text='(Optional) Custom text for LinkedIn shares.', required=False))])), ('reusable_text', v1.blocks.ReusableTextChooserBlock(v1.models.snippets.ReusableText))], blank=True),
),
migrations.AlterField(
model_name='articlepage',
name='sidebar',
field=wagtail.core.fields.StreamField([('call_to_action', wagtail.core.blocks.StructBlock([('slug_text', wagtail.core.blocks.CharBlock(required=False)), ('paragraph_text', wagtail.core.blocks.RichTextBlock(required=False)), ('button', wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(required=False)), ('aria_label', wagtail.core.blocks.CharBlock(help_text='Add an ARIA label if the link text does not describe the destination of the link (e.g. has ambiguous text like "Learn more" that is not descriptive on its own).', required=False)), ('url', wagtail.core.blocks.CharBlock(default='/', required=False)), ('size', wagtail.core.blocks.ChoiceBlock(choices=[('regular', 'Regular'), ('large', 'Large Primary')]))]))])), ('related_links', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('paragraph', wagtail.core.blocks.RichTextBlock(required=False)), ('links', wagtail.core.blocks.ListBlock(wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(required=False)), ('aria_label', wagtail.core.blocks.CharBlock(help_text='Add an ARIA label if the link text does not describe the destination of the link (e.g. 
has ambiguous text like "Learn more" that is not descriptive on its own).', required=False)), ('url', wagtail.core.blocks.CharBlock(default='/', required=False))])))])), ('related_metadata', wagtail.core.blocks.StructBlock([('slug', wagtail.core.blocks.CharBlock(max_length=100)), ('content', wagtail.core.blocks.StreamBlock([('text', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(max_length=100)), ('blob', wagtail.core.blocks.RichTextBlock())], icon='pilcrow')), ('list', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(max_length=100)), ('links', wagtail.core.blocks.ListBlock(wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(required=False)), ('aria_label', wagtail.core.blocks.CharBlock(help_text='Add an ARIA label if the link text does not describe the destination of the link (e.g. has ambiguous text like "Learn more" that is not descriptive on its own).', required=False)), ('url', wagtail.core.blocks.CharBlock(default='/', required=False))])))], icon='list-ul')), ('date', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(max_length=100)), ('date', wagtail.core.blocks.DateBlock())], icon='date')), ('topics', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(default='Topics', max_length=100)), ('show_topics', wagtail.core.blocks.BooleanBlock(default=True, required=False))], icon='tag'))])), ('is_half_width', wagtail.core.blocks.BooleanBlock(default=False, required=False))])), ('email_signup', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(default='Stay informed', required=False)), ('default_heading', wagtail.core.blocks.BooleanBlock(default=True, help_text='If selected, heading will be styled as an H5 with green top rule. 
Deselect to style header as H3.', label='Default heading style', required=False)), ('text', wagtail.core.blocks.CharBlock(help_text='Write a sentence or two about what kinds of emails the user is signing up for, how frequently they will be sent, etc.', required=False)), ('gd_code', wagtail.core.blocks.CharBlock(help_text='Code for the topic (i.e., mailing list) you want people who submit this form to subscribe to. Format: USCFPB_###', label='GovDelivery code', required=False)), ('disclaimer_page', wagtail.core.blocks.PageChooserBlock(help_text='Choose the page that the "See Privacy Act statement" link should go to. If in doubt, use "Generic Email Sign-Up Privacy Act Statement".', label='Privacy Act statement', required=False))])), ('reusable_text', v1.blocks.ReusableTextChooserBlock(v1.models.snippets.ReusableText))], blank=True),
),
]
| 360.333333
| 6,343
| 0.754117
| 1,492
| 10,810
| 5.396783
| 0.168901
| 0.139344
| 0.209016
| 0.154993
| 0.822032
| 0.817933
| 0.812842
| 0.802409
| 0.797566
| 0.778192
| 0
| 0.006696
| 0.088252
| 10,810
| 29
| 6,344
| 372.758621
| 0.810268
| 0.004255
| 0
| 0.26087
| 1
| 0.913043
| 0.398439
| 0.003066
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.304348
| 0
| 0.434783
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 12
|
5a60490956e03473c342fa3da4fa5e46c3412072
| 9,394
|
py
|
Python
|
tests/rtllib/test_multipliers.py
|
gtzimpragos/PyRTL
|
deeea93aa37a48d1cd6d65b2b475575de02a7a1c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/rtllib/test_multipliers.py
|
gtzimpragos/PyRTL
|
deeea93aa37a48d1cd6d65b2b475575de02a7a1c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/rtllib/test_multipliers.py
|
gtzimpragos/PyRTL
|
deeea93aa37a48d1cd6d65b2b475575de02a7a1c
|
[
"BSD-3-Clause"
] | null | null | null |
import random
import unittest
import pyrtl
import pyrtl.rtllib.testingutils as utils
from pyrtl.rtllib import multipliers, adders, libutils
class TestSimpleMult(unittest.TestCase):
    """Tests for ``multipliers.simple_mult``."""

    def setUp(self):
        pyrtl.reset_working_block()

    def test_trivial_case(self):
        self.mult_t_base(1, 5)

    def test_trivial_case_2(self):
        self.mult_t_base(2, 1)

    def test_trivial_case_3(self):
        self.mult_t_base(1, 1)

    def test_simple_mult_1(self):
        self.mult_t_base(5, 7)

    def test_simple_mult_2(self):
        self.mult_t_base(2, 9)

    def mult_t_base(self, len_a, len_b):
        """Build a simple_mult of the given widths and check 20 random products."""
        a, b, reset = pyrtl.Input(len_a, "a"), pyrtl.Input(len_b, "b"), pyrtl.Input(1, 'reset')
        product, done = pyrtl.Output(name="product"), pyrtl.Output(name="done")
        m_prod, m_done = multipliers.simple_mult(a, b, reset)
        product <<= m_prod
        done <<= m_done
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(len(product), len_a + len_b)
        xvals = [int(random.uniform(0, 2**len_a - 1)) for i in range(20)]
        yvals = [int(random.uniform(0, 2**len_b - 1)) for i in range(20)]
        true_result = [i * j for i, j in zip(xvals, yvals)]
        mult_results = []
        for x_val, y_val in zip(xvals, yvals):
            sim_trace = pyrtl.SimulationTrace()
            sim = pyrtl.Simulation(tracer=sim_trace)
            # Pulse reset to load the operands, then clock until done.
            sim.step({a: x_val, b: y_val, reset: 1})
            for cycle in range(len(a) + 1):
                sim.step({a: 0, b: 0, reset: 0})
            # Extracting the values and verifying correctness
            mult_results.append(sim.inspect("product"))
            self.assertEqual(sim.inspect("done"), 1)
        self.assertEqual(mult_results, true_result)
class TestComplexMult(unittest.TestCase):
    """Tests for ``multipliers.complex_mult``, including invalid shift counts."""

    def setUp(self):
        pyrtl.reset_working_block()

    def test_trivial_case(self):
        with self.assertRaises(pyrtl.PyrtlError):
            self.mult_t_base(1, 5, 2)

    def test_trivial_case_2(self):
        with self.assertRaises(pyrtl.PyrtlError):
            self.mult_t_base(2, 1, 5)

    def test_trivial_case_3(self):
        self.mult_t_base(1, 1, 1)

    def test_complex_mult_1(self):
        self.mult_t_base(5, 7, 3)

    def test_complex_mult_2(self):
        self.mult_t_base(10, 12, 3)

    def test_complex_mult_3(self):
        with self.assertRaises(pyrtl.PyrtlError):
            self.mult_t_base(2, 9, 4)

    def test_complex_mult_4(self):
        with self.assertRaises(pyrtl.PyrtlError):
            self.mult_t_base(8, 4, 6)

    def mult_t_base(self, len_a, len_b, shifts):
        """Build a complex_mult shifting ``shifts`` bits/cycle and check 20 products."""
        a, b = pyrtl.Input(len_a, 'a'), pyrtl.Input(len_b, 'b')
        reset = pyrtl.Input(1, 'reset')
        product, done = pyrtl.Output(name='product'), pyrtl.Output(name='done')
        m_prod, m_done = multipliers.complex_mult(a, b, shifts, reset)
        product <<= m_prod
        done <<= m_done
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(len(product), len_a + len_b)
        xvals = [int(random.uniform(0, 2**len_a - 1)) for i in range(20)]
        yvals = [int(random.uniform(0, 2**len_b - 1)) for i in range(20)]
        true_result = [i * j for i, j in zip(xvals, yvals)]
        mult_results = []
        for x_val, y_val in zip(xvals, yvals):
            sim_trace = pyrtl.SimulationTrace()
            sim = pyrtl.Simulation(tracer=sim_trace)
            sim.step({a: x_val, b: y_val, reset: 1})
            # Cycles needed: ceil(len_a / shifts) plus one extra when the
            # width does not divide evenly.
            if shifts <= len_a:
                length = len_a // shifts + (1 if len_a % shifts == 0 else 2)
            else:
                length = len_a + 1
            for cycle in range(length):
                sim.step({a: 0, b: 0, reset: 0})
            # Extracting the values and verifying correctness
            mult_results.append(sim.inspect('product'))
            self.assertEqual(sim.inspect('done'), 1)
        self.assertEqual(mult_results, true_result)
class TestWallace(unittest.TestCase):
    """Tests for ``multipliers.tree_multiplier`` and the fused multiply-adders."""

    @classmethod
    def setUpClass(cls):
        # this is to ensure reproducibility
        random.seed(777906376)

    def setUp(self):
        pyrtl.reset_working_block()

    def mult_t_base(self, len_a, len_b, **mult_args):
        """Build a tree multiplier of the given widths and verify 20 random products."""
        # Creating the logic nets
        a, b = pyrtl.Input(len_a, "a"), pyrtl.Input(len_b, "b")
        product = pyrtl.Output(name="product")
        product <<= multipliers.tree_multiplier(a, b, **mult_args)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(len(product), len_a + len_b)
        # creating the testing values and the correct results
        xvals = [int(random.uniform(0, 2**len_a - 1)) for i in range(20)]
        yvals = [int(random.uniform(0, 2**len_b - 1)) for i in range(20)]
        true_result = [i * j for i, j in zip(xvals, yvals)]
        # Setting up and running the tests
        sim_trace = pyrtl.SimulationTrace()
        sim = pyrtl.Simulation(tracer=sim_trace)
        for cycle in range(len(xvals)):
            sim.step({a: xvals[cycle], b: yvals[cycle]})
        # Extracting the values and verifying correctness
        multiplier_result = sim_trace.trace[product]
        self.assertEqual(multiplier_result, true_result)

    def test_trivial_case(self):
        self.mult_t_base(1, 5)

    def test_trivial_case_2(self):
        self.mult_t_base(2, 1)

    def test_trivial_case_3(self):
        self.mult_t_base(1, 1)

    def test_wallace_tree_1(self):
        self.mult_t_base(5, 7)

    def test_wallace_tree_2(self):
        self.mult_t_base(2, 9)

    def test_dada_tree(self):
        self.mult_t_base(5, 10, reducer=adders.dada_reducer)

    def test_fma_1(self):
        wires, vals = utils.make_inputs_and_values(exact_bitwidth=10, num_wires=3,
                                                   dist=utils.inverse_power_dist)
        test_w = multipliers.fused_multiply_adder(wires[0], wires[1], wires[2], False,
                                                  reducer=adders.dada_reducer,
                                                  adder_func=adders.ripple_add)
        self.assertEqual(len(test_w), 20)
        # NOTE(review): outwire is 21 bits while test_w is 20 bits —
        # presumably intentional zero-extension headroom; confirm.
        outwire = pyrtl.Output(21, "test")
        outwire <<= test_w
        out_vals = utils.sim_and_ret_out(outwire, wires, vals)
        true_result = [vals[0][cycle] * vals[1][cycle] + vals[2][cycle]
                       for cycle in range(len(vals[0]))]
        self.assertEqual(out_vals, true_result)

    def test_gen_fma_1(self):
        wires, vals = utils.make_inputs_and_values(max_bitwidth=8, num_wires=8,
                                                   dist=utils.inverse_power_dist)
        # mixing tuples and lists solely for readability purposes
        mult_pairs = [(wires[0], wires[1]), (wires[2], wires[3]), (wires[4], wires[5])]
        add_wires = (wires[6], wires[7])
        outwire = pyrtl.Output(name="test")
        outwire <<= multipliers.generalized_fma(mult_pairs, add_wires, signed=False)
        out_vals = utils.sim_and_ret_out(outwire, wires, vals)
        true_result = [vals[0][cycle] * vals[1][cycle] + vals[2][cycle] * vals[3][cycle] +
                       vals[4][cycle] * vals[5][cycle] + vals[6][cycle] + vals[7][cycle]
                       for cycle in range(len(vals[0]))]
        self.assertEqual(out_vals, true_result)
class TestSignedTreeMult(unittest.TestCase):
    """Tests for ``multipliers.signed_tree_multiplier`` using two's-complement I/O."""

    @classmethod
    def setUpClass(cls):
        # this is to ensure reproducibility
        random.seed(777906375)

    def setUp(self):
        pyrtl.reset_working_block()

    def mult_t_base(self, len_a, len_b, **mult_args):
        """Build a signed tree multiplier and verify 20 random signed products."""
        # Creating the logic nets
        a, b = pyrtl.Input(len_a, "a"), pyrtl.Input(len_b, "b")
        product = pyrtl.Output(name="product")
        product <<= multipliers.signed_tree_multiplier(a, b, **mult_args)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(len(product), len_a + len_b)
        # creating the testing values and the correct results; bounds keep
        # the values representable in (len-1)-bit signed magnitude.
        bound_a = 2**(len_a - 1) - 1
        bound_b = 2**(len_b - 1) - 1
        xvals = [int(random.uniform(-bound_a, bound_a)) for i in range(20)]
        yvals = [int(random.uniform(-bound_b, bound_b)) for i in range(20)]
        true_result = [i * j for i, j in zip(xvals, yvals)]
        # Setting up and running the tests
        sim_trace = pyrtl.SimulationTrace()
        sim = pyrtl.Simulation(tracer=sim_trace)
        for cycle in range(len(xvals)):
            sim.step({
                a: libutils.twos_comp_repr(xvals[cycle], len_a),
                b: libutils.twos_comp_repr(yvals[cycle], len_b)
            })
        # Extracting the values and verifying correctness
        multiplier_result = [libutils.rev_twos_comp_repr(p, len(product))
                             for p in sim_trace.trace[product]]
        self.assertEqual(multiplier_result, true_result)

    def test_small_bitwidth_error(self):
        with self.assertRaises(pyrtl.PyrtlError):
            self.mult_t_base(1, 1)

    def test_trivial_case(self):
        self.mult_t_base(2, 3)

    def test_trivial_case_2(self):
        self.mult_t_base(4, 4)

    def test_trivial_case_3(self):
        self.mult_t_base(3, 4)

    def test_wallace_tree_1(self):
        self.mult_t_base(10, 3)

    def test_wallace_tree_2(self):
        self.mult_t_base(8, 8)

    def test_dada_tree(self):
        self.mult_t_base(5, 10, reducer=adders.dada_reducer)
| 36.410853
| 95
| 0.597615
| 1,319
| 9,394
| 4.035633
| 0.124337
| 0.02724
| 0.049033
| 0.061056
| 0.80387
| 0.781326
| 0.769303
| 0.764419
| 0.742438
| 0.711065
| 0
| 0.028333
| 0.28614
| 9,394
| 258
| 96
| 36.410853
| 0.765434
| 0.056738
| 0
| 0.56044
| 0
| 0
| 0.009494
| 0
| 0
| 0
| 0
| 0
| 0.098901
| 1
| 0.203297
| false
| 0
| 0.027473
| 0
| 0.252747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a62ac7db166975ac580d7cbeb62b1e3e6b9ff3b
| 4,981
|
py
|
Python
|
tests/utils/test_jinja_render_version_filters.py
|
bossjones/ultron8
|
45db73d32542a844570d44bc83defa935e15803f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/utils/test_jinja_render_version_filters.py
|
bossjones/ultron8
|
45db73d32542a844570d44bc83defa935e15803f
|
[
"Apache-2.0",
"MIT"
] | 43
|
2019-06-01T23:08:32.000Z
|
2022-02-07T22:24:53.000Z
|
tests/utils/test_jinja_render_version_filters.py
|
bossjones/ultron8
|
45db73d32542a844570d44bc83defa935e15803f
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
# st2common
from ultron8.utils import jinja as jinja_utils
class TestJinjaUtilsVersionsFilterTestCase:
    """Tests for the ``version_*`` Jinja filters registered by ``jinja_utils``.

    Each test renders a small template against a ``{"version": ...}`` context
    and compares the rendered string. The repeated env/template/render/assert
    boilerplate is factored into ``_render``.
    """

    @staticmethod
    def _render(template, version):
        """Render ``template`` with ``{"version": version}`` and return the string."""
        env = jinja_utils.get_jinja_environment()
        return env.from_string(template).render({"version": version})

    def test_version_compare(self):
        template = '{{version | version_compare("0.10.0")}}'
        # Lower, higher and equal versions map to "-1", "1" and "0".
        assert self._render(template, "0.9.0") == "-1"
        assert self._render(template, "0.10.1") == "1"
        assert self._render(template, "0.10.0") == "0"

    def test_version_more_than(self):
        template = '{{version | version_more_than("0.10.0")}}'
        assert self._render(template, "0.9.0") == "False"
        assert self._render(template, "0.10.1") == "True"
        # Equal is not "more than".
        assert self._render(template, "0.10.0") == "False"

    def test_version_less_than(self):
        template = '{{version | version_less_than("0.10.0")}}'
        assert self._render(template, "0.9.0") == "True"
        assert self._render(template, "0.10.1") == "False"
        # Equal is not "less than".
        assert self._render(template, "0.10.0") == "False"

    def test_version_equal(self):
        template = '{{version | version_equal("0.10.0")}}'
        assert self._render(template, "0.9.0") == "False"
        assert self._render(template, "0.10.1") == "False"
        assert self._render(template, "0.10.0") == "True"

    def test_version_match(self):
        assert self._render('{{version | version_match(">0.10.0")}}', "0.10.1") == "True"
        assert self._render('{{version | version_match(">0.10.0")}}', "0.1.1") == "False"
        assert self._render('{{version | version_match("<0.10.0")}}', "0.1.0") == "True"
        assert self._render('{{version | version_match("<0.10.0")}}', "1.1.0") == "False"
        assert self._render('{{version | version_match("==0.10.0")}}', "0.10.0") == "True"
        assert self._render('{{version | version_match("==0.10.0")}}', "0.10.1") == "False"

    def test_version_bump_major(self):
        assert self._render("{{version | version_bump_major}}", "0.10.1") == "1.0.0"

    def test_version_bump_minor(self):
        assert self._render("{{version | version_bump_minor}}", "0.10.1") == "0.11.0"

    def test_version_bump_patch(self):
        assert self._render("{{version | version_bump_patch}}", "0.10.1") == "0.10.2"

    def test_version_strip_patch(self):
        assert self._render("{{version | version_strip_patch}}", "0.10.1") == "0.10"
| 36.357664
| 72
| 0.60008
| 584
| 4,981
| 4.941781
| 0.070205
| 0.033264
| 0.099099
| 0.144837
| 0.936244
| 0.936244
| 0.919612
| 0.906098
| 0.872834
| 0.871448
| 0
| 0.042429
| 0.242923
| 4,981
| 136
| 73
| 36.625
| 0.722885
| 0.001807
| 0
| 0.733333
| 0
| 0
| 0.219517
| 0.086519
| 0
| 0
| 0
| 0
| 0.209524
| 1
| 0.085714
| false
| 0
| 0.009524
| 0
| 0.104762
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce56a3baf14f61b65baf66e7b7625cc14fa8d197
| 1,533
|
py
|
Python
|
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v1/losses/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 2
|
2020-09-30T00:11:09.000Z
|
2021-10-04T13:00:38.000Z
|
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v1/losses/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | null | null | null |
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/_api/v2/compat/v1/losses/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 1
|
2021-01-28T01:57:41.000Z
|
2021-01-28T01:57:41.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Loss operations for use in neural networks.
Note: All the losses are added to the `GraphKeys.LOSSES` collection by default.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.ops.losses.losses_impl import Reduction
from tensorflow.python.ops.losses.losses_impl import absolute_difference
from tensorflow.python.ops.losses.losses_impl import compute_weighted_loss
from tensorflow.python.ops.losses.losses_impl import cosine_distance
from tensorflow.python.ops.losses.losses_impl import hinge_loss
from tensorflow.python.ops.losses.losses_impl import huber_loss
from tensorflow.python.ops.losses.losses_impl import log_loss
from tensorflow.python.ops.losses.losses_impl import mean_pairwise_squared_error
from tensorflow.python.ops.losses.losses_impl import mean_squared_error
from tensorflow.python.ops.losses.losses_impl import sigmoid_cross_entropy
from tensorflow.python.ops.losses.losses_impl import softmax_cross_entropy
from tensorflow.python.ops.losses.losses_impl import sparse_softmax_cross_entropy
from tensorflow.python.ops.losses.util import add_loss
from tensorflow.python.ops.losses.util import get_losses
from tensorflow.python.ops.losses.util import get_regularization_loss
from tensorflow.python.ops.losses.util import get_regularization_losses
from tensorflow.python.ops.losses.util import get_total_loss
del _print_function
| 47.90625
| 82
| 0.861709
| 230
| 1,533
| 5.521739
| 0.295652
| 0.226772
| 0.267717
| 0.307874
| 0.70315
| 0.70315
| 0.70315
| 0.695276
| 0.510236
| 0.179528
| 0
| 0
| 0.078278
| 1,533
| 31
| 83
| 49.451613
| 0.898797
| 0.163731
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.95
| 0
| 0.95
| 0.1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.