hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8aceabfbc864dc5048bf9b004819ee81f8484d27 | 674 | py | Python | Ising-model/InteractionParametersSolve.py | lizhenzhupearl/polytype | a5277c6242f8a986aab65b25fa0b9763e136d637 | [
"MIT"
] | 5 | 2020-06-14T12:50:32.000Z | 2021-08-29T10:39:39.000Z | Ising-model/InteractionParametersSolve.py | lizhenzhupearl/polytype | a5277c6242f8a986aab65b25fa0b9763e136d637 | [
"MIT"
] | null | null | null | Ising-model/InteractionParametersSolve.py | lizhenzhupearl/polytype | a5277c6242f8a986aab65b25fa0b9763e136d637 | [
"MIT"
] | 3 | 2020-06-14T12:50:34.000Z | 2021-07-27T04:08:57.000Z |
import numpy as np
MAT = np.array([[2,2,2,2,2,2,2,2], #2H
[3,-3,3,3,3,-3,-3,3],#3C
[4,0,-4,4,-4,0,0,4],#4H
[6,-2,-2,-2,6,6,-2,-2],#6H
[9,3,-3,-3,9,-9,3,-3],#9R
[12,0,0,-12,0,0,0,12],#12R
[12,-8,4,4,4,0,0,-4],#12H
])
ENG = [-24.651502, -36.707196, -49.048539, -73.554815, -110.63025, -147.36321, -146.929] #CsPbI3
#ENG = [-28.057165, -42.207545, -56.101650, -84.211709, -126.20073, -168.32148, -168.60493] #CsPbBr3
print(MAT)
print(ENG)
J, residuals, rank, s = np.linalg.lstsq(MAT,ENG,rcond=None)
#J = np.linalg.solve(MAT,ENG)
print(J)
#print(residuals)
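# Added sanity check (illustrative; not part of the original script): the
# 7x8 system is underdetermined, so lstsq returns a minimum-norm J. Verify
# how closely the fitted parameters reproduce the target energies.
print(np.abs(MAT @ J - np.array(ENG)).max())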
| 24.962963 | 100 | 0.488131 | 127 | 674 | 2.590551 | 0.480315 | 0.06079 | 0.06383 | 0.06079 | 0.085106 | 0.085106 | 0.048632 | 0 | 0 | 0 | 0 | 0.358416 | 0.250742 | 674 | 26 | 101 | 25.923077 | 0.293069 | 0.243323 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0.214286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
8ad1a0b1c7d07cd5de14a4104ffe975d1dc02bb1 | 26,675 | py | Python | tests/unit/test__helpers.py | renovate-bot/google-resumable-media-python | 1f01b88d0ce05ca561359de1ad89b47c6c60c9b7 | [
"Apache-2.0"
] | null | null | null | tests/unit/test__helpers.py | renovate-bot/google-resumable-media-python | 1f01b88d0ce05ca561359de1ad89b47c6c60c9b7 | [
"Apache-2.0"
] | null | null | null | tests/unit/test__helpers.py | renovate-bot/google-resumable-media-python | 1f01b88d0ce05ca561359de1ad89b47c6c60c9b7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import hashlib
import http.client
import mock
import pytest
import requests.exceptions
from google.resumable_media import _helpers
from google.resumable_media import common
def test_do_nothing():
ret_val = _helpers.do_nothing()
assert ret_val is None
class Test_header_required(object):
def _success_helper(self, **kwargs):
name = "some-header"
value = "The Right Hand Side"
headers = {name: value, "other-name": "other-value"}
response = mock.Mock(headers=headers, spec=["headers"])
result = _helpers.header_required(response, name, _get_headers, **kwargs)
assert result == value
def test_success(self):
self._success_helper()
def test_success_with_callback(self):
callback = mock.Mock(spec=[])
self._success_helper(callback=callback)
callback.assert_not_called()
def _failure_helper(self, **kwargs):
response = mock.Mock(headers={}, spec=["headers"])
name = "any-name"
with pytest.raises(common.InvalidResponse) as exc_info:
_helpers.header_required(response, name, _get_headers, **kwargs)
error = exc_info.value
assert error.response is response
assert len(error.args) == 2
assert error.args[1] == name
def test_failure(self):
self._failure_helper()
def test_failure_with_callback(self):
callback = mock.Mock(spec=[])
self._failure_helper(callback=callback)
callback.assert_called_once_with()
class Test_require_status_code(object):
@staticmethod
def _get_status_code(response):
return response.status_code
def test_success(self):
status_codes = (http.client.OK, http.client.CREATED)
acceptable = (
http.client.OK,
int(http.client.OK),
http.client.CREATED,
int(http.client.CREATED),
)
for value in acceptable:
response = _make_response(value)
status_code = _helpers.require_status_code(
response, status_codes, self._get_status_code
)
assert value == status_code
def test_success_with_callback(self):
status_codes = (http.client.OK,)
response = _make_response(http.client.OK)
callback = mock.Mock(spec=[])
status_code = _helpers.require_status_code(
response, status_codes, self._get_status_code, callback=callback
)
assert status_code == http.client.OK
callback.assert_not_called()
def test_failure(self):
status_codes = (http.client.CREATED, http.client.NO_CONTENT)
response = _make_response(http.client.OK)
with pytest.raises(common.InvalidResponse) as exc_info:
_helpers.require_status_code(response, status_codes, self._get_status_code)
error = exc_info.value
assert error.response is response
assert len(error.args) == 5
assert error.args[1] == response.status_code
assert error.args[3:] == status_codes
def test_failure_with_callback(self):
status_codes = (http.client.OK,)
response = _make_response(http.client.NOT_FOUND)
callback = mock.Mock(spec=[])
with pytest.raises(common.InvalidResponse) as exc_info:
_helpers.require_status_code(
response, status_codes, self._get_status_code, callback=callback
)
error = exc_info.value
assert error.response is response
assert len(error.args) == 4
assert error.args[1] == response.status_code
assert error.args[3:] == status_codes
callback.assert_called_once_with()
def test_retryable_failure_without_callback(self):
status_codes = (http.client.OK,)
retryable_responses = [
_make_response(status_code) for status_code in common.RETRYABLE
]
callback = mock.Mock(spec=[])
for retryable_response in retryable_responses:
with pytest.raises(common.InvalidResponse) as exc_info:
_helpers.require_status_code(
retryable_response,
status_codes,
self._get_status_code,
callback=callback,
)
error = exc_info.value
assert error.response is retryable_response
assert len(error.args) == 4
assert error.args[1] == retryable_response.status_code
assert error.args[3:] == status_codes
callback.assert_not_called()
class Test_calculate_retry_wait(object):
@mock.patch("random.randint", return_value=125)
def test_past_limit(self, randint_mock):
base_wait, wait_time = _helpers.calculate_retry_wait(70.0, 64.0)
assert base_wait == 64.0
assert wait_time == 64.125
randint_mock.assert_called_once_with(0, 1000)
@mock.patch("random.randint", return_value=250)
def test_at_limit(self, randint_mock):
base_wait, wait_time = _helpers.calculate_retry_wait(50.0, 50.0)
assert base_wait == 50.0
assert wait_time == 50.25
randint_mock.assert_called_once_with(0, 1000)
@mock.patch("random.randint", return_value=875)
def test_under_limit(self, randint_mock):
base_wait, wait_time = _helpers.calculate_retry_wait(16.0, 33.0)
assert base_wait == 32.0
assert wait_time == 32.875
randint_mock.assert_called_once_with(0, 1000)
@mock.patch("random.randint", return_value=875)
def test_custom_multiplier(self, randint_mock):
base_wait, wait_time = _helpers.calculate_retry_wait(16.0, 64.0, 3)
assert base_wait == 48.0
assert wait_time == 48.875
randint_mock.assert_called_once_with(0, 1000)
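# Worked sketch of the backoff arithmetic asserted above (illustrative only;
# _helpers.calculate_retry_wait is the real implementation): the new base is
# min(base_wait * multiplier, max_sleep), and the actual sleep adds up to one
# second of random jitter (randint(0, 1000) milliseconds).
def _backoff_sketch(base_wait, max_sleep, jitter_ms, multiplier=2):
    new_base = min(base_wait * multiplier, max_sleep)
    return new_base, new_base + jitter_ms / 1000.0
assert _backoff_sketch(16.0, 33.0, 875) == (32.0, 32.875)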
class Test_wait_and_retry(object):
def test_success_no_retry(self):
truthy = http.client.OK
assert truthy not in common.RETRYABLE
response = _make_response(truthy)
func = mock.Mock(return_value=response, spec=[])
retry_strategy = common.RetryStrategy()
ret_val = _helpers.wait_and_retry(func, _get_status_code, retry_strategy)
assert ret_val is response
func.assert_called_once_with()
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_success_with_retry(self, randint_mock, sleep_mock):
status_codes = common.RETRYABLE + (http.client.NOT_FOUND,)
responses = [_make_response(status_code) for status_code in status_codes]
randint_mock.side_effect = [75 * i for i in range(len(status_codes) - 1)]
def raise_response():
raise common.InvalidResponse(responses.pop(0))
func = mock.Mock(side_effect=raise_response)
retry_strategy = common.RetryStrategy()
try:
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
except common.InvalidResponse as e:
ret_val = e.response
assert ret_val.status_code == status_codes[-1]
assert status_codes[-1] not in common.RETRYABLE
assert func.call_count == len(status_codes)
assert func.mock_calls == [mock.call()] * len(status_codes)
assert randint_mock.call_count == len(status_codes) - 1
assert randint_mock.mock_calls == [mock.call(0, 1000)] * (len(status_codes) - 1)
assert sleep_mock.call_count == len(status_codes) - 1
wait = 1
multiplier = 2
for i in range(len(status_codes) - 1):
randint = i * 75 / 1000
sleep_mock.assert_any_call(wait + randint)
wait = wait * multiplier
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_success_with_retry_custom_delay(self, randint_mock, sleep_mock):
status_codes = (
http.client.INTERNAL_SERVER_ERROR,
http.client.BAD_GATEWAY,
http.client.SERVICE_UNAVAILABLE,
http.client.NOT_FOUND,
)
responses = [_make_response(status_code) for status_code in status_codes]
randint_mock.side_effect = [75 * i for i in range(len(status_codes) - 1)]
def raise_response():
raise common.InvalidResponse(responses.pop(0))
func = mock.Mock(side_effect=raise_response)
initial_delay = 3.0
multiplier = 4
retry_strategy = common.RetryStrategy(
initial_delay=initial_delay, multiplier=multiplier
)
try:
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
except common.InvalidResponse as e:
ret_val = e.response
assert ret_val.status_code == status_codes[-1]
assert status_codes[-1] not in common.RETRYABLE
assert func.call_count == len(status_codes)
assert func.mock_calls == [mock.call()] * len(status_codes)
assert randint_mock.call_count == len(status_codes) - 1
assert randint_mock.mock_calls == [mock.call(0, 1000)] * (len(status_codes) - 1)
assert sleep_mock.call_count == len(status_codes) - 1
for i in range(len(status_codes) - 1):
randint = i * 75 / 1000
sleep_mock.assert_any_call(initial_delay + randint)
initial_delay = initial_delay * multiplier
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_success_with_retry_connection_error(self, randint_mock, sleep_mock):
response = _make_response(http.client.NOT_FOUND)
responses = [
requests.exceptions.ConnectionError,
requests.exceptions.ConnectionError,
requests.exceptions.ConnectionError,
response,
]
func = mock.Mock(side_effect=responses, spec=[])
randint_mock.side_effect = [125 * i for i in range(len(responses) - 1)]
retry_strategy = common.RetryStrategy()
ret_val = _helpers.wait_and_retry(func, _get_status_code, retry_strategy)
assert ret_val == responses[-1]
assert func.call_count == len(responses)
assert func.mock_calls == [mock.call()] * len(responses)
assert randint_mock.call_count == len(responses) - 1
assert randint_mock.mock_calls == [mock.call(0, 1000)] * (len(responses) - 1)
assert sleep_mock.call_count == len(responses) - 1
wait = 1
multiplier = 2
for i in range(len(responses) - 1):
randint = i * 125 / 1000
sleep_mock.assert_any_call(wait + randint)
wait = wait * multiplier
@mock.patch(u"time.sleep")
@mock.patch(u"random.randint")
def test_success_with_retry_chunked_encoding_error(self, randint_mock, sleep_mock):
response = _make_response(http.client.NOT_FOUND)
responses = [
requests.exceptions.ChunkedEncodingError,
requests.exceptions.ChunkedEncodingError,
response,
]
func = mock.Mock(side_effect=responses, spec=[])
randint_mock.side_effect = [125 * i for i in range(len(responses) - 1)]
retry_strategy = common.RetryStrategy()
ret_val = _helpers.wait_and_retry(func, _get_status_code, retry_strategy)
assert ret_val == responses[-1]
assert func.call_count == len(responses)
assert func.mock_calls == [mock.call()] * len(responses)
assert randint_mock.call_count == len(responses) - 1
assert randint_mock.mock_calls == [mock.call(0, 1000)] * (len(responses) - 1)
assert sleep_mock.call_count == len(responses) - 1
wait = 1
multiplier = 2
for i in range(len(responses) - 1):
randint = i * 125 / 1000
sleep_mock.assert_any_call(wait + randint)
wait = wait * multiplier
@mock.patch(u"time.sleep")
@mock.patch(u"random.randint")
def test_connection_import_error_failure(self, randint_mock, sleep_mock):
response = _make_response(http.client.NOT_FOUND)
responses = [
requests.exceptions.ConnectionError,
requests.exceptions.ConnectionError,
requests.exceptions.ConnectionError,
response,
]
randint_mock.side_effect = [125 * i for i in range(len(responses) - 1)]
with mock.patch(
"google.resumable_media._helpers._get_connection_error_classes",
side_effect=ImportError,
):
with pytest.raises(requests.exceptions.ConnectionError):
func = mock.Mock(side_effect=responses, spec=[])
retry_strategy = common.RetryStrategy()
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock):
randint_mock.side_effect = [875, 0, 375, 500, 500, 250, 125]
status_codes = (
http.client.SERVICE_UNAVAILABLE,
http.client.GATEWAY_TIMEOUT,
http.client.TOO_MANY_REQUESTS,
http.client.INTERNAL_SERVER_ERROR,
http.client.SERVICE_UNAVAILABLE,
http.client.BAD_GATEWAY,
http.client.TOO_MANY_REQUESTS,
)
responses = [_make_response(status_code) for status_code in status_codes]
def raise_response():
raise common.InvalidResponse(responses.pop(0))
func = mock.Mock(side_effect=raise_response)
retry_strategy = common.RetryStrategy(max_cumulative_retry=100.0)
try:
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
except common.InvalidResponse as e:
ret_val = e.response
assert ret_val.status_code == status_codes[-1]
assert status_codes[-1] in common.RETRYABLE
assert func.call_count == 7
assert func.mock_calls == [mock.call()] * 7
assert randint_mock.call_count == 7
assert randint_mock.mock_calls == [mock.call(0, 1000)] * 7
assert sleep_mock.call_count == 6
sleep_mock.assert_any_call(1.875)
sleep_mock.assert_any_call(2.0)
sleep_mock.assert_any_call(4.375)
sleep_mock.assert_any_call(8.5)
sleep_mock.assert_any_call(16.5)
sleep_mock.assert_any_call(32.25)
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_retry_exceeds_max_retries(self, randint_mock, sleep_mock):
status_codes = (
http.client.SERVICE_UNAVAILABLE,
http.client.GATEWAY_TIMEOUT,
http.client.TOO_MANY_REQUESTS,
http.client.INTERNAL_SERVER_ERROR,
http.client.SERVICE_UNAVAILABLE,
http.client.BAD_GATEWAY,
http.client.TOO_MANY_REQUESTS,
)
responses = [_make_response(status_code) for status_code in status_codes]
randint_mock.side_effect = [75 * i for i in range(len(responses))]
def raise_response():
raise common.InvalidResponse(responses.pop(0))
func = mock.Mock(side_effect=raise_response)
max_retries = 6
retry_strategy = common.RetryStrategy(max_retries=max_retries)
try:
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
except common.InvalidResponse as e:
ret_val = e.response
assert ret_val.status_code == status_codes[-1]
assert status_codes[-1] in common.RETRYABLE
assert func.call_count == max_retries + 1
assert func.mock_calls == [mock.call()] * (max_retries + 1)
assert randint_mock.call_count == max_retries + 1
assert randint_mock.mock_calls == [mock.call(0, 1000)] * (max_retries + 1)
assert sleep_mock.call_count == max_retries
wait = 1
multiplier = 2
for i in range(max_retries - 1):
randint = i * 75 / 1000
sleep_mock.assert_any_call(wait + randint)
wait = wait * multiplier
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_retry_zero_max_retries(self, randint_mock, sleep_mock):
status_codes = (
http.client.SERVICE_UNAVAILABLE,
http.client.GATEWAY_TIMEOUT,
http.client.TOO_MANY_REQUESTS,
)
responses = [_make_response(status_code) for status_code in status_codes]
randint_mock.side_effect = [125 * i for i in range(len(status_codes))]
def raise_response():
raise common.InvalidResponse(responses.pop(0))
func = mock.Mock(side_effect=raise_response)
retry_strategy = common.RetryStrategy(max_retries=0)
try:
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
except common.InvalidResponse as e:
ret_val = e.response
assert func.call_count == 1
assert func.mock_calls == [mock.call()] * 1
assert ret_val.status_code == status_codes[0]
assert randint_mock.call_count == 1
assert sleep_mock.call_count == 0
@mock.patch("time.sleep")
@mock.patch("random.randint")
def test_retry_exceeded_reraises_connection_error(self, randint_mock, sleep_mock):
randint_mock.side_effect = [875, 0, 375, 500, 500, 250, 125]
responses = [requests.exceptions.ConnectionError] * 7
func = mock.Mock(side_effect=responses, spec=[])
retry_strategy = common.RetryStrategy(max_cumulative_retry=100.0)
with pytest.raises(requests.exceptions.ConnectionError):
_helpers.wait_and_retry(func, _get_status_code, retry_strategy)
assert func.call_count == 7
assert func.mock_calls == [mock.call()] * 7
assert randint_mock.call_count == 7
assert randint_mock.mock_calls == [mock.call(0, 1000)] * 7
assert sleep_mock.call_count == 6
sleep_mock.assert_any_call(1.875)
sleep_mock.assert_any_call(2.0)
sleep_mock.assert_any_call(4.375)
sleep_mock.assert_any_call(8.5)
sleep_mock.assert_any_call(16.5)
sleep_mock.assert_any_call(32.25)
def _make_response(status_code):
return mock.Mock(status_code=status_code, spec=["status_code"])
def _get_status_code(response):
return response.status_code
def _get_headers(response):
return response.headers
@pytest.mark.parametrize("checksum", ["md5", "crc32c", None])
def test__get_checksum_object(checksum):
checksum_object = _helpers._get_checksum_object(checksum)
checksum_types = {
"md5": type(hashlib.md5()),
"crc32c": type(_helpers._get_crc32c_object()),
None: type(None),
}
assert isinstance(checksum_object, checksum_types[checksum])
def test__get_checksum_object_invalid():
with pytest.raises(ValueError):
_helpers._get_checksum_object("invalid")
@mock.patch("builtins.__import__")
def test__get_crc32_object_wo_google_crc32c_wo_crcmod(mock_import):
mock_import.side_effect = ImportError("testing")
with pytest.raises(ImportError):
_helpers._get_crc32c_object()
expected_calls = [
mock.call("google_crc32c", mock.ANY, None, None, 0),
mock.call("crcmod", mock.ANY, None, None, 0),
]
mock_import.assert_has_calls(expected_calls)
@mock.patch("builtins.__import__")
def test__get_crc32_object_w_google_crc32c(mock_import):
google_crc32c = mock.Mock(spec=["Checksum"])
mock_import.return_value = google_crc32c
found = _helpers._get_crc32c_object()
assert found is google_crc32c.Checksum.return_value
google_crc32c.Checksum.assert_called_once_with()
mock_import.assert_called_once_with("google_crc32c", mock.ANY, None, None, 0)
@mock.patch("builtins.__import__")
def test__get_crc32_object_wo_google_crc32c_w_crcmod(mock_import):
crcmod = mock.Mock(spec=["predefined", "crcmod"])
crcmod.predefined = mock.Mock(spec=["Crc"])
crcmod.crcmod = mock.Mock(spec=["_usingExtension"])
mock_import.side_effect = [ImportError("testing"), crcmod, crcmod.crcmod]
found = _helpers._get_crc32c_object()
assert found is crcmod.predefined.Crc.return_value
crcmod.predefined.Crc.assert_called_once_with("crc-32c")
expected_calls = [
mock.call("google_crc32c", mock.ANY, None, None, 0),
mock.call("crcmod", mock.ANY, None, None, 0),
mock.call("crcmod.crcmod", mock.ANY, {}, ["_usingExtension"], 0),
]
mock_import.assert_has_calls(expected_calls)
@pytest.mark.filterwarnings("ignore::RuntimeWarning")
@mock.patch("builtins.__import__")
def test__is_fast_crcmod_wo_extension_warning(mock_import):
crcmod = mock.Mock(spec=["crcmod"])
crcmod.crcmod = mock.Mock(spec=["_usingExtension"])
crcmod.crcmod._usingExtension = False
mock_import.return_value = crcmod.crcmod
assert not _helpers._is_fast_crcmod()
mock_import.assert_called_once_with(
"crcmod.crcmod",
mock.ANY,
{},
["_usingExtension"],
0,
)
@mock.patch("builtins.__import__")
def test__is_fast_crcmod_w_extension(mock_import):
crcmod = mock.Mock(spec=["crcmod"])
crcmod.crcmod = mock.Mock(spec=["_usingExtension"])
crcmod.crcmod._usingExtension = True
mock_import.return_value = crcmod.crcmod
assert _helpers._is_fast_crcmod()
def test__DoNothingHash():
do_nothing_hash = _helpers._DoNothingHash()
return_value = do_nothing_hash.update(b"some data")
assert return_value is None
class Test__get_expected_checksum(object):
@pytest.mark.parametrize("template", ["crc32c={},md5={}", "crc32c={}, md5={}"])
@pytest.mark.parametrize("checksum", ["md5", "crc32c"])
@mock.patch("google.resumable_media._helpers._LOGGER")
def test__w_header_present(self, _LOGGER, template, checksum):
checksums = {"md5": "b2twdXNodGhpc2J1dHRvbg==", "crc32c": "3q2+7w=="}
header_value = template.format(checksums["crc32c"], checksums["md5"])
headers = {_helpers._HASH_HEADER: header_value}
response = _mock_response(headers=headers)
def _get_headers(response):
return response.headers
url = "https://example.com/"
expected_checksum, checksum_obj = _helpers._get_expected_checksum(
response, _get_headers, url, checksum_type=checksum
)
assert expected_checksum == checksums[checksum]
checksum_types = {
"md5": type(hashlib.md5()),
"crc32c": type(_helpers._get_crc32c_object()),
}
assert isinstance(checksum_obj, checksum_types[checksum])
_LOGGER.info.assert_not_called()
@pytest.mark.parametrize("checksum", ["md5", "crc32c"])
@mock.patch("google.resumable_media._helpers._LOGGER")
def test__w_header_missing(self, _LOGGER, checksum):
headers = {}
response = _mock_response(headers=headers)
def _get_headers(response):
return response.headers
url = "https://example.com/"
expected_checksum, checksum_obj = _helpers._get_expected_checksum(
response, _get_headers, url, checksum_type=checksum
)
assert expected_checksum is None
assert isinstance(checksum_obj, _helpers._DoNothingHash)
expected_msg = _helpers._MISSING_CHECKSUM.format(
url, checksum_type=checksum.upper()
)
_LOGGER.info.assert_called_once_with(expected_msg)
class Test__parse_checksum_header(object):
CRC32C_CHECKSUM = "3q2+7w=="
MD5_CHECKSUM = "c2l4dGVlbmJ5dGVzbG9uZw=="
def test_empty_value(self):
header_value = None
response = None
md5_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="md5"
)
assert md5_header is None
crc32c_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="crc32c"
)
assert crc32c_header is None
def test_crc32c_only(self):
header_value = "crc32c={}".format(self.CRC32C_CHECKSUM)
response = None
md5_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="md5"
)
assert md5_header is None
crc32c_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="crc32c"
)
assert crc32c_header == self.CRC32C_CHECKSUM
def test_md5_only(self):
header_value = "md5={}".format(self.MD5_CHECKSUM)
response = None
md5_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="md5"
)
assert md5_header == self.MD5_CHECKSUM
crc32c_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="crc32c"
)
assert crc32c_header is None
def test_both_crc32c_and_md5(self):
header_value = "crc32c={},md5={}".format(
self.CRC32C_CHECKSUM, self.MD5_CHECKSUM
)
response = None
md5_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="md5"
)
assert md5_header == self.MD5_CHECKSUM
crc32c_header = _helpers._parse_checksum_header(
header_value, response, checksum_label="crc32c"
)
assert crc32c_header == self.CRC32C_CHECKSUM
def test_md5_multiple_matches(self):
another_checksum = "eW91IGRpZCBXQVQgbm93Pw=="
header_value = "md5={},md5={}".format(self.MD5_CHECKSUM, another_checksum)
response = mock.sentinel.response
with pytest.raises(common.InvalidResponse) as exc_info:
_helpers._parse_checksum_header(
header_value, response, checksum_label="md5"
)
error = exc_info.value
assert error.response is response
assert len(error.args) == 3
assert error.args[1] == header_value
assert error.args[2] == [self.MD5_CHECKSUM, another_checksum]
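# Naive stand-in for the header grammar exercised above (illustrative sketch,
# not the real _helpers._parse_checksum_header): the header value is a
# comma-separated list of `label=base64digest` pairs.
def _parse_checksum_sketch(header_value, label):
    matches = [part.split("=", 1)[1] for part in header_value.split(",")
               if part.strip().startswith(label + "=")]
    return matches[0] if len(matches) == 1 else None
assert _parse_checksum_sketch("crc32c=3q2+7w==,md5=dGVzdA==", "md5") == "dGVzdA=="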
def _mock_response(headers):
return mock.Mock(
headers=headers,
status_code=200,
spec=["status_code", "headers"],
)
| 35.99865 | 88 | 0.661331 | 3,242 | 26,675 | 5.124614 | 0.083282 | 0.033105 | 0.015168 | 0.018418 | 0.760624 | 0.7277 | 0.681714 | 0.656194 | 0.6329 | 0.608824 | 0 | 0.025268 | 0.240375 | 26,675 | 740 | 89 | 36.047297 | 0.79465 | 0.020544 | 0 | 0.586087 | 0 | 0 | 0.045839 | 0.008923 | 0 | 0 | 0 | 0 | 0.236522 | 1 | 0.092174 | false | 0 | 0.052174 | 0.012174 | 0.170435 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
76d04f97b6032cc3ab2e2693e3984f670a1d75ae | 200 | py | Python | utils/check.py | Dams4K/MyLastDiscordBot | d998e12182bb98db464c6cd2efaa333fca6e8e4f | [
"MIT"
] | null | null | null | utils/check.py | Dams4K/MyLastDiscordBot | d998e12182bb98db464c6cd2efaa333fca6e8e4f | [
"MIT"
] | null | null | null | utils/check.py | Dams4K/MyLastDiscordBot | d998e12182bb98db464c6cd2efaa333fca6e8e4f | [
"MIT"
] | null | null | null | # Function to check whether a string can be converted to an integer
def can_convert_to_int(msg: str) -> bool:
try:
int(msg)
return True
    except ValueError:
        return False
 | 28.571429 | 79 | 0.66 | 29 | 200 | 4.448276 | 0.896552 | 0.093023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.285 | 200 | 7 | 80 | 28.571429 | 0.902098 | 0.385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
76d4f9bcb99ab593407d1ba2f8814fd679b2fe75 | 9,724 | py | Python | bindings/python/cntk/layers/higher_order_layers.py | ArpitSisodia/CNTK | 11732625b3d7b5bca1f447c4790e8c8f10de45e1 | [
"RSA-MD"
] | null | null | null | bindings/python/cntk/layers/higher_order_layers.py | ArpitSisodia/CNTK | 11732625b3d7b5bca1f447c4790e8c8f10de45e1 | [
"RSA-MD"
] | null | null | null | bindings/python/cntk/layers/higher_order_layers.py | ArpitSisodia/CNTK | 11732625b3d7b5bca1f447c4790e8c8f10de45e1 | [
"RSA-MD"
] | 1 | 2020-12-24T14:50:54.000Z | 2020-12-24T14:50:54.000Z | # ==============================================================================
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
'''
higher_order_layers -- higher-order functions, like Sequential() and ResNetBlock().
Note that sequential higher-order functions like Recurrence() are in sequence.py.
'''
from ..utils import Record
from .blocks import *
from .blocks import _initializer_for, _get_initial_state_or_default, _INFERRED, _inject_name
from .sequence import * # they are also higher-order functions
from .typing import *
# TODO: should we have a parameter to specify the arity of the input?
# Can it be automatically determined? (yes, unless the first function is a tuple, then we don't know whether to broadcast or not)
def Sequential(layers, name=''):
'''
Sequential(layers, name='')
Layer factory function to create a composite that applies a sequence of layers (or any functions) onto an input.
``Sequential ([F, G, H])(x)`` means the same as ``H(G(F(x)))``.
The list of functions may also include tuples of functions. In that case, each function
in a tuple is applied to the input, and the result is a tuple containing the results of
these function applications. If followed by another function (typ. ``plus`` or ``splice``),
the tuple items form the arguments to that function.
Intermediate values in the chain can be accessed by name by inserting a ``Label(name=...)`` layer.
Note: An equivalent way of writing ``Sequential ([F, G, H])(x)`` is ``F >> G >> H``.
Example:
>>> from cntk.layers import *
>>> # sequence classifier. Maps a one-hot word sequence to a scalar probability value.
>>> # The recurrence is a Fold(), meaning only the final hidden state is produced.
>>> # The Label() layer allows to access the final hidden layer by name.
>>> model = Sequential([Embedding(300), Fold(LSTM(500)), Label('hidden'), Dense(1, activation=sigmoid)])
>>> model.update_signature(Sequence[Tensor[30000]])
>>> model.hidden.shape
(500,)
>>> # simple example that squares an input value
>>> f = Sequential([log, lambda x: 2 * x, exp]) # the second function is a Python lambda
>>> f.update_signature(1)
>>> f([np.array([2])]) # log, times 2, exp is the same as computing the square
array([[ 4.]], dtype=float32)
>>> # using function tuples to implement a bidirectional LSTM
>>> bi_lstm = Sequential([(Recurrence(LSTM(250)), # first tuple entry: forward pass
... Recurrence(LSTM(250), go_backwards=True)), # second: backward pass
... splice]) # splice both on top of each other
>>> # using function tuple to implement a ResNet block
>>> # The function tuple applies all items to the input, and emits a tuple with the results
>>> # that then act as the arguments to the next one.
>>> # Here we say (Convolution(), identity), which generates two arguments to the next function,
>>> # the first being the convolution, the second being the input passed through.
>>> # Following that with plus() implements the ResNet formula.
>>> from cntk.ops import plus, relu
>>> resnet_layer = Sequential([(Convolution((3,3), 64, activation=None), # first tuple entry
... identity), # second tuple entry is a pass-through
... plus, # this sums both
... relu]) # activation applied afterwards
>>> # simple function-tuples example with values
>>> f = Sequential([(lambda x: x * x, identity), splice]) # computes tuple (x^2, x) and splices both values
>>> f.update_signature(1)
>>> f([np.array([2])])
array([[ 4., 2.]], dtype=float32)
Args:
layers (list of :class:`~cntk.ops.functions.Function`, equivalent Python functions, tuples of functions, or lists thereof): the list of functions to apply in sequence.
      A tuple applies each of its items to the input and results in a tuple value.
An item that is a list will be flattened.
Returns:
cntk.ops.functions.Function:
A function that accepts one argument and applies the given ``functions`` one after another.
'''
if not isinstance(layers, list): # to support nested lists, run every item recursively through Sequential()
# TODO: Is this confusing w.r.t. tuple which is parallel and list which is sequential?
return layers
from functools import reduce
layers = [Sequential(layer) for layer in layers] # expand all layers recursively
composed_function = reduce(lambda f, g: f >> g, layers, identity)
return _inject_name(composed_function, name)
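# Plain-Python sketch of the composition built above (illustrative only; real
# CNTK Functions chain with >> rather than plain call):
def _compose_sketch(fns):
    from functools import reduce
    return reduce(lambda f, g: (lambda x: g(f(x))), fns, lambda x: x)
assert _compose_sketch([abs, lambda x: x * x])(-3) == 9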
def For(what_range, constructor, name=''):
'''
For(what_range, constructor, name='')
Layer factory function to create a composite through a pattern similar to Python's `for` statement.
This layer factory loops over the given range and passes each value to the constructor function.
It is equivalent to
``Sequential([constructor(i) for i in what_range])``.
It is acceptable that ``constructor`` takes no argument.
Example:
>>> from cntk.layers import *
>>> from cntk.ops import relu
>>> # stack of 3 Dense relu layers
>>> model = For(range(3), lambda: Dense(2000, activation=relu))
>>> # version of the above that has no activation for the last layer
>>> model = For(range(3), lambda i: Dense(2000, activation=relu if i < 2 else identity))
>>> # complex example that uses For() inside Sequential()
>>> with default_options(activation=relu, pad=True): # default activation is relu
... model = Sequential([
... For(range(2), lambda : [
... Convolution2D((3,3), 64),
... Convolution2D((3,3), 64),
... MaxPooling((3,3), strides=2)
... ]),
... Label('ndfeat'), # name this specific value
... For(range(2), lambda i: [ # this passes a nested list to Sequential
... Dense([256,128][i]), # layer index i used to index into an array of parameters
... Dropout(0.5)
... ]),
... Label('hidden'),
... Dense(10, activation=None) # activation parameter overrides default (which was set to relu)
... ])
>>> model.update_signature((3,32,32)) # RGB, 32 x 32 pixels
>>> model.ndfeat.shape # shape at top of convo/pooling pyramid
(64, 8, 8)
>>> model.hidden.shape # shape before classifier
(128,)
Args:
what_range (range): a Python range to loop over
constructor (Python function/lambda with 1 or 0 arguments): lambda that constructs a layer
Returns:
cntk.ops.functions.Function:
A function that accepts one argument and applies the layers as constructed by ``constructor`` one after another.
'''
# Python 2.7 support requires us to use getargspec() instead of inspect
from inspect import getargspec
takes_arg = len(getargspec(constructor).args) > 0
# helper to call the layer constructor
def call(i):
if takes_arg:
return constructor(i) # takes an arg: pass it
else:
return constructor() # takes no arg: call without, that's fine too
layers = [call(i) for i in what_range]
sequential = Sequential(layers)
return _inject_name(sequential, name)
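# Sketch of the arity dispatch used by For() above (illustrative): a
# zero-argument constructor is called without the loop index, a one-argument
# constructor receives it.
from inspect import getargspec as _getargspec
assert len(_getargspec(lambda: None).args) == 0
assert len(_getargspec(lambda i: i).args) == 1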
# legacy name for For()
def LayerStack(N, constructor):
import warnings
warnings.warn('This will be removed in future versions. Please use '
'For(...) instead', DeprecationWarning)
return For(range(N), constructor)
def SequentialClique(functions, name=''):
'''
SequentialClique(functions, name='')
    Layer factory function to create a composite that applies a sequence of layers (or any functions) onto an input,
    with skip connections between all functions. I.e. each function receives a sum of the input and all
prior functions' outputs.
'''
def clique(x):
for f in functions:
out = f(x)
# BUGBUG: this should be a splice(), and it should be along depth.
# Interface to be finalized.
x = x + out
return out
clique = _inject_name(clique, name)
return clique
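# Numeric sketch of the clique wiring above (illustrative; plain values in
# place of CNTK tensors): each function sees the input plus all prior outputs.
def _clique_sketch(x, functions):
    for f in functions:
        out = f(x)
        x = x + out
    return out
assert _clique_sketch(2, [lambda v: 2 * v, lambda v: v * v]) == (2 + 4) ** 2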
# TODO: consider potential name clash; users might want to call their functions the same.
def ResNetBlock(f, name=''):
'''
ResNetBlock(f, name='')
Layer factory function to create a composite that adds a skip connection to a function.
This is equivalent to ``Sequential((f, identity), plus)``.
Example:
>>> # a ResNet layer
>>> from cntk.layers import *
>>> from cntk.ops import relu
>>> resnet_layer = Sequential([ResNetBlock(Convolution((3,3), 64, activation=None)), relu])
Args:
f (:class:`~cntk.ops.functions.Function` or equivalent Python function):
the function to add the skip connection to.
Returns:
cntk.ops.functions.Function:
A function that accepts one argument, applies ``f`` to it, and adds the original argument.
'''
def skip(x):
return f(x) + x
skip = _inject_name(skip, name)
return skip
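# Value-level sketch of the skip connection above (illustrative only):
# ResNetBlock(f)(x) behaves like f(x) + x.
assert (lambda f: (lambda x: f(x) + x))(lambda x: 2 * x)(5) == 15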
| 44.401826 | 173 | 0.615487 | 1,241 | 9,724 | 4.792909 | 0.288477 | 0.009415 | 0.01345 | 0.020175 | 0.159381 | 0.118023 | 0.094486 | 0.094486 | 0.078682 | 0.057162 | 0 | 0.014173 | 0.267174 | 9,724 | 218 | 174 | 44.605505 | 0.820376 | 0.114665 | 0 | 0 | 0 | 0 | 0.045093 | 0 | 0 | 0 | 0 | 0.013761 | 0 | 0 | null | null | 0 | 0.195122 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
76d5db4a099cdbe808a8e7e7f257cecd49122cf2 | 5,308 | py | Python | openclean/function/similarity/text.py | remram44/openclean-core | 8c09c8302cadbb3bb02c959907f91a3ae343f939 | [
"BSD-3-Clause"
] | 4 | 2021-04-20T09:06:26.000Z | 2021-11-20T20:31:28.000Z | openclean/function/similarity/text.py | remram44/openclean-core | 8c09c8302cadbb3bb02c959907f91a3ae343f939 | [
"BSD-3-Clause"
] | 14 | 2021-01-19T19:23:16.000Z | 2021-04-28T14:31:03.000Z | openclean/function/similarity/text.py | remram44/openclean-core | 8c09c8302cadbb3bb02c959907f91a3ae343f939 | [
"BSD-3-Clause"
] | 5 | 2021-08-24T11:57:21.000Z | 2022-03-17T04:39:04.000Z | # This file is part of the Data Cleaning Library (openclean).
#
# Copyright (C) 2018-2021 New York University.
#
# openclean is released under the Revised BSD License. See file LICENSE for
# full license details.
"""Collection of string similarity functions."""
from typing import Callable
import jellyfish
from openclean.function.similarity.base import SimilarityFunction
# -- Edit distance string similarity functions --------------------------------
class NormalizedEditDistance(SimilarityFunction):
"""String similarity function that is based on functions that compute an
edit distance between a pair of strings.
The similarity for a pair of strings based on edit distance is the defined
as (1 - normalized distance).
"""
def __init__(self, func: Callable):
"""Initialize the function that computes the edit distance between a
pair of strings.
Parameters
----------
func: callable
            Function that expects two strings as arguments.
"""
self.func = func
def sim(self, val_1: str, val_2: str) -> float:
"""Calculates the edit distance between two strings and returns the
similarity between them as (1 - normalized distance). The normalized
distance is the edit distance divided by the length of the longer of
the two strings.
Parameters
----------
val_1: string
Value 1
val_2: string
Value 2
Returns
-------
float
"""
edit_distance = self.func(val_1, val_2)
return 1 - (float(edit_distance) / max(len(val_1), len(val_2)))
class DamerauLevenshteinDistance(NormalizedEditDistance):
"""String similarity function that is based on the Damerau-Levenshtein
distance between two strings.
"""
def __init__(self):
"""Initialize the edit distance function in the super class."""
super(DamerauLevenshteinDistance, self).__init__(
func=jellyfish.damerau_levenshtein_distance
)
class HammingDistance(NormalizedEditDistance):
"""String similarity function that is based on the Hamming distance
between two strings.
"""
def __init__(self):
"""Initialize the edit distance function in the super class."""
super(HammingDistance, self).__init__(func=jellyfish.hamming_distance)
class LevenshteinDistance(NormalizedEditDistance):
"""String similarity function that is based on the Levenshtein distance
between two strings.
"""
def __init__(self):
"""Initialize the edit distance function in the super class."""
super(LevenshteinDistance, self).__init__(func=jellyfish.levenshtein_distance)
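# Worked example of the normalization above (illustrative): "kitten" and
# "sitting" have Levenshtein distance 3 and the longer string has length 7,
# so the similarity is 1 - 3/7.
assert abs(LevenshteinDistance().sim("kitten", "sitting") - (1 - 3.0 / 7)) < 1e-9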
# -- String similarity functions ----------------------------------------------
class StringSimilarityFunction(SimilarityFunction):
"""Wrapper for existing string similarity functions that compute the
similarity between a pair of strings as a float in the interval [0-1].
"""
def __init__(self, func: Callable):
"""Initialize the function that computes similatiry between a
pair of strings.
Parameters
----------
func: callable
            Function that expects two strings as arguments.
"""
self.func = func
def sim(self, val_1: str, val_2: str) -> float:
"""Calculate the similarity beween the given pair of strings.
Parameters
----------
val_1: string
Value 1
val_2: string
Value 2
Returns
-------
float
"""
return self.func(val_1, val_2)
class JaroSimilarity(StringSimilarityFunction):
"""String similarity function that is based on the Jaro similarity
between two strings.
"""
def __init__(self):
"""Initialize the edit distance function in the super class."""
super(JaroSimilarity, self).__init__(func=jellyfish.jaro_similarity)
class JaroWinklerSimilarity(StringSimilarityFunction):
"""String similarity function that is based on the Jaro-Winkler distance
between two strings.
"""
def __init__(self):
"""Initialize the edit distance function in the super class."""
super(JaroWinklerSimilarity, self).__init__(
func=jellyfish.jaro_winkler_similarity
)
# -- Match Rating Approach ----------------------------------------------------
class MatchRatingComparison(SimilarityFunction):
"""String similarity function that is based on the match rating algorithm
that returns True if two strings are considered equivalent and False
otherwise.
To return a value in the interval of [0-1] a match rating result of True is
translated to 1 and the result False is translated to 0.
"""
def sim(self, val_1: str, val_2: str) -> float:
"""Use Match rating approach to compare the given strings.
        Returns 1 if the match rating algorithm considers the given strings as
equivalent and 0 otherwise.
Parameters
----------
val_1: string
Value 1
val_2: string
Value 2
Returns
-------
float
"""
return 1 if jellyfish.match_rating_comparison(val_1, val_2) else 0
| 31.408284 | 86 | 0.641861 | 602 | 5,308 | 5.528239 | 0.20598 | 0.046875 | 0.036058 | 0.058894 | 0.509315 | 0.465144 | 0.455529 | 0.442007 | 0.408053 | 0.352163 | 0 | 0.011923 | 0.257347 | 5,308 | 168 | 87 | 31.595238 | 0.832319 | 0.550867 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.277778 | false | 0 | 0.083333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
76e1105588c2c985a2c9a76da9dc537ac08efc4b | 1,204 | py | Python | optimus_manager/envs.py | matejkrajcovic/optimus-manager | 8b281e3e17d1ba1b565343dfb255b5404eac909a | [
"MIT"
] | null | null | null | optimus_manager/envs.py | matejkrajcovic/optimus-manager | 8b281e3e17d1ba1b565343dfb255b5404eac909a | [
"MIT"
] | null | null | null | optimus_manager/envs.py | matejkrajcovic/optimus-manager | 8b281e3e17d1ba1b565343dfb255b5404eac909a | [
"MIT"
] | null | null | null | VERSION = "1.1"
SOCKET_PATH = "/tmp/optimus-manager"
SOCKET_TIMEOUT = 1.0
STARTUP_MODE_VAR_PATH = "/var/lib/optimus-manager/startup_mode"
REQUESTED_MODE_VAR_PATH = "/var/lib/optimus-manager/requested_mode"
DPI_VAR_PATH = "/var/lib/optimus-manager/dpi"
TEMP_CONFIG_PATH_VAR_PATH = "/var/lib/optimus-manager/temp_conf_path"
DEFAULT_STARTUP_MODE = "intel"
SYSTEM_CONFIGS_PATH = "/etc/optimus-manager/configs/"
XORG_CONF_PATH = "/etc/X11/xorg.conf.d/10-optimus-manager.conf"
DEFAULT_CONFIG_PATH = "/usr/share/optimus-manager.conf"
USER_CONFIG_PATH = "/etc/optimus-manager/optimus-manager.conf"
USER_CONFIG_COPY_PATH = "/var/lib/optimus-manager/config_copy.conf"
EXTRA_XORG_OPTIONS_INTEL_PATH = "/etc/optimus-manager/xorg-intel.conf"
EXTRA_XORG_OPTIONS_NVIDIA_PATH = "/etc/optimus-manager/xorg-nvidia.conf"
XSETUP_SCRIPT_INTEL = "/etc/optimus-manager/xsetup-intel.sh"
XSETUP_SCRIPT_NVIDIA = "/etc/optimus-manager/xsetup-nvidia.sh"
LOG_DIR_PATH = "/var/log/optimus-manager/"
BOOT_SETUP_LOGFILE_NAME = "boot_setup.log"
PRIME_SETUP_LOGFILE_NAME = "prime_setup.log"
GPU_SETUP_LOGFILE_NAME = "gpu_setup.log"
LOGGING_SEPARATOR_SUFFIX = " ==================== "
LOG_MAX_SIZE = 20000
LOG_CROPPED_SIZE = 10000
| 36.484848 | 72 | 0.787375 | 182 | 1,204 | 4.857143 | 0.291209 | 0.253394 | 0.115385 | 0.096154 | 0.278281 | 0.131222 | 0.070136 | 0 | 0 | 0 | 0 | 0.016043 | 0.068106 | 1,204 | 32 | 73 | 37.625 | 0.771836 | 0 | 0 | 0 | 0 | 0 | 0.491694 | 0.415282 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
76e61e71b23dacc311211b48febc9dbca6657c22 | 2,499 | py | Python | Dangerous/Golismero/tools/sqlmap/plugins/dbms/sybase/connector.py | JeyZeta/Dangerous- | 824ea6b571eda98bb855f176361e9b35dfda578e | [
"MIT"
] | null | null | null | Dangerous/Golismero/tools/sqlmap/plugins/dbms/sybase/connector.py | JeyZeta/Dangerous- | 824ea6b571eda98bb855f176361e9b35dfda578e | [
"MIT"
] | null | null | null | Dangerous/Golismero/tools/sqlmap/plugins/dbms/sybase/connector.py | JeyZeta/Dangerous- | 824ea6b571eda98bb855f176361e9b35dfda578e | [
"MIT"
] | 1 | 2018-07-04T18:35:16.000Z | 2018-07-04T18:35:16.000Z | #!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
try:
import _mssql
import pymssql
except ImportError:
pass
import logging
from lib.core.convert import utf8encode
from lib.core.data import conf
from lib.core.data import logger
from lib.core.exception import SqlmapConnectionException
from plugins.generic.connector import Connector as GenericConnector
class Connector(GenericConnector):
"""
Homepage: http://pymssql.sourceforge.net/
User guide: http://pymssql.sourceforge.net/examples_pymssql.php
API: http://pymssql.sourceforge.net/ref_pymssql.php
Debian package: python-pymssql
License: LGPL
Possible connectors: http://wiki.python.org/moin/SQL%20Server
Important note: pymssql library on your system MUST be version 1.0.2
to work, get it from http://sourceforge.net/projects/pymssql/files/pymssql/1.0.2/
"""
def __init__(self):
GenericConnector.__init__(self)
def connect(self):
self.initConnection()
try:
self.connector = pymssql.connect(host="%s:%d" % (self.hostname, self.port), user=self.user, password=self.password, database=self.db, login_timeout=conf.timeout, timeout=conf.timeout)
except pymssql.OperationalError, msg:
raise SqlmapConnectionException(msg)
self.initCursor()
self.printConnected()
def fetchall(self):
try:
return self.cursor.fetchall()
except (pymssql.ProgrammingError, pymssql.OperationalError, _mssql.MssqlDatabaseException), msg:
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " "))
return None
def execute(self, query):
retVal = False
try:
self.cursor.execute(utf8encode(query))
retVal = True
except (pymssql.OperationalError, pymssql.ProgrammingError), msg:
logger.log(logging.WARN if conf.dbmsHandler else logging.DEBUG, "(remote) %s" % str(msg).replace("\n", " "))
except pymssql.InternalError, msg:
raise SqlmapConnectionException(msg)
return retVal
def select(self, query):
retVal = None
if self.execute(query):
retVal = self.fetchall()
try:
self.connector.commit()
except pymssql.OperationalError:
pass
return retVal
| 30.47561 | 195 | 0.663465 | 283 | 2,499 | 5.812721 | 0.438163 | 0.039514 | 0.026748 | 0.045593 | 0.119149 | 0.093617 | 0.093617 | 0.093617 | 0.093617 | 0.093617 | 0 | 0.009375 | 0.231693 | 2,499 | 81 | 196 | 30.851852 | 0.847396 | 0.008003 | 0 | 0.276596 | 0 | 0 | 0.017396 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.06383 | 0.191489 | null | null | 0.021277 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
76f4d5c3043090b1da80e3e32b5d3cc6ffb56c9d | 3,310 | py | Python | backend/src/webhooks/api.py | ggcarrots/HighFive | f8610c30240fd80cf45a4147d4e6237aa9d3f82c | [
"MIT"
] | 1 | 2019-06-08T09:15:18.000Z | 2019-06-08T09:15:18.000Z | backend/src/webhooks/api.py | ggcarrots/HighFive | f8610c30240fd80cf45a4147d4e6237aa9d3f82c | [
"MIT"
] | 13 | 2020-09-04T23:28:00.000Z | 2022-03-02T04:18:43.000Z | backend/src/webhooks/api.py | ggcarrots/HighFive | f8610c30240fd80cf45a4147d4e6237aa9d3f82c | [
"MIT"
] | null | null | null | # import itertools
# from django.db import transaction
# from django.http import HttpResponse
# from rest_framework.exceptions import ValidationError
# from rest_framework.response import Response
# from rest_framework.viewsets import GenericViewSet
# from rest_framework.viewsets import ModelViewSet
#
# from utils.serializers import EmptySerializer
# from webhooks.dialogflow import talk_to_assistant
# from webhooks.messenger import send_message
# from webhooks.models import FacebookPage
# from webhooks.models import Message
# from webhooks.models import Topic
# from webhooks.serializers import TopicSerializer
#
#
# class FacebookWebHookAPI(GenericViewSet):
# serializer_class = EmptySerializer
#
# def get_queryset(self):
# """
# Stub get queryset. Just to make router work
# """
# return FacebookPage.objects.none()
#
# def list(self, request, *args, **kwargs):
# """
# Verification Endpoint.
# """
# challenge = request.GET.get("hub.challenge")
# verify_token = request.GET.get("hub.verify_token")
#
# page: FacebookPage = FacebookPage.objects.filter(verify_token=verify_token).first()
# if not page:
#             raise ValidationError("Invalid token. Page not found.")
#
# if not page.verify_token.strip() == verify_token.strip():
# raise ValidationError("Page for given token not found.")
#
# page.set_as_verified()
# return HttpResponse(challenge.encode())
#
# def create(self, request, *args, **kwargs):
# entry = request.data['entry']
# all_messaging = [obj['messaging'] for obj in entry]
# messages = itertools.chain(*all_messaging)
#
# for msg in messages:
# self.handle_message(msg)
#
# return Response(status=204)
#
# @transaction.atomic()
# def handle_message(self, msg):
# content = msg['message']
# sender_id = msg['sender']['id']
# if not content:
# return
#
# if content.get('is_echo'):
# return
#
# if content.get('attachments'):
# ...
#
# # FB can send message multiple times, mid can be used to check duplicates
# # message['mid']
#
# text = content.get('text')
#
# topic: Topic
# topic, _ = Topic.objects.get_or_create(initiator_id=sender_id)
#
# Message.objects.create(
# topic=topic,
# text=text,
# is_author_consultant=False,
# is_author_bot=False,
# is_author_customer=True,
# )
#
# if not topic.dialogflow_sessions_id:
# topic.set_dialogflow_session_id()
#
# bot_response = talk_to_assistant(text, topic.dialogflow_sessions_id)
#
# Message.objects.create(
# topic=topic,
# text=bot_response,
# is_author_consultant=False,
# is_author_bot=True,
# is_author_customer=False,
# )
#
# # TODO
# page: FacebookPage = FacebookPage.objects.first()
# send_message(page.page_access_token, sender_id, bot_response)
#
# print('Got message:', msg)
#
#
# class TopicAPI(ModelViewSet):
# queryset = Topic.objects.all()
# serializer_class = TopicSerializer
| 31.226415 | 93 | 0.623263 | 348 | 3,310 | 5.764368 | 0.333333 | 0.035892 | 0.033898 | 0.035892 | 0.131605 | 0.069791 | 0.069791 | 0 | 0 | 0 | 0 | 0.001235 | 0.266163 | 3,310 | 105 | 94 | 31.52381 | 0.824619 | 0.935347 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0.009524 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
76fc66fcfbe1e56ee89003dc2ebae5e8b0e2d915 | 229 | py | Python | python/simpleaudio_test.py | aapris/ReedWall | b2c3d2d3a390fdc463aaeeb3ab70433f4f2b0fcb | [
"MIT"
] | null | null | null | python/simpleaudio_test.py | aapris/ReedWall | b2c3d2d3a390fdc463aaeeb3ab70433f4f2b0fcb | [
"MIT"
] | null | null | null | python/simpleaudio_test.py | aapris/ReedWall | b2c3d2d3a390fdc463aaeeb3ab70433f4f2b0fcb | [
"MIT"
] | null | null | null | import simpleaudio as sa
import time
import sys
wave_obj = sa.WaveObject.from_wave_file(sys.argv[1])
#for i in range(1000):
#play_obj = wave_obj.play()
#time.sleep(0.001)
play_obj = wave_obj.play()
play_obj.wait_done()
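# Optional variation (illustrative; mirrors the commented-out loop above):
#for _ in range(3):
#    wave_obj.play().wait_done()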
| 19.083333 | 52 | 0.729258 | 41 | 229 | 3.853659 | 0.585366 | 0.132911 | 0.139241 | 0.177215 | 0.227848 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045918 | 0.144105 | 229 | 11 | 53 | 20.818182 | 0.760204 | 0.279476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
0a0607a0550f50feffb09736a2d865719c2229cd | 4,509 | py | Python | python/ip_generator.py | lduf/nas-netwk-gen | d0e0990d90165a49a79fb09d383e634211368f38 | [
"MIT"
] | null | null | null | python/ip_generator.py | lduf/nas-netwk-gen | d0e0990d90165a49a79fb09d383e634211368f38 | [
"MIT"
] | 1 | 2022-01-26T11:57:45.000Z | 2022-01-26T11:57:45.000Z | python/ip_generator.py | lduf/nas-netwk-gen | d0e0990d90165a49a79fb09d383e634211368f38 | [
"MIT"
] | null | null | null |
#!/usr/bin/python
import json
import sys
import argparse
#import tabulate

"""
This file is used to generate ip addresses for a given router.
"""

parser = argparse.ArgumentParser(description='Run ip generation algorithm')
parser.add_argument('-f', '--topology_file', type=str, help='give the topology file name (default : topology.json)', metavar='', default="topology.json")
args = parser.parse_args()


def read_data(filename):
    with open(filename) as json_file:
        data = json.load(json_file)
    return data


def write_data(filename, data):
    with open(filename, 'w') as f:
        json.dump(data, f, indent=4)


def add_parameters_interface(data_json):
    for router in data_json:
        for interface in data_json[router]["interfaces"]:
            if "parameters" not in data_json[router]["interfaces"][interface]:
                data_json[router]["interfaces"][interface]["parameters"] = {}
            if "protocols" not in data_json[router]["interfaces"][interface]:
                data_json[router]["interfaces"][interface]["protocols"] = ["ip_address"]
            # add the ip_address
            data_json[router]["interfaces"][interface]["parameters"]["ip_address"] = data_json[router]["interfaces"][interface]["ip"]["ip_address"]
            # add the mask
            data_json[router]["interfaces"][interface]["parameters"]["mask"] = data_json[router]["interfaces"][interface]["ip"]["mask"]
            # add the interface name
            data_json[router]["interfaces"][interface]["parameters"]["interface_name"] = interface
            if "ip_address" not in data_json[router]["interfaces"][interface]["protocols"]:
                data_json[router]["interfaces"][interface]["protocols"].insert(0, "ip_address")
    return data_json


def generate_ip_topology(topology_file):
    ip_base = "10.0.{}.{}"
    netmask = "255.255.255.0"
    loopback_netmask = "255.255.255.255"
    subdomain = 1
    data_json = read_data(topology_file)
    # for each router, walk its interfaces and assign an IP address to each
    for router in data_json.keys():
        # extract the router number
        num_router_act = int(router[1:])
        # for each interface
        for interface in data_json[router]["interfaces"]:
            if "neighbor" in data_json[router]["interfaces"][interface]:
                # gather information about the neighbor
                router_neighbor = data_json[router]["interfaces"][interface]["neighbor"]
                router_neighbor_interface = router_neighbor["interface"]
                router_neighbor_name = router_neighbor["name"]
                num_router_neighbor = int(router_neighbor["name"][1:])
                if len(data_json[router]["interfaces"][interface]["ip"]) == 0:
                    # set ip_address and mask on the current router
                    data_json[router]["interfaces"][interface]["ip"]["ip_address"] = ip_base.format(subdomain, num_router_act)
                    data_json[router]["interfaces"][interface]["ip"]["mask"] = netmask
                    # set ip_address and mask on the neighbor router
                    data_json[router_neighbor_name]["interfaces"][router_neighbor_interface]["ip"]["ip_address"] = ip_base.format(subdomain, num_router_neighbor)
                    data_json[router_neighbor_name]["interfaces"][router_neighbor_interface]["ip"]["mask"] = netmask
                    # increment the subdomain number
                    subdomain += 1
        if "Loopback0" not in data_json[router]["interfaces"]:
            # add the loopback
            data_json[router]["interfaces"]["Loopback0"] = {}
            #data_json[router]["interfaces"]["Loopback0"]["parameters"] = {}
            data_json[router]["interfaces"]["Loopback0"]["ip"] = {}
            data_json[router]["interfaces"]["Loopback0"]["ip"]["ip_address"] = "{0}.{0}.{0}.{0}".format(num_router_act)
            data_json[router]["interfaces"]["Loopback0"]["ip"]["mask"] = loopback_netmask
    data_json = add_parameters_interface(data_json)
    write_data(topology_file, data_json)
    return data_json


if __name__ == '__main__':
    # Read the command requirements from the command.json file
    filename = args.topology_file
    # topology = matrix_topology(read_data(filename))
    # Create the API to generate the GNS3 configurations command
    # print(get_ip_topology(filename))
    data_json = generate_ip_topology(filename)
| 45.545455 | 161 | 0.641827 | 522 | 4,509 | 5.329502 | 0.224138 | 0.106398 | 0.130841 | 0.207045 | 0.506111 | 0.405823 | 0.278577 | 0.180086 | 0.126887 | 0.126887 | 0 | 0.012303 | 0.224884 | 4,509 | 99 | 162 | 45.545455 | 0.783691 | 0.149257 | 0 | 0.070175 | 1 | 0 | 0.192051 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.070175 | false | 0 | 0.052632 | 0 | 0.175439 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
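For orientation, a minimal shape of the topology.json this generator appears to expect, written as a Python dict; the key names mirror the code above (routers must be named "R<number>" since the code does int(router[1:])), while the interface names and the empty "ip" dicts (filled in by the generator) are illustrative assumptions:

topology = {
    "R1": {
        "interfaces": {
            "g1/0": {
                "ip": {},
                "neighbor": {"name": "R2", "interface": "g2/0"},
            },
        },
    },
    "R2": {
        "interfaces": {
            "g2/0": {
                "ip": {},
                "neighbor": {"name": "R1", "interface": "g1/0"},
            },
        },
    },
}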
0a068ff42cdfed9bb423acf1d1da8ad3910548c4 | 198 | py | Python | desafios py/desafio-014.py | caiovfelipe/Curso_PYTHON | c9fd092bfd8857478e94e7cad99ccb0658af4e6a | [
"MIT"
] | null | null | null | desafios py/desafio-014.py | caiovfelipe/Curso_PYTHON | c9fd092bfd8857478e94e7cad99ccb0658af4e6a | [
"MIT"
] | null | null | null | desafios py/desafio-014.py | caiovfelipe/Curso_PYTHON | c9fd092bfd8857478e94e7cad99ccb0658af4e6a | [
"MIT"
] | null | null | null |
c = float(input('A temperatura em C°: '))
#f = ((9*c)/5)+32
f = 9*c/5+32
# We don't need the parentheses because of operator precedence!!
print('A temperatura em {} °C é {} °F '.format(c, f))
| 33 | 70 | 0.631313 | 41 | 198 | 3.121951 | 0.609756 | 0.1875 | 0.21875 | 0.0625 | 0.09375 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04878 | 0.171717 | 198 | 5 | 71 | 39.6 | 0.713415 | 0.429293 | 0 | 0 | 0 | 0 | 0.468468 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
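A quick worked instance of the formula, with the inverse added for reference (the inverse is not part of the exercise above):

# worked example: c = 30.0  ->  f = 9*30.0/5 + 32 = 54.0 + 32 = 86.0
# inverse conversion: c = (f - 32) * 5/9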
0a1054e2dd24d22718249c5e811cb240cf18d4a1 | 4,826 | py | Python | pili/auth/forms.py | pilosus/pili | 8eb51e79420b7a2e4148f3b819e787cf6711e8cd | [
"MIT"
] | 2 | 2019-12-22T13:05:08.000Z | 2020-02-02T13:05:31.000Z | pili/auth/forms.py | pilosus/pili | 8eb51e79420b7a2e4148f3b819e787cf6711e8cd | [
"MIT"
] | 71 | 2016-10-31T15:41:10.000Z | 2022-03-21T14:26:22.000Z | pili/auth/forms.py | pilosus/pili | 8eb51e79420b7a2e4148f3b819e787cf6711e8cd | [
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import (
    BooleanField,
    PasswordField,
    SelectField,
    StringField,
    SubmitField,
    ValidationError,
)
from wtforms.validators import DataRequired, Email, EqualTo, Length, Regexp

from pili.jinja_filters import permissions2str
from pili.models import Role, User


class LoginForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Keep me logged in')
    submit = SubmitField('Log In')


class RegistrationForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    username = StringField(
        'Username',
        validators=[
            DataRequired(),
            Length(1, 64),
            Regexp(
                '^[A-Za-z][A-Za-z0-9_.]*$',
                0,
                'Usernames must have only letters, ' 'numbers, dots or underscores',
            ),
        ],
    )
    password = PasswordField(
        'Password',
        validators=[
            DataRequired(),
            EqualTo('password2', message='Passwords must match.'),
        ],
    )
    password2 = PasswordField('Confirm password', validators=[DataRequired()])
    submit = SubmitField('Register')

    def validate_email(self, field):
        if User.query.filter_by(email=field.data).first():
            raise ValidationError('Email already registered.')

    def validate_username(self, field):
        if User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already in use.')


class ChangePasswordForm(FlaskForm):
    old_password = PasswordField('Old password', validators=[DataRequired()])
    password = PasswordField(
        'New password',
        validators=[
            DataRequired(),
            EqualTo('password2', message='Passwords must match'),
        ],
    )
    password2 = PasswordField('Passwords must match', validators=[DataRequired()])
    submit = SubmitField('Update Password')


class PasswordResetRequestForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    submit = SubmitField('Reset Password')


class PasswordResetForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    password = PasswordField(
        'New password',
        validators=[
            DataRequired(),
            EqualTo('password2', message='Passwords must match'),
        ],
    )
    password2 = PasswordField('Confirm password', validators=[DataRequired()])
    submit = SubmitField('Reset Password')

    def validate_email(self, field):
        if User.query.filter_by(email=field.data).first() is None:
            raise ValidationError('Unknown email address.')


class InviteRequestForm(FlaskForm):
    email = StringField('Email', validators=[DataRequired(), Length(1, 64), Email()])
    role = SelectField('Role', coerce=int, default=1)
    submit = SubmitField('Invite')

    def __init__(self, *args, **kwargs):
        super(InviteRequestForm, self).__init__(*args, **kwargs)
        self.role.choices = [
            (role.id, "{0} [{1}]".format(role.name, permissions2str(role.permissions)))
            for role in Role.query.order_by(Role.name).all()
            if role.name != 'Administrator'
        ]

    def validate_email(self, field):
        if User.query.filter_by(email=field.data).first():
            raise ValidationError('Email already registered.')


class InviteAcceptForm(FlaskForm):
    username = StringField(
        'Username',
        validators=[
            DataRequired(),
            Length(1, 64),
            Regexp(
                '^[A-Za-z][A-Za-z0-9_.]*$',
                0,
                'Usernames must have only letters, ' 'numbers, dots or underscores',
            ),
        ],
    )
    password = PasswordField(
        'Password',
        validators=[
            DataRequired(),
            EqualTo('password2', message='Passwords must match.'),
        ],
    )
    password2 = PasswordField('Confirm password', validators=[DataRequired()])
    submit = SubmitField('Register')

    def validate_username(self, field):
        if User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already in use.')


class ChangeEmailForm(FlaskForm):
    email = StringField(
        'New Email', validators=[DataRequired(), Length(1, 64), Email()]
    )
    password = PasswordField('Password', validators=[DataRequired()])
    submit = SubmitField('Update Email Address')

    def validate_email(self, field):
        if User.query.filter_by(email=field.data).first():
            raise ValidationError('Email already registered.')
| 32.829932 | 87 | 0.624119 | 454 | 4,826 | 6.57489 | 0.231278 | 0.140034 | 0.100503 | 0.077722 | 0.683752 | 0.660972 | 0.660972 | 0.660972 | 0.660972 | 0.660972 | 0 | 0.011816 | 0.245959 | 4,826 | 146 | 88 | 33.054795 | 0.808464 | 0 | 0 | 0.622951 | 0 | 0 | 0.156444 | 0.009946 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057377 | false | 0.213115 | 0.040984 | 0 | 0.401639 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
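A minimal sketch of a view consuming LoginForm; the 'auth' blueprint, the template path, and the user's verify_password helper are assumptions for illustration, not taken from the file above:

from flask import render_template, redirect, url_for
from flask_login import login_user

@auth.route('/login', methods=['GET', 'POST'])  # 'auth' blueprint is assumed
def login():
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user is not None and user.verify_password(form.password.data):  # assumed helper
            login_user(user, form.remember_me.data)
            return redirect(url_for('main.index'))
    return render_template('auth/login.html', form=form)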
0a15019c5ea419df9c4e20afa909bb9353d88414 | 1,032 | py | Python | secret_parametrizer/migrations/0001_initial.py | DiogoBerti/secret_parametrizer | 04c728ad20d1251244331a105e78bc31bde9db57 | [
"MIT"
] | null | null | null | secret_parametrizer/migrations/0001_initial.py | DiogoBerti/secret_parametrizer | 04c728ad20d1251244331a105e78bc31bde9db57 | [
"MIT"
] | null | null | null | secret_parametrizer/migrations/0001_initial.py | DiogoBerti/secret_parametrizer | 04c728ad20d1251244331a105e78bc31bde9db57 | [
"MIT"
] | null | null | null |
# Generated by Django 2.2.14 on 2020-08-01 02:05

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='SecretParametrizer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=64, null=True, verbose_name='Name')),
                ('code', models.CharField(blank=True, max_length=64, null=True, verbose_name='Code')),
                ('description', models.TextField(blank=True, null=True, verbose_name='Description')),
                ('url_to_call', models.TextField(blank=True, null=True, verbose_name='URL')),
                ('token', models.TextField(blank=True, null=True, verbose_name='TOKEN')),
                ('key', models.CharField(blank=True, max_length=64, null=True, verbose_name='Key')),
            ],
        ),
    ]
| 38.222222 | 114 | 0.608527 | 116 | 1,032 | 5.293103 | 0.413793 | 0.125407 | 0.14658 | 0.185668 | 0.473941 | 0.473941 | 0.473941 | 0.473941 | 0.263844 | 0.263844 | 0 | 0.028241 | 0.245155 | 1,032 | 26 | 115 | 39.692308 | 0.759949 | 0.044574 | 0 | 0 | 1 | 0 | 0.091463 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.052632 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
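For readability, the model this initial migration corresponds to, reconstructed as a sketch; only the fields come from the operation above, while the module location and any Meta options are assumptions:

from django.db import models

class SecretParametrizer(models.Model):
    name = models.CharField('Name', max_length=64, blank=True, null=True)
    code = models.CharField('Code', max_length=64, blank=True, null=True)
    description = models.TextField('Description', blank=True, null=True)
    url_to_call = models.TextField('URL', blank=True, null=True)
    token = models.TextField('TOKEN', blank=True, null=True)
    key = models.CharField('Key', max_length=64, blank=True, null=True)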
0a19d7994cc20a47668d525d9c3c676d9dd5484a | 971 | py | Python | src/opera/parser/tosca/v_1_3/data_type.py | abitrolly/xopera-opera | 49694282db95bb425b988117b90b629f898803ec | [
"Apache-2.0"
] | 31 | 2019-02-12T06:52:24.000Z | 2022-03-23T06:48:10.000Z | src/opera/parser/tosca/v_1_3/data_type.py | abitrolly/xopera-opera | 49694282db95bb425b988117b90b629f898803ec | [
"Apache-2.0"
] | 190 | 2019-05-09T09:12:38.000Z | 2022-03-31T06:21:51.000Z | src/opera/parser/tosca/v_1_3/data_type.py | abitrolly/xopera-opera | 49694282db95bb425b988117b90b629f898803ec | [
"Apache-2.0"
] | 19 | 2019-05-09T13:36:22.000Z | 2022-01-25T21:27:50.000Z |
from opera.parser.yaml.node import Node

from .constraint_clause import ConstraintClause
from .property_definition import PropertyDefinition
from ..entity import TypeEntity
from ..list import List
from ..map import Map
from ..reference import DataTypeReference


class DataType(TypeEntity):
    REFERENCE = DataTypeReference("data_types")
    ATTRS = dict(
        constraints=List(ConstraintClause),
        properties=Map(PropertyDefinition),
    )

    @classmethod
    def normalize(cls, yaml_node):
        # Let the validator handle non-dict case
        if not isinstance(yaml_node.value, dict):
            return yaml_node

        # Make sure we have derived_from key
        for k in yaml_node.value:
            if k.value == "derived_from":
                return yaml_node

        # Create default derived_from spec if missing
        data = {Node("derived_from"): Node("None")}
        data.update(yaml_node.value)
        return Node(data, yaml_node.loc)
| 30.34375 | 53 | 0.683831 | 116 | 971 | 5.603448 | 0.491379 | 0.098462 | 0.06 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.239959 | 971 | 31 | 54 | 31.322581 | 0.880759 | 0.120494 | 0 | 0.086957 | 0 | 0 | 0.044706 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.043478 | false | 0 | 0.304348 | 0 | 0.608696 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
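The effect of normalize, shown schematically on plain mappings (the Node wrappers around keys and values are elided for readability):

# {"properties": {...}}            ->  {"derived_from": "None", "properties": {...}}
# {"derived_from": <anything>, ..} ->  returned unchanged
# non-dict values                  ->  returned unchanged (left to the validator)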
0a1c4bd85dbd1346eeae5f2b5663d17f88216ac3 | 490 | py | Python | semantic-representation/project/errors/handlers.py | SOFIE-project/SMAUG-Marketplace | 404b6caa7c5ea58c27c20d716dffa60904fb7f46 | [
"Apache-2.0"
] | 1 | 2021-03-29T15:11:46.000Z | 2021-03-29T15:11:46.000Z | project/errors/handlers.py | SOFIE-project/Semantic-Representation | 3ec0ddb8537e8c58ca3d1af05af3203fa17d690d | [
"Apache-2.0"
] | null | null | null | project/errors/handlers.py | SOFIE-project/Semantic-Representation | 3ec0ddb8537e8c58ca3d1af05af3203fa17d690d | [
"Apache-2.0"
] | 1 | 2020-02-21T18:06:11.000Z | 2020-02-21T18:06:11.000Z |
from flask import request

from project import db
from project.errors import bp
from project.api.errors import error_response as api_error_response


def wants_json_response():
    return request.accept_mimetypes['application/json'] >= \
        request.accept_mimetypes['text/html']


@bp.app_errorhandler(404)
def not_found_error(error):
    return api_error_response(404)


@bp.app_errorhandler(500)
def internal_error(error):
    db.session.rollback()
    return api_error_response(500)
| 23.333333 | 67 | 0.777551 | 69 | 490 | 5.289855 | 0.434783 | 0.142466 | 0.131507 | 0.120548 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028302 | 0.134694 | 490 | 21 | 68 | 23.333333 | 0.832547 | 0 | 0 | 0 | 0 | 0 | 0.05102 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.214286 | false | 0 | 0.285714 | 0.142857 | 0.714286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2
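A sketch of how wants_json_response is typically wired into such handlers, so API clients get JSON while browsers get HTML; the template path is an assumption, and the module above currently returns JSON unconditionally:

from flask import render_template

@bp.app_errorhandler(404)
def not_found_error(error):
    if wants_json_response():
        return api_error_response(404)
    return render_template('errors/404.html'), 404  # assumed template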
0a1eca5969153ccce302baa0630abff11615177a | 2,231 | py | Python | fhirclient/r4models/medicinalproductcontraindication_tests.py | cspears-mitre/CapStatement | 2390566ed75d420e0615e3a0aacb77e8c030fdcc | [
"Apache-2.0"
] | 1 | 2021-12-24T11:14:38.000Z | 2021-12-24T11:14:38.000Z | fhirclient/r4models/medicinalproductcontraindication_tests.py | cspears-mitre/CapStatement | 2390566ed75d420e0615e3a0aacb77e8c030fdcc | [
"Apache-2.0"
] | null | null | null | fhirclient/r4models/medicinalproductcontraindication_tests.py | cspears-mitre/CapStatement | 2390566ed75d420e0615e3a0aacb77e8c030fdcc | [
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#  Generated from FHIR 3.6.0-bd605d07 on 2018-12-20.
#  2018, SMART Health IT.


import os
import io
import unittest
import json
from . import medicinalproductcontraindication
from .fhirdate import FHIRDate


class MedicinalProductContraindicationTests(unittest.TestCase):
    def instantiate_from(self, filename):
        datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
        with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
            js = json.load(handle)
            self.assertEqual("MedicinalProductContraindication", js["resourceType"])
        return medicinalproductcontraindication.MedicinalProductContraindication(js)

    def testMedicinalProductContraindication1(self):
        inst = self.instantiate_from("medicinalproductcontraindication-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a MedicinalProductContraindication instance")
        self.implMedicinalProductContraindication1(inst)

        js = inst.as_json()
        self.assertEqual("MedicinalProductContraindication", js["resourceType"])
        inst2 = medicinalproductcontraindication.MedicinalProductContraindication(js)
        self.implMedicinalProductContraindication1(inst2)

    def implMedicinalProductContraindication1(self, inst):
        self.assertEqual(inst.comorbidity[0].coding[0].code, "Hepaticdisease")
        self.assertEqual(inst.comorbidity[0].coding[0].system, "http://ema.europa.eu/example/comorbidity")
        self.assertEqual(inst.disease.coding[0].code, "Coagulopathiesandbleedingdiatheses(exclthrombocytopenic)")
        self.assertEqual(inst.disease.coding[0].system, "http://ema.europa.eu/example/contraindicationsasdisease-symptom-procedure")
        self.assertEqual(inst.disease.text, "Hepatic disease associated with coagulopathy and clinically relevant bleeding risk")
        self.assertEqual(inst.id, "example")
        self.assertEqual(inst.meta.tag[0].code, "HTEST")
        self.assertEqual(inst.meta.tag[0].display, "test health data")
        self.assertEqual(inst.meta.tag[0].system, "http://hl7.org/fhir/v3/ActReason")
        self.assertEqual(inst.text.status, "generated")
| 48.5 | 132 | 0.735545 | 227 | 2,231 | 7.207048 | 0.431718 | 0.110024 | 0.116137 | 0.047677 | 0.24511 | 0.170538 | 0.084963 | 0.042787 | 0 | 0 | 0 | 0.020602 | 0.151502 | 2,231 | 45 | 133 | 49.577778 | 0.843634 | 0.052443 | 0 | 0.0625 | 1 | 0 | 0.265655 | 0.103416 | 0 | 0 | 0 | 0 | 0.40625 | 1 | 0.09375 | false | 0 | 0.1875 | 0 | 0.34375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
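These generated tests locate their example JSON through the FHIR_UNITTEST_DATADIR environment variable; a typical invocation, where the data directory and the dotted module path are illustrative assumptions based on the repository path above:

# FHIR_UNITTEST_DATADIR=/path/to/fhir-examples \
#   python -m unittest fhirclient.r4models.medicinalproductcontraindication_tests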
0a2b41443fc62e0899188c81b7abbcfd2f3f8dfa | 39,255 | py | Python | sdk/python/lib/pulumi/runtime/proto/resource_pb2.py | appleboy/pulumi | 249140242ebbe241b0869421f3700e75ae28402b | [
"Apache-2.0"
] | null | null | null | sdk/python/lib/pulumi/runtime/proto/resource_pb2.py | appleboy/pulumi | 249140242ebbe241b0869421f3700e75ae28402b | [
"Apache-2.0"
] | null | null | null | sdk/python/lib/pulumi/runtime/proto/resource_pb2.py | appleboy/pulumi | 249140242ebbe241b0869421f3700e75ae28402b | [
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: resource.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from . import provider_pb2 as provider__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='resource.proto',
package='pulumirpc',
syntax='proto3',
serialized_options=None,
serialized_pb=b'\n\x0eresource.proto\x12\tpulumirpc\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x0eprovider.proto\"$\n\x16SupportsFeatureRequest\x12\n\n\x02id\x18\x01 \x01(\t\"-\n\x17SupportsFeatureResponse\x12\x12\n\nhasSupport\x18\x01 \x01(\x08\"\xfc\x01\n\x13ReadResourceRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0e\n\x06parent\x18\x04 \x01(\t\x12+\n\nproperties\x18\x05 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x14\n\x0c\x64\x65pendencies\x18\x06 \x03(\t\x12\x10\n\x08provider\x18\x07 \x01(\t\x12\x0f\n\x07version\x18\x08 \x01(\t\x12\x15\n\racceptSecrets\x18\t \x01(\x08\x12\x1f\n\x17\x61\x64\x64itionalSecretOutputs\x18\n \x03(\t\x12\x0f\n\x07\x61liases\x18\x0b \x03(\t\"P\n\x14ReadResourceResponse\x12\x0b\n\x03urn\x18\x01 \x01(\t\x12+\n\nproperties\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct\"\xaf\x06\n\x17RegisterResourceRequest\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06parent\x18\x03 \x01(\t\x12\x0e\n\x06\x63ustom\x18\x04 \x01(\x08\x12\'\n\x06object\x18\x05 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x0f\n\x07protect\x18\x06 \x01(\x08\x12\x14\n\x0c\x64\x65pendencies\x18\x07 \x03(\t\x12\x10\n\x08provider\x18\x08 \x01(\t\x12Z\n\x14propertyDependencies\x18\t \x03(\x0b\x32<.pulumirpc.RegisterResourceRequest.PropertyDependenciesEntry\x12\x1b\n\x13\x64\x65leteBeforeReplace\x18\n \x01(\x08\x12\x0f\n\x07version\x18\x0b \x01(\t\x12\x15\n\rignoreChanges\x18\x0c \x03(\t\x12\x15\n\racceptSecrets\x18\r \x01(\x08\x12\x1f\n\x17\x61\x64\x64itionalSecretOutputs\x18\x0e \x03(\t\x12\x0f\n\x07\x61liases\x18\x0f \x03(\t\x12\x10\n\x08importId\x18\x10 \x01(\t\x12I\n\x0e\x63ustomTimeouts\x18\x11 \x01(\x0b\x32\x31.pulumirpc.RegisterResourceRequest.CustomTimeouts\x12\"\n\x1a\x64\x65leteBeforeReplaceDefined\x18\x12 \x01(\x08\x12\x1d\n\x15supportsPartialValues\x18\x13 \x01(\x08\x12\x0e\n\x06remote\x18\x14 \x01(\x08\x1a$\n\x14PropertyDependencies\x12\x0c\n\x04urns\x18\x01 \x03(\t\x1a@\n\x0e\x43ustomTimeouts\x12\x0e\n\x06\x63reate\x18\x01 \x01(\t\x12\x0e\n\x06update\x18\x02 \x01(\t\x12\x0e\n\x06\x64\x65lete\x18\x03 \x01(\t\x1at\n\x19PropertyDependenciesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x46\n\x05value\x18\x02 \x01(\x0b\x32\x37.pulumirpc.RegisterResourceRequest.PropertyDependencies:\x02\x38\x01\"\xf7\x02\n\x18RegisterResourceResponse\x12\x0b\n\x03urn\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\'\n\x06object\x18\x03 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x0e\n\x06stable\x18\x04 \x01(\x08\x12\x0f\n\x07stables\x18\x05 \x03(\t\x12[\n\x14propertyDependencies\x18\x06 \x03(\x0b\x32=.pulumirpc.RegisterResourceResponse.PropertyDependenciesEntry\x1a$\n\x14PropertyDependencies\x12\x0c\n\x04urns\x18\x01 \x03(\t\x1au\n\x19PropertyDependenciesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12G\n\x05value\x18\x02 \x01(\x0b\x32\x38.pulumirpc.RegisterResourceResponse.PropertyDependencies:\x02\x38\x01\"W\n\x1eRegisterResourceOutputsRequest\x12\x0b\n\x03urn\x18\x01 \x01(\t\x12(\n\x07outputs\x18\x02 '
b'\x01(\x0b\x32\x17.google.protobuf.Struct2\x89\x04\n\x0fResourceMonitor\x12Z\n\x0fSupportsFeature\x12!.pulumirpc.SupportsFeatureRequest\x1a\".pulumirpc.SupportsFeatureResponse\"\x00\x12?\n\x06Invoke\x12\x18.pulumirpc.InvokeRequest\x1a\x19.pulumirpc.InvokeResponse\"\x00\x12G\n\x0cStreamInvoke\x12\x18.pulumirpc.InvokeRequest\x1a\x19.pulumirpc.InvokeResponse\"\x00\x30\x01\x12Q\n\x0cReadResource\x12\x1e.pulumirpc.ReadResourceRequest\x1a\x1f.pulumirpc.ReadResourceResponse\"\x00\x12]\n\x10RegisterResource\x12\".pulumirpc.RegisterResourceRequest\x1a#.pulumirpc.RegisterResourceResponse\"\x00\x12^\n\x17RegisterResourceOutputs\x12).pulumirpc.RegisterResourceOutputsRequest\x1a\x16.google.protobuf.Empty\"\x00\x62\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,provider__pb2.DESCRIPTOR,])
_SUPPORTSFEATUREREQUEST = _descriptor.Descriptor(
name='SupportsFeatureRequest',
full_name='pulumirpc.SupportsFeatureRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pulumirpc.SupportsFeatureRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=104,
serialized_end=140,
)
_SUPPORTSFEATURERESPONSE = _descriptor.Descriptor(
name='SupportsFeatureResponse',
full_name='pulumirpc.SupportsFeatureResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hasSupport', full_name='pulumirpc.SupportsFeatureResponse.hasSupport', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=142,
serialized_end=187,
)
_READRESOURCEREQUEST = _descriptor.Descriptor(
name='ReadResourceRequest',
full_name='pulumirpc.ReadResourceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='pulumirpc.ReadResourceRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='pulumirpc.ReadResourceRequest.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='pulumirpc.ReadResourceRequest.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='parent', full_name='pulumirpc.ReadResourceRequest.parent', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='properties', full_name='pulumirpc.ReadResourceRequest.properties', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dependencies', full_name='pulumirpc.ReadResourceRequest.dependencies', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='provider', full_name='pulumirpc.ReadResourceRequest.provider', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='pulumirpc.ReadResourceRequest.version', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acceptSecrets', full_name='pulumirpc.ReadResourceRequest.acceptSecrets', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='additionalSecretOutputs', full_name='pulumirpc.ReadResourceRequest.additionalSecretOutputs', index=9,
number=10, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='aliases', full_name='pulumirpc.ReadResourceRequest.aliases', index=10,
number=11, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=190,
serialized_end=442,
)
_READRESOURCERESPONSE = _descriptor.Descriptor(
name='ReadResourceResponse',
full_name='pulumirpc.ReadResourceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='urn', full_name='pulumirpc.ReadResourceResponse.urn', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='properties', full_name='pulumirpc.ReadResourceResponse.properties', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=444,
serialized_end=524,
)
_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIES = _descriptor.Descriptor(
name='PropertyDependencies',
full_name='pulumirpc.RegisterResourceRequest.PropertyDependencies',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='urns', full_name='pulumirpc.RegisterResourceRequest.PropertyDependencies.urns', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1122,
serialized_end=1158,
)
_REGISTERRESOURCEREQUEST_CUSTOMTIMEOUTS = _descriptor.Descriptor(
name='CustomTimeouts',
full_name='pulumirpc.RegisterResourceRequest.CustomTimeouts',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='create', full_name='pulumirpc.RegisterResourceRequest.CustomTimeouts.create', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='update', full_name='pulumirpc.RegisterResourceRequest.CustomTimeouts.update', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='delete', full_name='pulumirpc.RegisterResourceRequest.CustomTimeouts.delete', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1160,
serialized_end=1224,
)
_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY = _descriptor.Descriptor(
name='PropertyDependenciesEntry',
full_name='pulumirpc.RegisterResourceRequest.PropertyDependenciesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pulumirpc.RegisterResourceRequest.PropertyDependenciesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pulumirpc.RegisterResourceRequest.PropertyDependenciesEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1226,
serialized_end=1342,
)
_REGISTERRESOURCEREQUEST = _descriptor.Descriptor(
name='RegisterResourceRequest',
full_name='pulumirpc.RegisterResourceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='pulumirpc.RegisterResourceRequest.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='pulumirpc.RegisterResourceRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='parent', full_name='pulumirpc.RegisterResourceRequest.parent', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='custom', full_name='pulumirpc.RegisterResourceRequest.custom', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='object', full_name='pulumirpc.RegisterResourceRequest.object', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='protect', full_name='pulumirpc.RegisterResourceRequest.protect', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dependencies', full_name='pulumirpc.RegisterResourceRequest.dependencies', index=6,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='provider', full_name='pulumirpc.RegisterResourceRequest.provider', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='propertyDependencies', full_name='pulumirpc.RegisterResourceRequest.propertyDependencies', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deleteBeforeReplace', full_name='pulumirpc.RegisterResourceRequest.deleteBeforeReplace', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='pulumirpc.RegisterResourceRequest.version', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ignoreChanges', full_name='pulumirpc.RegisterResourceRequest.ignoreChanges', index=11,
number=12, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='acceptSecrets', full_name='pulumirpc.RegisterResourceRequest.acceptSecrets', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='additionalSecretOutputs', full_name='pulumirpc.RegisterResourceRequest.additionalSecretOutputs', index=13,
number=14, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='aliases', full_name='pulumirpc.RegisterResourceRequest.aliases', index=14,
number=15, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='importId', full_name='pulumirpc.RegisterResourceRequest.importId', index=15,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='customTimeouts', full_name='pulumirpc.RegisterResourceRequest.customTimeouts', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deleteBeforeReplaceDefined', full_name='pulumirpc.RegisterResourceRequest.deleteBeforeReplaceDefined', index=17,
number=18, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='supportsPartialValues', full_name='pulumirpc.RegisterResourceRequest.supportsPartialValues', index=18,
number=19, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote', full_name='pulumirpc.RegisterResourceRequest.remote', index=19,
number=20, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIES, _REGISTERRESOURCEREQUEST_CUSTOMTIMEOUTS, _REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=527,
serialized_end=1342,
)
_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIES = _descriptor.Descriptor(
name='PropertyDependencies',
full_name='pulumirpc.RegisterResourceResponse.PropertyDependencies',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='urns', full_name='pulumirpc.RegisterResourceResponse.PropertyDependencies.urns', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1122,
serialized_end=1158,
)
_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY = _descriptor.Descriptor(
name='PropertyDependenciesEntry',
full_name='pulumirpc.RegisterResourceResponse.PropertyDependenciesEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='pulumirpc.RegisterResourceResponse.PropertyDependenciesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='pulumirpc.RegisterResourceResponse.PropertyDependenciesEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1603,
serialized_end=1720,
)
_REGISTERRESOURCERESPONSE = _descriptor.Descriptor(
name='RegisterResourceResponse',
full_name='pulumirpc.RegisterResourceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='urn', full_name='pulumirpc.RegisterResourceResponse.urn', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='pulumirpc.RegisterResourceResponse.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='object', full_name='pulumirpc.RegisterResourceResponse.object', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stable', full_name='pulumirpc.RegisterResourceResponse.stable', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stables', full_name='pulumirpc.RegisterResourceResponse.stables', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='propertyDependencies', full_name='pulumirpc.RegisterResourceResponse.propertyDependencies', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIES, _REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1345,
serialized_end=1720,
)
_REGISTERRESOURCEOUTPUTSREQUEST = _descriptor.Descriptor(
name='RegisterResourceOutputsRequest',
full_name='pulumirpc.RegisterResourceOutputsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='urn', full_name='pulumirpc.RegisterResourceOutputsRequest.urn', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='outputs', full_name='pulumirpc.RegisterResourceOutputsRequest.outputs', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1722,
serialized_end=1809,
)
_READRESOURCEREQUEST.fields_by_name['properties'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_READRESOURCERESPONSE.fields_by_name['properties'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIES.containing_type = _REGISTERRESOURCEREQUEST
_REGISTERRESOURCEREQUEST_CUSTOMTIMEOUTS.containing_type = _REGISTERRESOURCEREQUEST
_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY.fields_by_name['value'].message_type = _REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIES
_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY.containing_type = _REGISTERRESOURCEREQUEST
_REGISTERRESOURCEREQUEST.fields_by_name['object'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_REGISTERRESOURCEREQUEST.fields_by_name['propertyDependencies'].message_type = _REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY
_REGISTERRESOURCEREQUEST.fields_by_name['customTimeouts'].message_type = _REGISTERRESOURCEREQUEST_CUSTOMTIMEOUTS
_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIES.containing_type = _REGISTERRESOURCERESPONSE
_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY.fields_by_name['value'].message_type = _REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIES
_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY.containing_type = _REGISTERRESOURCERESPONSE
_REGISTERRESOURCERESPONSE.fields_by_name['object'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_REGISTERRESOURCERESPONSE.fields_by_name['propertyDependencies'].message_type = _REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY
_REGISTERRESOURCEOUTPUTSREQUEST.fields_by_name['outputs'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
DESCRIPTOR.message_types_by_name['SupportsFeatureRequest'] = _SUPPORTSFEATUREREQUEST
DESCRIPTOR.message_types_by_name['SupportsFeatureResponse'] = _SUPPORTSFEATURERESPONSE
DESCRIPTOR.message_types_by_name['ReadResourceRequest'] = _READRESOURCEREQUEST
DESCRIPTOR.message_types_by_name['ReadResourceResponse'] = _READRESOURCERESPONSE
DESCRIPTOR.message_types_by_name['RegisterResourceRequest'] = _REGISTERRESOURCEREQUEST
DESCRIPTOR.message_types_by_name['RegisterResourceResponse'] = _REGISTERRESOURCERESPONSE
DESCRIPTOR.message_types_by_name['RegisterResourceOutputsRequest'] = _REGISTERRESOURCEOUTPUTSREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
SupportsFeatureRequest = _reflection.GeneratedProtocolMessageType('SupportsFeatureRequest', (_message.Message,), {
'DESCRIPTOR' : _SUPPORTSFEATUREREQUEST,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.SupportsFeatureRequest)
})
_sym_db.RegisterMessage(SupportsFeatureRequest)
SupportsFeatureResponse = _reflection.GeneratedProtocolMessageType('SupportsFeatureResponse', (_message.Message,), {
'DESCRIPTOR' : _SUPPORTSFEATURERESPONSE,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.SupportsFeatureResponse)
})
_sym_db.RegisterMessage(SupportsFeatureResponse)
ReadResourceRequest = _reflection.GeneratedProtocolMessageType('ReadResourceRequest', (_message.Message,), {
'DESCRIPTOR' : _READRESOURCEREQUEST,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.ReadResourceRequest)
})
_sym_db.RegisterMessage(ReadResourceRequest)
ReadResourceResponse = _reflection.GeneratedProtocolMessageType('ReadResourceResponse', (_message.Message,), {
'DESCRIPTOR' : _READRESOURCERESPONSE,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.ReadResourceResponse)
})
_sym_db.RegisterMessage(ReadResourceResponse)
RegisterResourceRequest = _reflection.GeneratedProtocolMessageType('RegisterResourceRequest', (_message.Message,), {
'PropertyDependencies' : _reflection.GeneratedProtocolMessageType('PropertyDependencies', (_message.Message,), {
'DESCRIPTOR' : _REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIES,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceRequest.PropertyDependencies)
})
,
'CustomTimeouts' : _reflection.GeneratedProtocolMessageType('CustomTimeouts', (_message.Message,), {
'DESCRIPTOR' : _REGISTERRESOURCEREQUEST_CUSTOMTIMEOUTS,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceRequest.CustomTimeouts)
})
,
'PropertyDependenciesEntry' : _reflection.GeneratedProtocolMessageType('PropertyDependenciesEntry', (_message.Message,), {
'DESCRIPTOR' : _REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceRequest.PropertyDependenciesEntry)
})
,
'DESCRIPTOR' : _REGISTERRESOURCEREQUEST,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceRequest)
})
_sym_db.RegisterMessage(RegisterResourceRequest)
_sym_db.RegisterMessage(RegisterResourceRequest.PropertyDependencies)
_sym_db.RegisterMessage(RegisterResourceRequest.CustomTimeouts)
_sym_db.RegisterMessage(RegisterResourceRequest.PropertyDependenciesEntry)
RegisterResourceResponse = _reflection.GeneratedProtocolMessageType('RegisterResourceResponse', (_message.Message,), {
'PropertyDependencies' : _reflection.GeneratedProtocolMessageType('PropertyDependencies', (_message.Message,), {
'DESCRIPTOR' : _REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIES,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceResponse.PropertyDependencies)
})
,
'PropertyDependenciesEntry' : _reflection.GeneratedProtocolMessageType('PropertyDependenciesEntry', (_message.Message,), {
'DESCRIPTOR' : _REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceResponse.PropertyDependenciesEntry)
})
,
'DESCRIPTOR' : _REGISTERRESOURCERESPONSE,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceResponse)
})
_sym_db.RegisterMessage(RegisterResourceResponse)
_sym_db.RegisterMessage(RegisterResourceResponse.PropertyDependencies)
_sym_db.RegisterMessage(RegisterResourceResponse.PropertyDependenciesEntry)
RegisterResourceOutputsRequest = _reflection.GeneratedProtocolMessageType('RegisterResourceOutputsRequest', (_message.Message,), {
'DESCRIPTOR' : _REGISTERRESOURCEOUTPUTSREQUEST,
'__module__' : 'resource_pb2'
# @@protoc_insertion_point(class_scope:pulumirpc.RegisterResourceOutputsRequest)
})
_sym_db.RegisterMessage(RegisterResourceOutputsRequest)
_REGISTERRESOURCEREQUEST_PROPERTYDEPENDENCIESENTRY._options = None
_REGISTERRESOURCERESPONSE_PROPERTYDEPENDENCIESENTRY._options = None
_RESOURCEMONITOR = _descriptor.ServiceDescriptor(
name='ResourceMonitor',
full_name='pulumirpc.ResourceMonitor',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=1812,
serialized_end=2333,
methods=[
_descriptor.MethodDescriptor(
name='SupportsFeature',
full_name='pulumirpc.ResourceMonitor.SupportsFeature',
index=0,
containing_service=None,
input_type=_SUPPORTSFEATUREREQUEST,
output_type=_SUPPORTSFEATURERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='Invoke',
full_name='pulumirpc.ResourceMonitor.Invoke',
index=1,
containing_service=None,
input_type=provider__pb2._INVOKEREQUEST,
output_type=provider__pb2._INVOKERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='StreamInvoke',
full_name='pulumirpc.ResourceMonitor.StreamInvoke',
index=2,
containing_service=None,
input_type=provider__pb2._INVOKEREQUEST,
output_type=provider__pb2._INVOKERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='ReadResource',
full_name='pulumirpc.ResourceMonitor.ReadResource',
index=3,
containing_service=None,
input_type=_READRESOURCEREQUEST,
output_type=_READRESOURCERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='RegisterResource',
full_name='pulumirpc.ResourceMonitor.RegisterResource',
index=4,
containing_service=None,
input_type=_REGISTERRESOURCEREQUEST,
output_type=_REGISTERRESOURCERESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='RegisterResourceOutputs',
full_name='pulumirpc.ResourceMonitor.RegisterResourceOutputs',
index=5,
containing_service=None,
input_type=_REGISTERRESOURCEOUTPUTSREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_RESOURCEMONITOR)
DESCRIPTOR.services_by_name['ResourceMonitor'] = _RESOURCEMONITOR
# @@protoc_insertion_point(module_scope)
| 45.486674 | 3,784 | 0.766654 | 4,405 | 39,255 | 6.560045 | 0.064699 | 0.04651 | 0.041769 | 0.03599 | 0.676472 | 0.636329 | 0.619822 | 0.586531 | 0.568017 | 0.544555 | 0 | 0.036435 | 0.117641 | 39,255 | 862 | 3,785 | 45.539443 | 0.797846 | 0.02927 | 0 | 0.69 | 1 | 0.00375 | 0.213952 | 0.174491 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.01125 | 0 | 0.01125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
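A minimal sketch of using the generated message classes; the import path follows the repository layout above and the field values are illustrative, not taken from the file:

from pulumi.runtime.proto import resource_pb2

req = resource_pb2.RegisterResourceRequest(
    type='aws:s3/bucket:Bucket',  # illustrative type token
    name='my-bucket',
    custom=True,
)
payload = req.SerializeToString()                                # wire-format bytes
same = resource_pb2.RegisterResourceRequest.FromString(payload)  # round-trip parse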
0a3591e2e0829e47a19dbd17e2ce326f7c5c3fea | 647 | py | Python | idm/commands/__init__.py | ruslanvolov6667/ID | d2e2e0d8764bb9bc9caad163d69de03972cb5380 | [
"MIT"
] | 1 | 2020-07-16T00:16:06.000Z | 2020-07-16T00:16:06.000Z | idm/commands/__init__.py | ruslanvolov6667/ID | d2e2e0d8764bb9bc9caad163d69de03972cb5380 | [
"MIT"
] | null | null | null | idm/commands/__init__.py | ruslanvolov6667/ID | d2e2e0d8764bb9bc9caad163d69de03972cb5380 | [
"MIT"
] | null | null | null |
from .add_user import add_user
from .ban_expired import ban_expired
from .ban_get_reason import ban_get_reason
from .bind_chat import bind_chat
from .delete_messages_from_user import delete_messages_from_user
from .delete_messages import delete_messages
from .forbidden_links import forbidden_links
from .ignore_messages import ignore_messages
from .ping import ping as ping__
from .print_bookmark import print_bookmark
from .send_my_signal import send_my_signal
from .send_signal import send_signal
from .subscribe_signals import subscribe_signals
from .to_group import to_group
from .signals import *
from .my_signals import * | 35.944444 | 65 | 0.842349 | 99 | 647 | 5.131313 | 0.262626 | 0.110236 | 0.106299 | 0.086614 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12983 | 647 | 18 | 66 | 35.944444 | 0.902309 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
0a3f85e6ceee77b968f1cc58e05429f990216276 | 6,299 | py | Python | tests/selenium/TransitSubsidyApp.py | mjjavaid/cfpb-transit_subsidy | 3f0363f21e754f812c2aca2af031d4bbbee386ed | [
"CC0-1.0"
] | null | null | null | tests/selenium/TransitSubsidyApp.py | mjjavaid/cfpb-transit_subsidy | 3f0363f21e754f812c2aca2af031d4bbbee386ed | [
"CC0-1.0"
] | null | null | null | tests/selenium/TransitSubsidyApp.py | mjjavaid/cfpb-transit_subsidy | 3f0363f21e754f812c2aca2af031d4bbbee386ed | [
"CC0-1.0"
] | null | null | null | __author__ = 'CFPB Labs'
import time
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import WebDriverWait
from nose.tools import *
class TransitSubsidyApp():
"""
Abstraction of the Transit Subsidy application. This extends the
WebDriver Page Object pattern (http://code.google.com/p/selenium/wiki/PageObjects)
and applies to the application as a whole rather than a single page state.
@note_to_self: one or more page objects could comprise an application object, which,
more or less, could serve as a testing facade.
"""
def __init__(self,driver,base_url):
self.driver = driver
self.base_url = base_url
def reset(self):
self.driver.find_element_by_link_text('Reset Form').click()
def login(self, username="ted", password="ted" ):
self.driver.get( self.base_url + "/login/")
eq_("Your Intranet >", self.driver.title)
self.driver.find_element_by_id("id_username").clear()
self.driver.find_element_by_id("id_username").send_keys(username)
self.driver.find_element_by_id("id_password").clear()
self.driver.find_element_by_id("id_password").send_keys(password)
self.driver.find_element_by_id("btn_login").click()
eq_("Your Intranet > Transit Subsidy Request", self.driver.title)
def logout(self):
self.driver.get(self.base_url + "/logout/")
def commute_from( self, street='123 Main St', city='Anytown', state="VA", zip="62312" ):
self.driver.find_element_by_id("id_origin_street").clear()
self.driver.find_element_by_id("id_origin_street").send_keys(street)
self.driver.find_element_by_id("id_origin_city").clear()
self.driver.find_element_by_id("id_origin_city").send_keys(city)
self.driver.find_element_by_id("id_origin_state").clear()
self.driver.find_element_by_id("id_origin_state").send_keys(state)
self.driver.find_element_by_id("id_origin_zip").clear()
self.driver.find_element_by_id("id_origin_zip").send_keys(zip)
def commute_to(self, destination_id=2):
self.driver.find_element_by_id("id_destination").find_elements_by_tag_name('option')[destination_id].click()
def add_segment(self, segment_id,mode_id,amount,add_another=False):
self.driver.find_element_by_id("segment-type_%s" % segment_id).find_elements_by_tag_name('option')[mode_id].click()
self.driver.find_element_by_id("segment-amount_%s" % segment_id).clear()
self.driver.find_element_by_id("segment-amount_%s" % segment_id).send_keys(amount)
if add_another: self.driver.find_element_by_id("add_%s" % segment_id).click()
def add_other_segment(self, segment_id, other_text, amount, add_another=False):
self.driver.find_element_by_id("segment-type_%s" % segment_id).find_elements_by_tag_name('option')[17].click()
time.sleep(1)
self.driver.find_element_by_id("segment-other_%s" % segment_id).send_keys(other_text)
self.driver.find_element_by_id("segment-amount_%s" % segment_id).clear()
self.driver.find_element_by_id("segment-amount_%s" % segment_id).send_keys(amount)
if add_another: self.driver.find_element_by_id("add_%s" % segment_id).click()
def remove_segment(self,segment_id):
self.driver.find_element_by_id('rm_%s' % segment_id).click()
def click_add(self):
self.driver.find_element_by_id('add_1').click()
def select_workdays(self, id=2, other=None):
self.driver.find_element_by_xpath("(//input[@id='id_work_sked'])[%s]" % id).click()
if id==4:
self.driver.find_element_by_id('id_number_of_workdays').clear()
self.driver.find_element_by_id('id_number_of_workdays').send_keys(other)
def view_smartriphelp(self):
self.driver.find_element_by_id("id_help_smartrip").click()
#Keys.ESCAPE should work, too
self.driver.find_element_by_id("cboxClose").click()
def add_smartrip(self, num='00020 0001 5644 364 6'):
self.driver.find_element_by_id("id_dc_wmta_smartrip_id").clear()
self.driver.find_element_by_id("id_dc_wmta_smartrip_id").send_keys(num)
def enroll(self):
time.sleep(.5)
self.driver.find_element_by_id("btn_enroll_smartrip").click()
time.sleep(1)
def sign(self, last_four_ssn='1234', signature='Mick Jagger'):
time.sleep(.5)
self.driver.find_element_by_id("id_last_four_ssn").send_keys(last_four_ssn)
self.driver.find_element_by_id("id_signature").send_keys(signature)
self.driver.find_element_by_id("btn_agree").click()
time.sleep(.5)
eq_("Your Intranet > Transit Subsidy Confirmation", self.driver.title)
def dont_sign(self):
self.driver.find_element_by_id("btn_no_agree").click()
eq_("Your Intranet > Transit Subsidy Request", self.driver.title)
def withdraw_enrollment(self):
# The exact-link-text locator below isn't working:
# self.driver.find_element_by_link_text('Cancel my enrollment.').click()
# Test "no agree" (for grins):
# time.sleep(.5)
# self.driver.find_element_by_id("btn_withdraw_no_agree").click()
# Selenium throws: Element is not clickable at point (558, 165). Other element would receive the click: <div id="cboxOverlay" style="cursor: pointer; opacity: 0.22499999403953552; "></div>
# self.driver.find_element_by_id('id_withdrawl_dialog').send_keys(Keys.ESCAPE)  # Just hit Escape instead
# In theory, this should work, too: self.driver.find_element_by_id("cboxClose").click()
# OK - for now, use the partial-link-text locator below, which does work:
self.driver.find_element_by_partial_link_text('Cancel my enrollment').click()
time.sleep(.5)
self.driver.find_element_by_id("btn_withdraw_agree").click()
eq_("Your Intranet > Transit Subsidy Withdrawl Confirmation", self.driver.title)
| 43.743056 | 195 | 0.70027 | 898 | 6,299 | 4.5902 | 0.227171 | 0.128578 | 0.149442 | 0.224163 | 0.552644 | 0.525958 | 0.499272 | 0.409267 | 0.370451 | 0.284328 | 0 | 0.012783 | 0.180346 | 6,299 | 143 | 196 | 44.048951 | 0.78559 | 0.160184 | 0 | 0.190476 | 0 | 0 | 0.16823 | 0.022775 | 0 | 0 | 0 | 0 | 0 | 1 | 0.202381 | false | 0.035714 | 0.107143 | 0 | 0.321429 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a4ce0dec21700def35c367e1f7aad0c64db1140 | 23,063 | py | Python | RADGIS/RETIRED/detection.py | jtapanes21/RADGIS | 2322f75f23cec4dde9f8c7b21d9137f1986e6382 | [
"MIT"
] | null | null | null | RADGIS/RETIRED/detection.py | jtapanes21/RADGIS | 2322f75f23cec4dde9f8c7b21d9137f1986e6382 | [
"MIT"
] | null | null | null | RADGIS/RETIRED/detection.py | jtapanes21/RADGIS | 2322f75f23cec4dde9f8c7b21d9137f1986e6382 | [
"MIT"
] | null | null | null | '''
from ..utils import gislib, utils, constants
from ..core.trajectorydataframe import *
import numpy as np
import pandas as pd
def stops(tdf, stop_radius_meters=20, minutes_for_a_stop=10):
""" Stops detection
Detect the stops for each individual in a TrajDataFrame. A stop is
detected when the individual spends at least 'minutes_for_a_stop' minutes
within a distance 'stop_radius_meters' from a given trajectory point.
The stop's coordinates are the mean latitude and longitude values
of the points found within the specified distance.
Parameters
----------
tdf : TrajDataFrame
the input trajectories of the individuals.
stop_radius_meters : integer, optional
the maximum distance between two consecutive points for them to be
considered part of a stop. The default is 20 meters.
minutes_for_a_stop : integer, optional
the minimum stop duration, in minutes. The default is '10' minutes.
Returns
-------
TrajDataFrame
a TrajDataFrame with the coordinates (latitude, longitude) of
the stop locations.
"""
# convert the minutes_for_a_stop variable to seconds.
minutes_for_a_stop = minutes_for_a_stop * 60
# Update the STOP_TIME global variable in the constants .py file.
constants.STOP_TIME = minutes_for_a_stop
# Sort
tdf = tdf.sort_by_uid_and_datetime()
# Reset the index.
tdf.reset_index(drop=True, inplace=True)
# Order the columns; important for numpy operations where column numbers are used.
# Add a "uid" column name if not multi_user.
if not utils.is_multi_user(tdf):
tdf["uid"] = 1
tdf = utils.column_order(tdf, "timestamp", "latitude", "longitude", "uid")
stdf = _stops_array(tdf, stop_radius_meters, minutes_for_a_stop)
return stdf
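# Illustrative usage sketch (hypothetical; the CSV file and its column names
# are assumptions for the example, not part of this module):
#
# import pandas as pd
# df = pd.read_csv('my_gps.csv')  # columns: timestamp, latitude, longitude, uid
# tdf = TrajDataFrame(df, latitude='latitude', longitude='longitude',
#                     datetime='timestamp', user_id='uid')
# stop_tdf = stops(tdf, stop_radius_meters=30, minutes_for_a_stop=15)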
def _stops_array(tdf, stop_radius_meters, minutes_for_a_stop):
# Save the column names
column_names = tdf.columns.to_list()
# From dataframe convert to a numpy matrix.
array = tdf.values
# Save the uid edge index. This is used to overwrite the distance that spans from one
# uid to the next uid. Column three contains the uid.
uid_edge_index = np.where(np.diff(array[:,3]))
# Haversine distance calculation is added as a column to the array.
array = np.hstack((((gislib.haversine_np(array[:,1],array[:,2], array[:,1][1:], array[:,2][1:]))[...,np.newaxis]), array))
# Use the 'uid_edge_index' to assign very large distance to the edge of each uid.
# This ensures that the uids remain separate.
np.put(array[:,0], uid_edge_index[0], 99999999)
# Identify stop candidates using distance. Retain the index of the rows that are less than
# the 'stop_radius_meters' distance. Add a unique ident to the rows that meet this distance threshold.
array = np.hstack((((np.where(array[:,0] > stop_radius_meters, array[:,0], (np.where(array[:,0] < stop_radius_meters, -1111, np.nan))))[...,np.newaxis]), array))
# Save the indicies that meet the distance threshold.
old_stop_index = np.where(array[:,0] == -1111)
# Add a unique ident for each candidate stop group. The stop group was previously
# identified using distance and labeled -1111.
np.put(array[:,0],np.where(array[:,0] == -1111), np.cumsum(np.not_equal((np.concatenate(([0], np.array(np.where(array[:,0] == -1111))[0])))[:-1], (np.concatenate(([0], np.array(np.where(array[:,0] == -1111))[0])))[1:]-1)))
# The last row in the candidate stop group is not initially labeled with the stop group ident.
put_index = old_stop_index[0]+1
put_values = array[:,0][old_stop_index[0]]
np.put(array[:,0], put_index, put_values)
# Save the complete stop group index to a variable for later use.
old_stop_index_complete = np.unique(np.concatenate((old_stop_index[0],put_index),0))
# Filter the original array to only include the candidate stops.
stop_cand = array[old_stop_index_complete]
""" "Chaining" is a common problem that simple stop detection algorithms experience. Chaining
is when false stops are identified that are most commonly the result of walking especially
with highly sampled datasets. For example, a gps beacon set to ping every five seconds is
considered highly sampled data. To a simple distance and time stop detection algorithm, walking
would look like a stop: little distance between each consecutive point until the person speeds up at
which point the begining of the walk to the end would be considered the stop per the distance component
of the algorithm. It is likely that the time component of the algorithm would also be satified where the time
difference from the begining to the end of the distance break are summed. Thus, long walks, with highly sampled
data, can be falsly labeled as stops. These false stops have the appearence of linear chains hence the term "chaining".
I use a primitive method to combat chaining. The method is not perfect, but the positives outweigh the negatives.
The method is to select a large, relative to the original distance threshold, max distance and generate intra-group groups
using the cumulative sum of each consecutive distance within each group. The mean latitude and longitude are then taken
for each intra-group. If the mean coordinates are within a distance threshold of the next consecutive intra-group, then
the intra-groups are merged. This distance threshold is larger than the original distance threshold selected by the user,
but smaller than the cumulative sum max distance. Esentially the original groups are broken into smaller intra-groups
and then re-merged if the mean center of the intra-groups are less than the distance threshold.
This method combats the chaining effects that occur with highly sampled datasets. If a highly sampled GPS stops, then there
should be many pings within a close distance of eachother. Many detects will have large azimuth changes from GPS error or
the user moving within a stop location. But in the end, breaking these pings up into intra-groups, their mean center will
be close to eachother. This is not the case with a walk.
"""
# Edit the distance column before the groups are broken up into intra-groups.
# Change all 0s to 1s and round up to whole numbers. This is done so that
# the modulo operator will work to create the increasing pattern, which is
# how the intra-groups are created. Yes, rounding up and changing 0s to 1s
# is inaccurate, but we accept this small inaccuracy.
stop_cand[:,1] = np.round(np.int64(np.float64(stop_cand[:,1])))
stop_cand[:,1][stop_cand[:,1] == 0] = 1
# Get the counts of each stop group size.
users = np.float64(stop_cand[:,0])
unames, idx, counts = np.unique(users, return_inverse=True, return_counts=True)
""" What are we about to do and why?
This block is a way to do a one-level groupby on our data and make
the cumsum on each groupby reset at a certain limit. The limit is
taken using the modulo. Overall this code was taken from a different
application where the modulo worked differently. But this still kind of
works for our purposes to create a pattern of increasing to decreasing on
the reset. It seems to work better with a larger limit aka like 100 or 200
meters.
Why do this? This might not need to be done with sparse data. But with
highly sampled GPS data, where a new sample is taken every 5-60 seconds, this
is helpful to remove false positive stops. Specifically, where someone walks
slowly. Without doing this, the walk might be determined to be a stop.
"""
def _intervaled_cumsum(ar, sizes):
# Make a copy to be used as output array
out = ar.copy()
# Get cumumlative values of array
arc = ar.cumsum()
# Get cumsumed indices to be used to place differentiated values into
# input array's copy
idx = sizes.cumsum()
# Place differentiated values that, when cumulatively summed later on, would
# give us the desired intervaled cumsum
out[idx[0]] = ar[idx[0]] - arc[idx[0]-1]
out[idx[1:-1]] = ar[idx[1:-1]] - np.diff(arc[idx[:-1]-1])
limit = 50
return out.cumsum() % limit
# Similar function as above but returns the pattern of each group.
def _intervaled_cumsum2(ar, sizes):
# Make a copy to be used as output array
out = ar.copy()
# Get cumumlative values of array
arc = ar.cumsum()
# Get cumsumed indices to be used to place differentiated values into
# input array's copy
idx = sizes.cumsum()
# Place differentiated values that, when cumulatively summed later on, would
# give us the desired intervaled cumsum
out[idx[0]] = ar[idx[0]] - arc[idx[0]-1]
out[idx[1:-1]] = ar[idx[1:-1]] - np.diff(arc[idx[:-1]-1])
return (np.where(np.diff(out) > 0)[0] + 1)
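# Worked example of the intervaled cumsum (values chosen arbitrarily):
# ar = np.array([10, 20, 30, 15, 5]), sizes = np.array([3, 2])
# The cumsum within the first group runs 10, 30, 60 and, with limit = 50,
# the modulo folds 60 back to 10. That increasing-then-dropping pattern is
# what _intervaled_cumsum2 examines via np.diff.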
# Start to break each group into sub-groups by taking the cumsum of each group's
# distance and resetting it once the distance threshold is met. The reset is done
# by using the modulo operator. In reality it does not reset, but the pattern changes
# from an increasing number to a smaller number, which acts as the reset.
stop_cand = np.hstack((((_intervaled_cumsum(stop_cand[:,1], counts))[...,np.newaxis]), stop_cand))
# Get the sub_group index and use it to assign a unique number, in our case -111111,
# back to the filtered array.
pattern = _intervaled_cumsum2(stop_cand[:,0], counts)
np.put(stop_cand[:,0], pattern, -111111)
# The subgroups are almost complete, but each sub-group contains one row that
# was not assigned the unique ident. Assign this row the unique ident.
old_cumsum_index = np.where(stop_cand[:,0] == -111111)
old_cumsum_index_shifted = old_cumsum_index[0] - 1
# Get the index that is not in one of these variables.
back_fill_index = np.setdiff1d(old_cumsum_index_shifted, old_cumsum_index)
# Create the complete index.
combined_indexes = np.unique(np.concatenate((old_cumsum_index[0],old_cumsum_index_shifted),0))
# Save the index of the previous stops that were not given a unique ident.
forgotten_guys = np.setdiff1d(np.arange(len(stop_cand)), combined_indexes)
# Create the unique idents.
np.put(stop_cand[:,0],np.where(stop_cand[:,0] == -111111), np.cumsum(np.not_equal((np.concatenate(([0], np.array(np.where(stop_cand[:,0] == -111111))[0])))[:-1], (np.concatenate(([0], np.array(np.where(stop_cand[:,0] == -111111))[0])))[1:]-1)))
np.put(stop_cand[:,0], back_fill_index, (stop_cand[:,0])[back_fill_index + 1] )
# Insert a unique ident for the previous stops that were not
# given a unique ident. This is not 100% mandatory, but it is good
# practice to avoid the unassigned stop ident having the same
# value as the preceding or following value, which would mess up the
# cumsum unique ident.
np.put(stop_cand[:,0], forgotten_guys, -111111)
# Add unique idents again. This fixes the problem of the previously unlabeled stop groups.
np.put(stop_cand[:,0], np.arange(len(stop_cand)), np.cumsum(np.not_equal((np.concatenate(([0], stop_cand[:,0])))[:-1], (np.concatenate(([0], stop_cand[:,0])))[1:])))
# Latitude mean center.
lat_column = np.float64(stop_cand[:,4])
lat_users = np.float64(stop_cand[:,0])
unames, idx, counts = np.unique(lat_users, return_inverse=True, return_counts=True)
sum_pred = np.bincount(idx, weights=lat_column)
mean_pred_lat = sum_pred / counts
# Add it to the array.
mean_pred_lat = mean_pred_lat[..., np.newaxis]
stop_cand = np.hstack((mean_pred_lat[idx],stop_cand))
# Longitude mean center.
lon_column = np.float64(stop_cand[:,6])
lon_users = np.float64(stop_cand[:,1])
unames, idx, counts = np.unique(lon_users, return_inverse=True, return_counts=True)
sum_pred = np.bincount(idx, weights=lon_column)
mean_pred_lon = sum_pred / counts
# Add it to the array.
mean_pred_lon = mean_pred_lon[..., np.newaxis]
stop_cand = np.hstack((mean_pred_lon[idx],stop_cand))
# Run the distance measurement again, but this time on the mean center of the intra-groups.
distance_2 = gislib.haversine_np(stop_cand[:,1],stop_cand[:,0], stop_cand[:,1][1:], stop_cand[:,0][1:])
distance_2 = distance_2[...,np.newaxis]
stop_cand = np.hstack((distance_2,stop_cand))
# Insert impossible distances between stop group edges.
unames, idx, counts = np.unique(stop_cand[:,4], return_inverse=True, return_counts=True)
group_breaks = np.cumsum(counts) - 1
np.put(stop_cand[:,0], group_breaks, 9999999)
# Make the groups again using a slightly larger distance threshold than the user previously specified.
# Use the original stop radius meters provided by the user, but increase it by 40%.
increased_radius = (stop_radius_meters * .40) + stop_radius_meters
temp_dist_2 = np.where(stop_cand[:,0] > increased_radius, stop_cand[:,0], (np.where(stop_cand[:,0] < increased_radius, -1111, np.nan)))
temp_dist_2 = temp_dist_2[..., np.newaxis]
stop_cand = np.hstack((temp_dist_2,stop_cand))
old_stop_index_2 = np.where(stop_cand[:,0] == -1111)
np.put(stop_cand[:,0],np.where(stop_cand[:,0] == -1111), np.cumsum(np.not_equal((np.concatenate(([0], np.array(np.where(stop_cand[:,0] == -1111))[0])))[:-1], (np.concatenate(([0], np.array(np.where(stop_cand[:,0] == -1111))[0])))[1:]-1)))
put_index_2 = old_stop_index_2[0]+1
put_values_2 = stop_cand[:,0][old_stop_index_2[0]]
np.put(stop_cand[:,0], put_index_2, put_values_2)
# Sometimes only one record is left over after the cumsum. This fixes that by
# identifying those records and assigning them to the above group.
unames, idx, counts = np.unique(stop_cand[:,4], return_inverse=True, return_counts=True)
group_breaks2 = np.cumsum(counts) - 1
np.put(stop_cand[:,0], group_breaks2, stop_cand[:,0][group_breaks2-1])
# Test these new groups for time.
filtered_array_time = stop_cand
filtered_array_time_diff = filtered_array_time[:,7][1:] - filtered_array_time[:,7][:-1]
filtered_array_time_diff = np.append(filtered_array_time_diff, np.timedelta64(0,"s"))
filtered_array_time_diff = filtered_array_time_diff.astype("timedelta64[ms]").astype(int)/1000
filtered_array_time_diff = filtered_array_time_diff[...,np.newaxis]
stop_cand = np.hstack((filtered_array_time_diff,stop_cand))
# make a copy of the time difference column that will be used later.
copied = stop_cand[:,0][...,np.newaxis]
stop_cand = np.hstack((copied, stop_cand))
# The edge of the new groups.
tester = np.where(np.diff(filtered_array_time[:,0])!=0)
np.put(stop_cand[:,1], tester[0], 0)
filtered_array_time_diff2 = stop_cand
time_column = np.float64(filtered_array_time_diff2[:,1])
users_3 = np.float64(filtered_array_time_diff2[:,2])
# assign integer indices to each unique user name, and get the total
# number of occurrences for each name
unames, idx, counts = np.unique(users_3, return_inverse=True, return_counts=True)
# now sum the values of pred corresponding to each index value
sum_pred = np.bincount(idx, weights=time_column)
add_time = sum_pred[idx]
add_time = add_time[...,np.newaxis]
filtered_array_time_diff2 = np.hstack((add_time, filtered_array_time_diff2))
# Identify stops that occur with just two points that might not be detected
# using the cumsum.
tester3 = np.where(np.diff(filtered_array_time_diff2[:,8])==1)[0]
np.put(filtered_array_time_diff2[:,1], tester3, 0)
# Add a new placeholder column made up of 1s.
filtered_array_time_diff2 = np.c_[np.ones(len(filtered_array_time_diff2)) ,filtered_array_time_diff2]
# Assign an ident to each row that meets this time threshold.
np.put(filtered_array_time_diff2[:,0],np.where(filtered_array_time_diff2[:,2] >= minutes_for_a_stop)[0],9999999)
# Will have to carry over the 9999999 to the row below. But first get rid
# of any 9999999 that is assigned to the edge of a group.
# Assign each group edge a value of 1.
# Now these are the edges of the original groups made from the first distance measurement.
np.put(filtered_array_time_diff2[:,0], np.where(np.diff(filtered_array_time_diff2[:,9]) == 1)[0], 1)
np.put(filtered_array_time_diff2[:,0],np.where(filtered_array_time_diff2[:,0] == 9999999)[0] + 1, 9999999)
# Assign ident back to array if two records are a stop and were not labeled as a stop.
np.put(filtered_array_time_diff2[:,1], np.where(np.logical_and(filtered_array_time_diff2[:,0]==9999999, filtered_array_time_diff2[:,1]<= minutes_for_a_stop))[0], 9999999)
# Place the newest group idents and group times back into the original array.
array = np.c_[np.ones(len(array)) ,array]
np.put(array[:,0],old_stop_index_complete,filtered_array_time_diff2[:,4])
array = np.c_[np.ones(len(array)) ,array]
np.put(array[:,0],old_stop_index_complete,filtered_array_time_diff2[:,1])
# filter the array to only include the groups that are over the stop limit time.
# Create new group idents for them and then add them back to the array.
real_stop = array[np.where(array[:,0] >= minutes_for_a_stop)]
np.put(real_stop[:,1], np.arange(len(real_stop)), np.cumsum(np.not_equal((np.concatenate(([0], real_stop[:,1])))[:-1], (np.concatenate(([0], real_stop[:,1])))[1:])))
# Need to recalculate the time because, if there were two-row stops found,
# their cumsum time would be 9999999.
second_time_diff = real_stop[:,4][1:] - real_stop[:,4][:-1]
second_time_diff = np.append(second_time_diff, np.timedelta64(0,"s"))
second_time_diff = second_time_diff.astype("timedelta64[ms]").astype(int)/1000
second_time_diff = second_time_diff[...,np.newaxis]
real_stop = np.hstack((second_time_diff,real_stop))
# The edge of the new groups.
tester4 = np.where(np.diff(real_stop[:,2])!=0)
np.put(real_stop[:,0], tester4[0], 0)
time_column = np.float64(real_stop[:,0])
users_3 = np.float64(real_stop[:,2])
# assign integer indices to each unique user name, and get the total
# number of occurrences for each name
unames, idx, counts = np.unique(users_3, return_inverse=True, return_counts=True)
# now sum the values of pred corresponding to each index value
sum_pred = np.bincount(idx, weights=time_column)
add_time = sum_pred[idx]
add_time = add_time[...,np.newaxis]
real_stop = np.hstack((add_time, real_stop))
# Calculate the mean center for each final stop group.
# Lat
lat_column = np.float64(real_stop[:,7])
lat_users = np.float64(real_stop[:,3])
unames, idx, counts = np.unique(lat_users, return_inverse=True, return_counts=True)
sum_pred = np.bincount(idx, weights=lat_column)
mean_pred_lat = sum_pred / counts
mean_pred_lat = mean_pred_lat[..., np.newaxis]
# Lon
lon_column = np.float64(real_stop[:,8])
lon_users = np.float64(real_stop[:,3])
unames, idx, counts = np.unique(lon_users, return_inverse=True, return_counts=True)
sum_pred = np.bincount(idx, weights=lon_column)
mean_pred_lon = sum_pred / counts
mean_pred_lon = mean_pred_lon[..., np.newaxis]
# Save the index of the final stop groups.
final_stop_index = (np.where(array[:,0] >= minutes_for_a_stop))[0]
# Place the sum time for each final stop group back into the array.
np.put(array[:,0], final_stop_index, real_stop[:,0])
# Do the same for their new idents. First have to add a column to the array
# made up of 0s. Zeros work well because the group starts at 1, so we can replace
# the zeroes easily.
add_zeros = np.zeros_like(array[:,0])
add_zeros = add_zeros[...,np.newaxis]
array = np.hstack((add_zeros, array))
np.put(array[:,0], final_stop_index, real_stop[:,3])
# Replace the 0s with -1, which is akin to dbscan.
np.put(array[:,0], np.where(array[:,0] == 0), -1)
# Put the mean center into the array.
array = np.hstack((add_zeros, array))
np.put(array[:,0], final_stop_index, mean_pred_lon[idx])
array = np.hstack((add_zeros, array))
np.put(array[:,0], final_stop_index, mean_pred_lat[idx])
# Remove unnecessary columns.
array = np.delete(array, [4,5,6], 1)
# Add the min and max timestamp for each stop group.
rstops = np.where(array[:,2] != -1)
rarray = array[rstops]
users = np.float64(rarray[:,2])
unames, idx, counts = np.unique(users, return_inverse=True, return_counts=True)
last_rows = (np.where(np.diff(rarray[:,2]) != 0))
last_rows = np.insert(last_rows[0], len(last_rows[0]), len(rarray) -1)
max_timestamp = rarray[:,4][last_rows]
max_timestamp = max_timestamp[idx]
max_timestamp = max_timestamp[...,np.newaxis]
rarray = np.hstack((max_timestamp,rarray))
first_rows = (np.where(np.diff(rarray[:,3]) != 0))[0]+1
first_rows = np.insert(first_rows, 0, 0)
min_timestamp = rarray[:,5][first_rows]
min_timestamp = min_timestamp[idx]
min_timestamp = min_timestamp[...,np.newaxis]
rarray = np.hstack((min_timestamp,rarray))
add_zeros = np.zeros_like(array[:,0])
add_zeros = add_zeros[...,np.newaxis]
array = np.hstack((add_zeros, array))
np.put(array[:,0], rstops, rarray[:,1])
add_zeros = np.zeros_like(array[:,0])
add_zeros = add_zeros[...,np.newaxis]
array = np.hstack((add_zeros, array))
np.put(array[:,0], rstops, rarray[:,0])
# Convert the array back into a traj dataframe and return to user.
final_tdf = TrajDataFrame(array,
latitude='latitude',
longitude="longitude",
datetime='timestamp',user_id="uid")
# Name the dataframe's columns.
new_column_names = ["min", "max", "mean_lat", "mean_lon", "group_ident", "total_seconds"]
new_column_names.extend(column_names)
final_tdf.columns = new_column_names
# Convert the "mean_lat" and "mean_lon" columns from float to np.float64.
final_tdf["mean_lat"] = np.float64(final_tdf["mean_lat"])
final_tdf["mean_lon"] = np.float64(final_tdf["mean_lon"])
return final_tdf
'''
| 47.552577 | 248 | 0.685557 | 3,575 | 23,063 | 4.265734 | 0.146294 | 0.033049 | 0.036787 | 0.030295 | 0.397115 | 0.345115 | 0.299803 | 0.268393 | 0.227607 | 0.225311 | 0 | 0.029063 | 0.206304 | 23,063 | 484 | 249 | 47.650826 | 0.804043 | 0.99935 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a5e032e73ecbbf37822517ea29519b68a3d99c9 | 1,701 | py | Python | tests/conftest.py | rarity-adventure/rarity-integration | 6cb142c129affd56066d7b9565a2f9055ac207f5 | [
"MIT"
] | null | null | null | tests/conftest.py | rarity-adventure/rarity-integration | 6cb142c129affd56066d7b9565a2f9055ac207f5 | [
"MIT"
] | null | null | null | tests/conftest.py | rarity-adventure/rarity-integration | 6cb142c129affd56066d7b9565a2f9055ac207f5 | [
"MIT"
] | 1 | 2022-02-09T06:04:44.000Z | 2022-02-09T06:04:44.000Z | import pytest
from brownie import *
from scripts.abis import *
@pytest.fixture(scope="function", autouse=True)
def isolate(fn_isolation):
pass
@pytest.fixture(scope="module")
def owner():
yield accounts[-1]
@pytest.fixture(scope="module")
def rm(owner):
yield Contract.from_abi("RM", "0xce761D788DF608BD21bdd59d6f4B54b2e27F25Bb", abi=rm_abi, owner=owner)
@pytest.fixture(scope="module")
def gold(owner):
yield Contract.from_abi("Gold", "0x2069B76Afe6b734Fb65D1d099E7ec64ee9CC76B2", abi=gold_abi, owner=owner)
@pytest.fixture(scope="module")
def cellar(owner):
yield Contract.from_abi("Cellar", "0x2A0F1cB17680161cF255348dDFDeE94ea8Ca196A", abi=cellar_abi, owner=owner)
@pytest.fixture(scope="module")
def attr(owner):
yield Contract.from_abi("Attr", "0xB5F5AF1087A8DA62A23b08C00C6ec9af21F397a1", abi=attr_abi, owner=owner)
@pytest.fixture(scope="module")
def daily(owner):
yield owner.deploy(rarity_daily)
@pytest.fixture(scope="module")
def summoners(owner, rm):
""" Level 1 """
summoners = []
for _ in range(3):
tx = rm.summon(1, {'from': owner})
summoners.append(tx.events['summoned']['summoner'])
yield summoners
@pytest.fixture(scope="module")
def summoners2(owner, rm):
""" Level 2 """
summoners = []
for _ in range(3):
tx = rm.summon(1, {'from': owner})
summoners.append(tx.events['summoned']['summoner'])
# adventure 4 times
for _ in range(4):
for s in summoners:
rm.adventure(s)
chain.sleep(60 * 60 * 25)
chain.mine()
yield summoners
@pytest.fixture(scope="module")
def lib():
yield rarity_library.deploy({'from': accounts[0]})
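# Illustrative test sketch (hypothetical; it would live in a separate test
# module and consume the fixtures above -- rm.xp() is assumed to be the
# manifest contract's public XP getter):
#
# def test_adventure_grants_xp(rm, summoners):
#     before = rm.xp(summoners[0])
#     rm.adventure(summoners[0])
#     assert rm.xp(summoners[0]) > before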
| 22.986486 | 112 | 0.678424 | 205 | 1,701 | 5.560976 | 0.292683 | 0.114035 | 0.157895 | 0.189474 | 0.509649 | 0.350877 | 0.350877 | 0.278947 | 0.138596 | 0.138596 | 0 | 0.079489 | 0.171664 | 1,701 | 73 | 113 | 23.30137 | 0.729595 | 0.021164 | 0 | 0.413043 | 0 | 0 | 0.175545 | 0.101695 | 0 | 0 | 0.101695 | 0 | 0 | 1 | 0.217391 | false | 0.021739 | 0.065217 | 0 | 0.282609 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a5efdb9f61ff3e9c2e3530570d8a0b085205fca | 88 | py | Python | notebooks/_solutions/pandas_06_groupby_operations30.py | rprops/Python_DS-WS | b2fc449a74be0c82863e5fcf1ddbe7d64976d530 | [
"BSD-3-Clause"
] | 183 | 2016-08-24T12:32:07.000Z | 2022-03-26T14:05:04.000Z | notebooks/_solutions/pandas_06_groupby_operations30.py | rprops/Python_DS-WS | b2fc449a74be0c82863e5fcf1ddbe7d64976d530 | [
"BSD-3-Clause"
] | 100 | 2016-12-15T03:44:06.000Z | 2022-03-07T08:14:07.000Z | notebooks/_solutions/pandas_06_groupby_operations30.py | rprops/Python_DS-WS | b2fc449a74be0c82863e5fcf1ddbe7d64976d530 | [
"BSD-3-Clause"
] | 204 | 2016-08-24T14:22:58.000Z | 2022-03-29T15:09:03.000Z | hamlets = titles[titles['title'].str.contains('Hamlet')]
hamlets['title'].value_counts() | 44 | 56 | 0.738636 | 11 | 88 | 5.818182 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034091 | 88 | 2 | 57 | 44 | 0.752941 | 0 | 0 | 0 | 0 | 0 | 0.179775 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a6a8ff6e86a9a345c3c7ef691650995c6d9fee1 | 13,408 | py | Python | lesson7.4/tensorflow/contrib/boosted_trees/python/ops/gen_training_ops.py | magnusmel/Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda | cc226deb7b46852407900f9fec0caf62638defe2 | [
"MIT"
] | 21 | 2018-12-11T20:07:47.000Z | 2021-11-08T13:12:32.000Z | lesson7.4/tensorflow/contrib/boosted_trees/python/ops/gen_training_ops.py | magnusmel/Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda | cc226deb7b46852407900f9fec0caf62638defe2 | [
"MIT"
] | 1 | 2020-07-07T21:30:02.000Z | 2020-07-08T18:16:03.000Z | lesson7.4/tensorflow/contrib/boosted_trees/python/ops/gen_training_ops.py | magnusmel/Serverless-Deep-Learning-with-TensorFlow-and-AWS-Lambda | cc226deb7b46852407900f9fec0caf62638defe2 | [
"MIT"
] | 15 | 2018-12-12T02:32:28.000Z | 2021-11-05T20:40:10.000Z | """Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
Original C++ source file: gen_training_ops_py.cc
"""
import collections as _collections
from tensorflow.python.eager import execute as _execute
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
def center_tree_ensemble_bias(tree_ensemble_handle, stamp_token, next_stamp_token, delta_updates, learner_config, centering_epsilon=0.01, name=None):
r"""Centers the tree ensemble bias before adding trees based on feature splits.
Args:
tree_ensemble_handle: A `Tensor` of type `resource`.
Handle to the ensemble variable.
stamp_token: A `Tensor` of type `int64`.
Stamp token for validating operation consistency.
next_stamp_token: A `Tensor` of type `int64`.
Stamp token to be used for the next iteration.
delta_updates: A `Tensor` of type `float32`.
Rank 1 Tensor containing delta updates per bias dimension.
learner_config: A `string`.
Config for the learner of type LearnerConfig proto.
centering_epsilon: An optional `float`. Defaults to `0.01`.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `bool`.
Scalar indicating whether more centering is needed.
"""
learner_config = _execute.make_str(learner_config, "learner_config")
if centering_epsilon is None:
centering_epsilon = 0.01
centering_epsilon = _execute.make_float(centering_epsilon, "centering_epsilon")
_ctx = _context.context()
if _ctx.in_graph_mode():
_, _, _op = _op_def_lib._apply_op_helper(
"CenterTreeEnsembleBias", tree_ensemble_handle=tree_ensemble_handle,
stamp_token=stamp_token, next_stamp_token=next_stamp_token,
delta_updates=delta_updates, learner_config=learner_config,
centering_epsilon=centering_epsilon, name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = ("learner_config", _op.get_attr("learner_config"),
"centering_epsilon", _op.get_attr("centering_epsilon"))
else:
tree_ensemble_handle = _ops.convert_to_tensor(tree_ensemble_handle, _dtypes.resource)
stamp_token = _ops.convert_to_tensor(stamp_token, _dtypes.int64)
next_stamp_token = _ops.convert_to_tensor(next_stamp_token, _dtypes.int64)
delta_updates = _ops.convert_to_tensor(delta_updates, _dtypes.float32)
_inputs_flat = [tree_ensemble_handle, stamp_token, next_stamp_token, delta_updates]
_attrs = ("learner_config", learner_config, "centering_epsilon",
centering_epsilon)
_result = _execute.execute(b"CenterTreeEnsembleBias", 1,
inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
name=name)
_execute.record_gradient(
"CenterTreeEnsembleBias", _inputs_flat, _attrs, _result, name)
_result, = _result
return _result
_ops.RegisterShape("CenterTreeEnsembleBias")(None)
def grow_tree_ensemble(tree_ensemble_handle, stamp_token, next_stamp_token, learning_rate, dropout_seed, partition_ids, gains, splits, learner_config, center_bias, name=None):
r"""Grows the tree ensemble by either adding a layer to the last tree being grown
or by starting a new tree.
Args:
tree_ensemble_handle: A `Tensor` of type `resource`.
Handle to the ensemble variable.
stamp_token: A `Tensor` of type `int64`.
Stamp token for validating operation consistency.
next_stamp_token: A `Tensor` of type `int64`.
Stamp token to be used for the next iteration.
learning_rate: A `Tensor` of type `float32`. Scalar learning rate.
dropout_seed: A `Tensor` of type `int64`.
partition_ids: A list of `Tensor` objects with type `int32`.
List of Rank 1 Tensors containing partition Id per candidate.
gains: A list with the same length as `partition_ids` of `Tensor` objects with type `float32`.
List of Rank 1 Tensors containing gains per candidate.
splits: A list with the same length as `partition_ids` of `Tensor` objects with type `string`.
List of Rank 1 Tensors containing serialized SplitInfo protos per candidate.
learner_config: A `string`.
Config for the learner of type LearnerConfig proto.
center_bias: A `bool`.
name: A name for the operation (optional).
Returns:
The created Operation.
"""
if not isinstance(partition_ids, (list, tuple)):
raise TypeError(
"Expected list for 'partition_ids' argument to "
"'grow_tree_ensemble' Op, not %r." % partition_ids)
_attr_num_handlers = len(partition_ids)
if not isinstance(gains, (list, tuple)):
raise TypeError(
"Expected list for 'gains' argument to "
"'grow_tree_ensemble' Op, not %r." % gains)
if len(gains) != _attr_num_handlers:
raise ValueError(
"List argument 'gains' to 'grow_tree_ensemble' Op with length %d "
"must match length %d of argument 'partition_ids'." %
(len(gains), _attr_num_handlers))
if not isinstance(splits, (list, tuple)):
raise TypeError(
"Expected list for 'splits' argument to "
"'grow_tree_ensemble' Op, not %r." % splits)
if len(splits) != _attr_num_handlers:
raise ValueError(
"List argument 'splits' to 'grow_tree_ensemble' Op with length %d "
"must match length %d of argument 'partition_ids'." %
(len(splits), _attr_num_handlers))
learner_config = _execute.make_str(learner_config, "learner_config")
center_bias = _execute.make_bool(center_bias, "center_bias")
_ctx = _context.context()
if _ctx.in_graph_mode():
_, _, _op = _op_def_lib._apply_op_helper(
"GrowTreeEnsemble", tree_ensemble_handle=tree_ensemble_handle,
stamp_token=stamp_token, next_stamp_token=next_stamp_token,
learning_rate=learning_rate, dropout_seed=dropout_seed,
partition_ids=partition_ids, gains=gains, splits=splits,
learner_config=learner_config, center_bias=center_bias, name=name)
return _op
else:
tree_ensemble_handle = _ops.convert_to_tensor(tree_ensemble_handle, _dtypes.resource)
stamp_token = _ops.convert_to_tensor(stamp_token, _dtypes.int64)
next_stamp_token = _ops.convert_to_tensor(next_stamp_token, _dtypes.int64)
learning_rate = _ops.convert_to_tensor(learning_rate, _dtypes.float32)
dropout_seed = _ops.convert_to_tensor(dropout_seed, _dtypes.int64)
partition_ids = _ops.convert_n_to_tensor(partition_ids, _dtypes.int32)
gains = _ops.convert_n_to_tensor(gains, _dtypes.float32)
splits = _ops.convert_n_to_tensor(splits, _dtypes.string)
_inputs_flat = [tree_ensemble_handle, stamp_token, next_stamp_token, learning_rate, dropout_seed] + list(partition_ids) + list(gains) + list(splits)
_attrs = ("learner_config", learner_config, "num_handlers",
_attr_num_handlers, "center_bias", center_bias)
_result = _execute.execute(b"GrowTreeEnsemble", 0, inputs=_inputs_flat,
attrs=_attrs, ctx=_ctx, name=name)
return _result
_ops.RegisterShape("GrowTreeEnsemble")(None)
_tree_ensemble_stats_outputs = ["num_trees", "num_layers", "active_tree",
"active_layer", "attempted_trees",
"attempted_layers"]
_TreeEnsembleStatsOutput = _collections.namedtuple(
"TreeEnsembleStats", _tree_ensemble_stats_outputs)
def tree_ensemble_stats(tree_ensemble_handle, stamp_token, name=None):
r"""Retrieves stats related to the tree ensemble.
Args:
tree_ensemble_handle: A `Tensor` of type `resource`.
Handle to the ensemble variable.
stamp_token: A `Tensor` of type `int64`.
Stamp token for validating operation consistency.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (num_trees, num_layers, active_tree, active_layer, attempted_trees, attempted_layers).
num_trees: A `Tensor` of type `int64`. Scalar indicating the number of finalized trees in the ensemble.
num_layers: A `Tensor` of type `int64`. Scalar indicating the number of layers in the ensemble.
active_tree: A `Tensor` of type `int64`. Scalar indicating the active tree being trained.
active_layer: A `Tensor` of type `int64`. Scalar indicating the active layer being trained.
attempted_trees: A `Tensor` of type `int64`.
attempted_layers: A `Tensor` of type `int64`.
"""
_ctx = _context.context()
if _ctx.in_graph_mode():
_, _, _op = _op_def_lib._apply_op_helper(
"TreeEnsembleStats", tree_ensemble_handle=tree_ensemble_handle,
stamp_token=stamp_token, name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = None
else:
tree_ensemble_handle = _ops.convert_to_tensor(tree_ensemble_handle, _dtypes.resource)
stamp_token = _ops.convert_to_tensor(stamp_token, _dtypes.int64)
_inputs_flat = [tree_ensemble_handle, stamp_token]
_attrs = None
_result = _execute.execute(b"TreeEnsembleStats", 6, inputs=_inputs_flat,
attrs=_attrs, ctx=_ctx, name=name)
_execute.record_gradient(
"TreeEnsembleStats", _inputs_flat, _attrs, _result, name)
_result = _TreeEnsembleStatsOutput._make(_result)
return _result
_ops.RegisterShape("TreeEnsembleStats")(None)
def _InitOpDefLibrary(op_list_proto_bytes):
op_list = _op_def_pb2.OpList()
op_list.ParseFromString(op_list_proto_bytes)
_op_def_registry.register_op_list(op_list)
op_def_lib = _op_def_library.OpDefLibrary()
op_def_lib.add_op_list(op_list)
return op_def_lib
# op {
# name: "CenterTreeEnsembleBias"
# input_arg {
# name: "tree_ensemble_handle"
# type: DT_RESOURCE
# }
# input_arg {
# name: "stamp_token"
# type: DT_INT64
# }
# input_arg {
# name: "next_stamp_token"
# type: DT_INT64
# }
# input_arg {
# name: "delta_updates"
# type: DT_FLOAT
# }
# output_arg {
# name: "continue_centering"
# type: DT_BOOL
# }
# attr {
# name: "learner_config"
# type: "string"
# }
# attr {
# name: "centering_epsilon"
# type: "float"
# default_value {
# f: 0.01
# }
# }
# is_stateful: true
# }
# op {
# name: "GrowTreeEnsemble"
# input_arg {
# name: "tree_ensemble_handle"
# type: DT_RESOURCE
# }
# input_arg {
# name: "stamp_token"
# type: DT_INT64
# }
# input_arg {
# name: "next_stamp_token"
# type: DT_INT64
# }
# input_arg {
# name: "learning_rate"
# type: DT_FLOAT
# }
# input_arg {
# name: "dropout_seed"
# type: DT_INT64
# }
# input_arg {
# name: "partition_ids"
# type: DT_INT32
# number_attr: "num_handlers"
# }
# input_arg {
# name: "gains"
# type: DT_FLOAT
# number_attr: "num_handlers"
# }
# input_arg {
# name: "splits"
# type: DT_STRING
# number_attr: "num_handlers"
# }
# attr {
# name: "learner_config"
# type: "string"
# }
# attr {
# name: "num_handlers"
# type: "int"
# has_minimum: true
# }
# attr {
# name: "center_bias"
# type: "bool"
# }
# is_stateful: true
# }
# op {
# name: "TreeEnsembleStats"
# input_arg {
# name: "tree_ensemble_handle"
# type: DT_RESOURCE
# }
# input_arg {
# name: "stamp_token"
# type: DT_INT64
# }
# output_arg {
# name: "num_trees"
# type: DT_INT64
# }
# output_arg {
# name: "num_layers"
# type: DT_INT64
# }
# output_arg {
# name: "active_tree"
# type: DT_INT64
# }
# output_arg {
# name: "active_layer"
# type: DT_INT64
# }
# output_arg {
# name: "attempted_trees"
# type: DT_INT64
# }
# output_arg {
# name: "attempted_layers"
# type: DT_INT64
# }
# is_stateful: true
# }
_op_def_lib = _InitOpDefLibrary(b"\n\304\001\n\026CenterTreeEnsembleBias\022\030\n\024tree_ensemble_handle\030\024\022\017\n\013stamp_token\030\t\022\024\n\020next_stamp_token\030\t\022\021\n\rdelta_updates\030\001\032\026\n\022continue_centering\030\n\"\030\n\016learner_config\022\006string\"!\n\021centering_epsilon\022\005float\032\005%\n\327#<\210\001\001\n\225\002\n\020GrowTreeEnsemble\022\030\n\024tree_ensemble_handle\030\024\022\017\n\013stamp_token\030\t\022\024\n\020next_stamp_token\030\t\022\021\n\rlearning_rate\030\001\022\020\n\014dropout_seed\030\t\022\037\n\rpartition_ids\030\003*\014num_handlers\022\027\n\005gains\030\001*\014num_handlers\022\030\n\006splits\030\007*\014num_handlers\"\030\n\016learner_config\022\006string\"\025\n\014num_handlers\022\003int(\001\"\023\n\013center_bias\022\004bool\210\001\001\n\256\001\n\021TreeEnsembleStats\022\030\n\024tree_ensemble_handle\030\024\022\017\n\013stamp_token\030\t\032\r\n\tnum_trees\030\t\032\016\n\nnum_layers\030\t\032\017\n\013active_tree\030\t\032\020\n\014active_layer\030\t\032\023\n\017attempted_trees\030\t\032\024\n\020attempted_layers\030\t\210\001\001")
| 39.551622 | 1,139 | 0.711665 | 1,802 | 13,408 | 4.966149 | 0.145394 | 0.05252 | 0.048274 | 0.026148 | 0.590792 | 0.519611 | 0.481953 | 0.416583 | 0.379819 | 0.344955 | 0 | 0.044942 | 0.186829 | 13,408 | 338 | 1,140 | 39.668639 | 0.775842 | 0.371495 | 0 | 0.335878 | 1 | 0.015267 | 0.195373 | 0.09279 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030534 | false | 0 | 0.083969 | 0 | 0.152672 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6a4f450d5559b5a3c1655e081f3e828c985fe7fb | 700 | py | Python | desktop/core/ext-py/nose-1.3.7/functional_tests/doc_tests/test_issue119/test_zeronine.py | kokosing/hue | 2307f5379a35aae9be871e836432e6f45138b3d9 | [
"Apache-2.0"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | desktop/core/ext-py/nose-1.3.7/functional_tests/doc_tests/test_issue119/test_zeronine.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | desktop/core/ext-py/nose-1.3.7/functional_tests/doc_tests/test_issue119/test_zeronine.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | import os
import unittest
from nose.plugins import Plugin
from nose.plugins.plugintest import PluginTester
from nose.plugins.manager import ZeroNinePlugin
here = os.path.abspath(os.path.dirname(__file__))
support = os.path.join(os.path.dirname(os.path.dirname(here)), 'support')
class EmptyPlugin(Plugin):
pass
class TestEmptyPlugin(PluginTester, unittest.TestCase):
activate = '--with-empty'
plugins = [ZeroNinePlugin(EmptyPlugin())]
suitepath = os.path.join(here, 'empty_plugin.rst')
def test_empty_zero_nine_does_not_crash(self):
print(self.output)
assert "'EmptyPlugin' object has no attribute 'loadTestsFromPath'" \
not in self.output
| 25.925926 | 76 | 0.732857 | 87 | 700 | 5.770115 | 0.517241 | 0.071713 | 0.089641 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164286 | 700 | 26 | 77 | 26.923077 | 0.85812 | 0 | 0 | 0 | 0 | 0 | 0.131617 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 0 | null | null | 0.058824 | 0.294118 | null | null | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
6a509393a4dbf1961515c1c6bef26de4dd386ac6 | 519 | py | Python | task_1/Test_ReadMatrix.py | Hickey3197/educoder | bf45cef420c7b1f1d052cb108e9be8e14a724068 | [
"Apache-2.0"
] | null | null | null | task_1/Test_ReadMatrix.py | Hickey3197/educoder | bf45cef420c7b1f1d052cb108e9be8e14a724068 | [
"Apache-2.0"
] | null | null | null | task_1/Test_ReadMatrix.py | Hickey3197/educoder | bf45cef420c7b1f1d052cb108e9be8e14a724068 | [
"Apache-2.0"
] | null | null | null | import numpy as np
import CreateMatrix
import WriteMatrix
import ReadMatrix
if __name__ == '__main__':
# Read "length rows cols" from stdin, e.g. "10 28 28".
test = input().split(' ')
length, rows, cols = map(int, test)
# Build the matrix list, write it out in idx3-ubyte format, then read it
# back to check the round trip.
matrix_list = CreateMatrix.createMatrixList(length, rows, cols)
WriteMatrix.writeMatrixList('/data/workspace/myshixun/creatematrix/data/data.idx3.ubyte', matrix_list)
matrix_list = ReadMatrix.readMatrixFromFile('/data/workspace/myshixun/creatematrix/data/data.idx3.ubyte')
print(type(matrix_list))
print(matrix_list) | 43.25 | 110 | 0.753372 | 61 | 519 | 6.196721 | 0.491803 | 0.132275 | 0.074074 | 0.174603 | 0.26455 | 0.26455 | 0.26455 | 0.26455 | 0 | 0 | 0 | 0.004425 | 0.129094 | 519 | 12 | 111 | 43.25 | 0.831858 | 0 | 0 | 0 | 0 | 0 | 0.24558 | 0.227898 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
6a50a109df5bca7b1e43c0b21bfde5707190bcfd | 1,311 | py | Python | electrum_blk/tests/__init__.py | nedcloud-blackchain/electrum-blk | bf1992ecac9fffcc52e229e249da400b8751324e | [
"MIT"
] | 2 | 2022-03-09T18:21:02.000Z | 2022-03-13T13:27:07.000Z | electrum_blk/tests/__init__.py | nedcloud-blackchain/electrum-blk | bf1992ecac9fffcc52e229e249da400b8751324e | [
"MIT"
] | null | null | null | electrum_blk/tests/__init__.py | nedcloud-blackchain/electrum-blk | bf1992ecac9fffcc52e229e249da400b8751324e | [
"MIT"
] | 1 | 2022-02-21T07:38:29.000Z | 2022-02-21T07:38:29.000Z | import unittest
import threading
import tempfile
import shutil
import electrum_blk as electrum
import electrum_blk.logging
from electrum_blk import constants
# Set this locally to make the test suite run faster.
# If set, unit tests that would normally test functions with multiple implementations,
# will only be run once, using the fastest implementation.
# e.g. libsecp256k1 vs python-ecdsa. pycryptodomex vs pyaes.
FAST_TESTS = False
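# Illustrative use of the flag (a sketch, not an existing helper in this
# package): a test that exercises a slower alternative implementation can
# consult FAST_TESTS and skip itself.
#
# import unittest
# from electrum_blk.tests import FAST_TESTS
#
# class TestEcdsaBackends(unittest.TestCase):
#     @unittest.skipIf(FAST_TESTS, "FAST_TESTS: fastest implementation only")
#     def test_sign_with_python_ecdsa(self):
#         ...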
electrum.logging._configure_stderr_logging()
# some unit tests are modifying globals...
class SequentialTestCase(unittest.TestCase):
test_lock = threading.Lock()
def setUp(self):
super().setUp()
self.test_lock.acquire()
def tearDown(self):
super().tearDown()
self.test_lock.release()
class ElectrumTestCase(SequentialTestCase):
"""Base class for our unit tests."""
def setUp(self):
super().setUp()
self.electrum_path = tempfile.mkdtemp()
def tearDown(self):
super().tearDown()
shutil.rmtree(self.electrum_path)
class TestCaseForTestnet(ElectrumTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
constants.set_testnet()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
constants.set_mainnet()
| 22.603448 | 86 | 0.705568 | 152 | 1,311 | 5.993421 | 0.519737 | 0.039517 | 0.037322 | 0.037322 | 0.118551 | 0.05708 | 0 | 0 | 0 | 0 | 0 | 0.003835 | 0.204424 | 1,311 | 57 | 87 | 23 | 0.869607 | 0.247902 | 0 | 0.30303 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017544 | 0 | 1 | 0.181818 | false | 0 | 0.212121 | 0 | 0.515152 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
6a5b25dc4f072646fbd6aeca82813d1aed80c3fe | 986 | py | Python | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewColumnStateChangedEventArgs.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewColumnStateChangedEventArgs.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/System/Windows/Forms/__init___parts/DataGridViewColumnStateChangedEventArgs.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | class DataGridViewColumnStateChangedEventArgs(EventArgs):
"""
Provides data for the System.Windows.Forms.DataGridView.ColumnStateChanged event.
DataGridViewColumnStateChangedEventArgs(dataGridViewColumn: DataGridViewColumn,stateChanged: DataGridViewElementStates)
"""
@staticmethod
def __new__(self, dataGridViewColumn, stateChanged):
""" __new__(cls: type,dataGridViewColumn: DataGridViewColumn,stateChanged: DataGridViewElementStates) """
pass
Column = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Gets the column whose state changed.
Get: Column(self: DataGridViewColumnStateChangedEventArgs) -> DataGridViewColumn
"""
StateChanged = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Gets the new column state.
Get: StateChanged(self: DataGridViewColumnStateChangedEventArgs) -> DataGridViewElementStates
"""
| 25.947368 | 121 | 0.721095 | 77 | 986 | 9.12987 | 0.441558 | 0.085349 | 0.136558 | 0.207681 | 0.170697 | 0.170697 | 0.170697 | 0.170697 | 0.170697 | 0.170697 | 0 | 0 | 0.191684 | 986 | 37 | 122 | 26.648649 | 0.882058 | 0.306288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.125 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
6a63cf0a3c01bcf631ccf8fe7452c54103a3e875 | 758 | py | Python | {{ cookiecutter.repo_name }}/{{ cookiecutter.module_name }}/tests/test_ci.py | farhanreynaldo/easydata | e168538e36b84cca11c6a0c2a8a78fbe824b3a72 | [
"MIT"
] | null | null | null | {{ cookiecutter.repo_name }}/{{ cookiecutter.module_name }}/tests/test_ci.py | farhanreynaldo/easydata | e168538e36b84cca11c6a0c2a8a78fbe824b3a72 | [
"MIT"
] | null | null | null | {{ cookiecutter.repo_name }}/{{ cookiecutter.module_name }}/tests/test_ci.py | farhanreynaldo/easydata | e168538e36b84cca11c6a0c2a8a78fbe824b3a72 | [
"MIT"
] | null | null | null | ## Test dataset information
import logging
import unittest
from {{ cookiecutter.module_name }}.data import Dataset
from {{ cookiecutter.module_name }} import workflow
from {{ cookiecutter.module_name }}.log import logger
import {{ cookiecutter.module_name }}.log.debug
class TestDatasetsSmall(unittest.TestCase):
"""
Basic smoke tests to ensure that the smaller (and more quickly processed)
available datasets load and have some expected property.
"""
def test_20_newsgroups(self):
ds = Dataset.load('20_newsgroups')
ds = Dataset.load('20_newsgroups')
assert len(ds.data) == 18846
assert len(ds.target) == 18846
def test_logging_is_debug_level():
assert logger.getEffectiveLevel() == logging.DEBUG
| 31.583333 | 77 | 0.724274 | 93 | 758 | 5.774194 | 0.526882 | 0.134078 | 0.163873 | 0.145251 | 0.09311 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025723 | 0.17942 | 758 | 23 | 78 | 32.956522 | 0.837621 | 0.031662 | 0 | 0.142857 | 0 | 0 | 0.04475 | 0 | 0 | 0 | 0 | 0 | 0.214286 | 0 | null | null | 0 | 0.428571 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
6a6d990ca0490c457536cf7ca29cf2405f69008d | 2,175 | py | Python | lib/ansiblelint/rules/EnvVarsInCommandRule.py | rvben/ansible-lint | 88e9114eeaff9758dfe491f93aa5a647969fdf51 | [
"MIT"
] | 1 | 2019-04-15T16:27:31.000Z | 2019-04-15T16:27:31.000Z | lib/ansiblelint/rules/EnvVarsInCommandRule.py | ellerbrock/ansible-lint | 88e9114eeaff9758dfe491f93aa5a647969fdf51 | [
"MIT"
] | null | null | null | lib/ansiblelint/rules/EnvVarsInCommandRule.py | ellerbrock/ansible-lint | 88e9114eeaff9758dfe491f93aa5a647969fdf51 | [
"MIT"
] | null | null | null | # Copyright (c) 2016 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint import AnsibleLintRule
from ansiblelint.utils import LINE_NUMBER_KEY, FILENAME_KEY
class EnvVarsInCommandRule(AnsibleLintRule):
id = '304'
shortdesc = "Environment variables don't work as part of command"
description = 'Environment variables should be passed to shell or ' \
'command through environment argument'
tags = ['command-shell', 'bug', 'ANSIBLE0014']
expected_args = ['chdir', 'creates', 'executable', 'removes', 'stdin', 'warn',
'cmd', '__ansible_module__', '__ansible_arguments__',
LINE_NUMBER_KEY, FILENAME_KEY]
def matchtask(self, file, task):
if task["action"]["__ansible_module__"] in ['shell', 'command']:
if 'cmd' in task['action']:
first_cmd_arg = task['action']['cmd'].split()[0]
else:
first_cmd_arg = task['action']['__ansible_arguments__'][0]
return any([arg not in self.expected_args for arg in task['action']] +
["=" in first_cmd_arg])
| 49.431818 | 82 | 0.698851 | 288 | 2,175 | 5.159722 | 0.517361 | 0.059219 | 0.022207 | 0.028264 | 0.060565 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007607 | 0.214253 | 2,175 | 43 | 83 | 50.581395 | 0.861908 | 0.492414 | 0 | 0 | 0 | 0 | 0.309963 | 0.038745 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0.052632 | 0.105263 | 0 | 0.526316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 2 |
6a720944250b29aaa478b4f30dd7c339c870d400 | 2,159 | py | Python | nnvm/python/nnvm/top/registry.py | coderzbx/seg-mxnet | a3ef65a1991d59c23b6111048fdc16d38e2862af | [
"Apache-2.0"
] | 4 | 2018-09-06T22:45:03.000Z | 2018-09-07T05:48:34.000Z | nnvm/python/nnvm/top/registry.py | coderzbx/seg-mxnet | a3ef65a1991d59c23b6111048fdc16d38e2862af | [
"Apache-2.0"
] | null | null | null | nnvm/python/nnvm/top/registry.py | coderzbx/seg-mxnet | a3ef65a1991d59c23b6111048fdc16d38e2862af | [
"Apache-2.0"
] | 1 | 2018-05-26T02:44:42.000Z | 2018-05-26T02:44:42.000Z | # pylint: disable=invalid-name
"""Information registry to register operator information for compiler"""
import tvm
class OpPattern(object):
"""Operator generic patterns
See Also
--------
top.tag : Contains explanation of the tag type.
"""
# Elementwise operator
ELEMWISE = 0
# Broadcast operator
BROADCAST = 1
# Injective mapping
INJECTIVE = 2
    # Communication reduction
COMM_REDUCE = 3
# Complex op, can still fuse ewise into it
OUT_ELEMWISE_FUSABLE = 4
# Not fusable opaque op
OPAQUE = 8
_register_compute = tvm.get_global_func("nnvm._register_compute")
_register_schedule = tvm.get_global_func("nnvm._register_schedule")
_register_pattern = tvm.get_global_func("nnvm._register_pattern")
def register_compute(op_name, f=None, level=10):
"""Register compute function for operator
Parameters
----------
op_name : str
The name of operator
f : function
        The compute function
level : int
The priority level
Returns
-------
fregister : function
Register function if f is not specified.
"""
def register(myf):
"""internal register function"""
_register_compute(op_name, myf, level)
return myf
return register(f) if f else register
def register_schedule(op_name, f=None, level=10):
"""Register schedule function for operator
Parameters
----------
op_name : str
The name of operator
f : function
The schedule function
level : int
The priority level
Returns
-------
fregister : function
Register function if f is not specified.
"""
def register(myf):
"""internal register function"""
_register_schedule(op_name, myf, level)
return myf
return register(f) if f else register
def register_pattern(op_name, pattern, level=10):
"""Register pattern code for operator
Parameters
----------
op_name : str
The name of operator
pattern : int
The pattern code.
level : int
The priority level
"""
_register_pattern(op_name, pattern, level)
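# Illustrative sketch (assumption, not part of the registry): how the three
# helpers are typically combined for a hypothetical "exp" operator. Left
# commented out because it needs a TVM build exposing the nnvm global
# functions. register_compute("exp") works as a decorator because it returns
# the inner `register` function when `f` is not given.
#
# @register_compute("exp")
# def compute_exp(attrs, inputs, out_info):
#     return [tvm.compute(inputs[0].shape, lambda *i: tvm.exp(inputs[0](*i)))]
#
# @register_schedule("exp")
# def schedule_exp(attrs, outs, target):
#     return tvm.create_schedule([x.op for x in outs])
#
# register_pattern("exp", OpPattern.ELEMWISE)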
| 22.489583 | 72 | 0.639185 | 254 | 2,159 | 5.279528 | 0.314961 | 0.040268 | 0.026846 | 0.035794 | 0.606264 | 0.588367 | 0.482476 | 0.443699 | 0.443699 | 0.443699 | 0 | 0.007624 | 0.270959 | 2,159 | 95 | 73 | 22.726316 | 0.844346 | 0.494673 | 0 | 0.26087 | 0 | 0 | 0.075366 | 0.075366 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0.043478 | 0 | 0.73913 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
6a73e5636359b0f2880d1517343a6ec1581baa3c | 8,104 | py | Python | testcases/elichika_tests/model/EspNet_AttLoc.py | vermashresth/chainer-compiler | 5f5ad365d14398d6ae0214fa012eb10360db8e7e | [
"MIT"
] | null | null | null | testcases/elichika_tests/model/EspNet_AttLoc.py | vermashresth/chainer-compiler | 5f5ad365d14398d6ae0214fa012eb10360db8e7e | [
"MIT"
] | null | null | null | testcases/elichika_tests/model/EspNet_AttLoc.py | vermashresth/chainer-compiler | 5f5ad365d14398d6ae0214fa012eb10360db8e7e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# AttLoc from EspNet's e2e_asr.py.
#
import argparse
import datetime
import logging
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
import chainer.links as L
from chainer import training
from chainer.training import extensions
from testcases.elichika_tests.utils import sequence_utils
# TODO(kan-bayashi): no need to use linear tensor
def linear_tensor(linear, x):
'''Apply linear matrix operation only for the last dimension of a tensor
:param Link linear: Linear link (M x N matrix)
:param Variable x: Tensor (D_1 x D_2 x ... x M matrix)
    :return: Tensor y (D_1 x D_2 x ... x N matrix)
    :rtype: Variable
'''
y = linear(F.reshape(x, (-1, x.shape[-1])))
return F.reshape(y, (x.shape[:-1] + (-1,)))
# EDIT(hamaji): Use linear_tensor_3d.
def linear_tensor_3d(linear, x):
'''Apply linear matrix operation only for the last dimension of a tensor
:param Link linear: Linear link (M x N matrix)
:param Variable x: Tensor (D_1 x D_2 x M matrix)
    :return: Tensor y (D_1 x D_2 x N matrix)
    :rtype: Variable
'''
return linear(x, n_batch_axes=2)
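# Illustrative sketch (assumption): a (M -> N) Linear applied to the last
# axis of a (B, T, M) tensor yields (B, T, N). For example:
#
#   lin = L.Linear(8, 4)
#   x = chainer.Variable(np.zeros((2, 5, 8), dtype=np.float32))
#   y = linear_tensor_3d(lin, x)  # y.shape == (2, 5, 4)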
# location based attention
class AttLoc(chainer.Chain):
def __init__(self, eprojs, dunits, att_dim, aconv_chans, aconv_filts):
super(AttLoc, self).__init__()
with self.init_scope():
self.mlp_enc = L.Linear(eprojs, att_dim)
self.mlp_dec = L.Linear(dunits, att_dim, nobias=True)
self.mlp_att = L.Linear(aconv_chans, att_dim, nobias=True)
self.loc_conv = L.Convolution2D(1, aconv_chans, ksize=(
1, 2 * aconv_filts + 1), pad=(0, aconv_filts))
self.gvec = L.Linear(att_dim, 1)
self.dunits = dunits
self.eprojs = eprojs
self.att_dim = att_dim
self.h_length = None
self.enc_h = None
self.pre_compute_enc_h = None
self.aconv_chans = aconv_chans
def reset(self):
'''reset states
:return:
'''
self.h_length = None
self.enc_h = None
self.pre_compute_enc_h = None
def forward(self, enc_hs, dec_z, att_prev):
'''AttLoc forward
:param enc_hs:
:param dec_z:
:param att_prev:
:return:
'''
# EDIT(hamaji): scaling is now a local variable.
scaling = 2.0
batch = len(enc_hs)
# pre-compute all h outside the decoder loop
if self.pre_compute_enc_h is None:
self.enc_h = F.pad_sequence(enc_hs) # utt x frame x hdim
self.h_length = self.enc_h.shape[1]
# utt x frame x att_dim
self.pre_compute_enc_h = linear_tensor_3d(self.mlp_enc, self.enc_h)
if dec_z is None:
dec_z = chainer.Variable(self.xp.zeros(
(batch, self.dunits), dtype=np.float32))
else:
dec_z = F.reshape(dec_z, (batch, self.dunits))
# initialize attention weight with uniform dist.
if att_prev is None:
att_prev = [self.xp.full(
hh.shape[0], 1.0 / hh.shape[0], dtype=np.float32) for hh in enc_hs]
att_prev = [chainer.Variable(att) for att in att_prev]
att_prev = F.pad_sequence(att_prev)
        # TODO(watanabe) use <chainer variable>.reshape(), instead of F.reshape()
# att_prev: utt x frame -> utt x 1 x 1 x frame -> utt x att_conv_chans x 1 x frame
att_conv = self.loc_conv(
F.reshape(att_prev, (batch, 1, 1, self.h_length)))
# att_conv: utt x att_conv_chans x 1 x frame -> utt x frame x att_conv_chans
att_conv = F.swapaxes(F.squeeze(att_conv, axis=2), 1, 2)
# att_conv: utt x frame x att_conv_chans -> utt x frame x att_dim
att_conv = linear_tensor_3d(self.mlp_att, att_conv)
# dec_z_tiled: utt x frame x att_dim
dec_z_tiled = F.broadcast_to(
F.expand_dims(self.mlp_dec(dec_z), 1), self.pre_compute_enc_h.shape)
# dot with gvec
# utt x frame x att_dim -> utt x frame
# TODO(watanabe) use batch_matmul
e = F.squeeze(linear_tensor_3d(self.gvec, F.tanh(
att_conv + self.pre_compute_enc_h + dec_z_tiled)), axis=2)
# Applying a minus-large-number filter to make a probability value zero for a padded area
# simply degrades the performance, and I gave up this implementation
# Apply a scaling to make an attention sharp
w = F.softmax(scaling * e)
        # weighted sum over frames
# utt x hdim
c = F.sum(self.enc_h * F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape), axis=1)
return c, w
def original(self, enc_hs, dec_z, att_prev, scaling=2.0):
'''AttLoc forward
:param enc_hs:
:param dec_z:
:param att_prev:
:param scaling:
:return:
'''
batch = len(enc_hs)
# pre-compute all h outside the decoder loop
if self.pre_compute_enc_h is None:
self.enc_h = F.pad_sequence(enc_hs) # utt x frame x hdim
self.h_length = self.enc_h.shape[1]
# utt x frame x att_dim
self.pre_compute_enc_h = linear_tensor(self.mlp_enc, self.enc_h)
if dec_z is None:
dec_z = chainer.Variable(self.xp.zeros(
(batch, self.dunits), dtype=np.float32))
else:
dec_z = F.reshape(dec_z, (batch, self.dunits))
# initialize attention weight with uniform dist.
if att_prev is None:
att_prev = [self.xp.full(
hh.shape[0], 1.0 / hh.shape[0], dtype=np.float32) for hh in enc_hs]
att_prev = [chainer.Variable(att) for att in att_prev]
att_prev = F.pad_sequence(att_prev)
        # TODO(watanabe) use <chainer variable>.reshape(), instead of F.reshape()
# att_prev: utt x frame -> utt x 1 x 1 x frame -> utt x att_conv_chans x 1 x frame
att_conv = self.loc_conv(
F.reshape(att_prev, (batch, 1, 1, self.h_length)))
# att_conv: utt x att_conv_chans x 1 x frame -> utt x frame x att_conv_chans
att_conv = F.swapaxes(F.squeeze(att_conv, axis=2), 1, 2)
# att_conv: utt x frame x att_conv_chans -> utt x frame x att_dim
att_conv = linear_tensor(self.mlp_att, att_conv)
# dec_z_tiled: utt x frame x att_dim
dec_z_tiled = F.broadcast_to(
F.expand_dims(self.mlp_dec(dec_z), 1), self.pre_compute_enc_h.shape)
# dot with gvec
# utt x frame x att_dim -> utt x frame
# TODO(watanabe) use batch_matmul
e = F.squeeze(linear_tensor(self.gvec, F.tanh(
att_conv + self.pre_compute_enc_h + dec_z_tiled)), axis=2)
# Applying a minus-large-number filter to make a probability value zero for a padded area
# simply degrades the performance, and I gave up this implementation
# Apply a scaling to make an attention sharp
w = F.softmax(scaling * e)
        # weighted sum over frames
# utt x hdim
c = F.sum(self.enc_h * F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape), axis=1)
return c, w
from chainer_compiler.elichika import testtools
def main():
import numpy as np
np.random.seed(314)
eprojs = 3
dunits = 4
att_dim = 5
batch_size = 3
sequence_length = 4
num_vocabs = 10
aconv_chans = 7
aconv_filts = 6
model_fn = lambda: AttLoc(eprojs, dunits, att_dim, aconv_chans, aconv_filts)
labels, ilens = sequence_utils.gen_random_sequence(
batch_size, sequence_length, num_vocabs)
xs = []
for l in ilens:
xs.append(np.random.rand(l, eprojs).astype(dtype=np.float32))
# Check if our modification is valid.
model = model_fn()
expected = model.original(xs, None, None)
model.reset()
actual = model.forward(xs, None, None)
for e, a in zip(expected, actual):
assert np.allclose(e.array, a.array)
testtools.generate_testcase(model_fn, [xs, None, None])
if __name__ == '__main__':
main()
| 34.781116 | 97 | 0.622038 | 1,268 | 8,104 | 3.77918 | 0.171136 | 0.021703 | 0.033806 | 0.029215 | 0.700751 | 0.691569 | 0.691569 | 0.683222 | 0.667154 | 0.667154 | 0 | 0.015097 | 0.280726 | 8,104 | 232 | 98 | 34.931034 | 0.806999 | 0.302813 | 0 | 0.453782 | 0 | 0 | 0.001475 | 0 | 0 | 0 | 0 | 0.012931 | 0.008403 | 1 | 0.058824 | false | 0 | 0.109244 | 0 | 0.210084 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6a77bfa00a2f36205b687fbb2714cddb08021a78 | 17,600 | py | Python | twilio/rest/api/v2010/account/available_phone_number/local.py | scotta/twilio-python | 93cf463f914f55c4c4bd1c259b834953dd81609d | [
"MIT"
] | 1 | 2021-02-24T04:59:03.000Z | 2021-02-24T04:59:03.000Z | twilio/rest/api/v2010/account/available_phone_number/local.py | scotta/twilio-python | 93cf463f914f55c4c4bd1c259b834953dd81609d | [
"MIT"
] | null | null | null | twilio/rest/api/v2010/account/available_phone_number/local.py | scotta/twilio-python | 93cf463f914f55c4c4bd1c259b834953dd81609d | [
"MIT"
] | 1 | 2018-12-09T00:53:21.000Z | 2018-12-09T00:53:21.000Z | # coding=utf-8
"""
This code was generated by
\ / _    _  _|   _  _
 | (_)\/(_)(_|\/| |(/_  v1.0.0
      /       /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class LocalList(ListResource):
""" """
def __init__(self, version, account_sid, country_code):
"""
Initialize the LocalList
:param Version version: Version that contains the resource
:param account_sid: The 34 character string that uniquely identifies your account.
:param country_code: The ISO Country code to lookup phone numbers for.
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalList
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalList
"""
super(LocalList, self).__init__(version)
# Path Solution
self._solution = {'account_sid': account_sid, 'country_code': country_code}
self._uri = '/Accounts/{account_sid}/AvailablePhoneNumbers/{country_code}/Local.json'.format(**self._solution)
def stream(self, area_code=values.unset, contains=values.unset,
sms_enabled=values.unset, mms_enabled=values.unset,
voice_enabled=values.unset,
exclude_all_address_required=values.unset,
exclude_local_address_required=values.unset,
exclude_foreign_address_required=values.unset, beta=values.unset,
near_number=values.unset, near_lat_long=values.unset,
distance=values.unset, in_postal_code=values.unset,
in_region=values.unset, in_rate_center=values.unset,
in_lata=values.unset, in_locality=values.unset,
fax_enabled=values.unset, limit=None, page_size=None):
"""
Streams LocalInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param unicode area_code: The area_code
:param unicode contains: The contains
:param bool sms_enabled: The sms_enabled
:param bool mms_enabled: The mms_enabled
:param bool voice_enabled: The voice_enabled
:param bool exclude_all_address_required: The exclude_all_address_required
:param bool exclude_local_address_required: The exclude_local_address_required
:param bool exclude_foreign_address_required: The exclude_foreign_address_required
:param bool beta: The beta
:param unicode near_number: The near_number
:param unicode near_lat_long: The near_lat_long
:param unicode distance: The distance
:param unicode in_postal_code: The in_postal_code
:param unicode in_region: The in_region
:param unicode in_rate_center: The in_rate_center
:param unicode in_lata: The in_lata
:param unicode in_locality: The in_locality
:param bool fax_enabled: The fax_enabled
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
area_code=area_code,
contains=contains,
sms_enabled=sms_enabled,
mms_enabled=mms_enabled,
voice_enabled=voice_enabled,
exclude_all_address_required=exclude_all_address_required,
exclude_local_address_required=exclude_local_address_required,
exclude_foreign_address_required=exclude_foreign_address_required,
beta=beta,
near_number=near_number,
near_lat_long=near_lat_long,
distance=distance,
in_postal_code=in_postal_code,
in_region=in_region,
in_rate_center=in_rate_center,
in_lata=in_lata,
in_locality=in_locality,
fax_enabled=fax_enabled,
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, area_code=values.unset, contains=values.unset,
sms_enabled=values.unset, mms_enabled=values.unset,
voice_enabled=values.unset, exclude_all_address_required=values.unset,
exclude_local_address_required=values.unset,
exclude_foreign_address_required=values.unset, beta=values.unset,
near_number=values.unset, near_lat_long=values.unset,
distance=values.unset, in_postal_code=values.unset,
in_region=values.unset, in_rate_center=values.unset,
in_lata=values.unset, in_locality=values.unset,
fax_enabled=values.unset, limit=None, page_size=None):
"""
Lists LocalInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param unicode area_code: The area_code
:param unicode contains: The contains
:param bool sms_enabled: The sms_enabled
:param bool mms_enabled: The mms_enabled
:param bool voice_enabled: The voice_enabled
:param bool exclude_all_address_required: The exclude_all_address_required
:param bool exclude_local_address_required: The exclude_local_address_required
:param bool exclude_foreign_address_required: The exclude_foreign_address_required
:param bool beta: The beta
:param unicode near_number: The near_number
:param unicode near_lat_long: The near_lat_long
:param unicode distance: The distance
:param unicode in_postal_code: The in_postal_code
:param unicode in_region: The in_region
:param unicode in_rate_center: The in_rate_center
:param unicode in_lata: The in_lata
:param unicode in_locality: The in_locality
:param bool fax_enabled: The fax_enabled
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance]
"""
return list(self.stream(
area_code=area_code,
contains=contains,
sms_enabled=sms_enabled,
mms_enabled=mms_enabled,
voice_enabled=voice_enabled,
exclude_all_address_required=exclude_all_address_required,
exclude_local_address_required=exclude_local_address_required,
exclude_foreign_address_required=exclude_foreign_address_required,
beta=beta,
near_number=near_number,
near_lat_long=near_lat_long,
distance=distance,
in_postal_code=in_postal_code,
in_region=in_region,
in_rate_center=in_rate_center,
in_lata=in_lata,
in_locality=in_locality,
fax_enabled=fax_enabled,
limit=limit,
page_size=page_size,
))
def page(self, area_code=values.unset, contains=values.unset,
sms_enabled=values.unset, mms_enabled=values.unset,
voice_enabled=values.unset, exclude_all_address_required=values.unset,
exclude_local_address_required=values.unset,
exclude_foreign_address_required=values.unset, beta=values.unset,
near_number=values.unset, near_lat_long=values.unset,
distance=values.unset, in_postal_code=values.unset,
in_region=values.unset, in_rate_center=values.unset,
in_lata=values.unset, in_locality=values.unset,
fax_enabled=values.unset, page_token=values.unset,
page_number=values.unset, page_size=values.unset):
"""
Retrieve a single page of LocalInstance records from the API.
Request is executed immediately
:param unicode area_code: The area_code
:param unicode contains: The contains
:param bool sms_enabled: The sms_enabled
:param bool mms_enabled: The mms_enabled
:param bool voice_enabled: The voice_enabled
:param bool exclude_all_address_required: The exclude_all_address_required
:param bool exclude_local_address_required: The exclude_local_address_required
:param bool exclude_foreign_address_required: The exclude_foreign_address_required
:param bool beta: The beta
:param unicode near_number: The near_number
:param unicode near_lat_long: The near_lat_long
:param unicode distance: The distance
:param unicode in_postal_code: The in_postal_code
:param unicode in_region: The in_region
:param unicode in_rate_center: The in_rate_center
:param unicode in_lata: The in_lata
:param unicode in_locality: The in_locality
:param bool fax_enabled: The fax_enabled
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
"""
params = values.of({
'AreaCode': area_code,
'Contains': contains,
'SmsEnabled': sms_enabled,
'MmsEnabled': mms_enabled,
'VoiceEnabled': voice_enabled,
'ExcludeAllAddressRequired': exclude_all_address_required,
'ExcludeLocalAddressRequired': exclude_local_address_required,
'ExcludeForeignAddressRequired': exclude_foreign_address_required,
'Beta': beta,
'NearNumber': near_number,
'NearLatLong': near_lat_long,
'Distance': distance,
'InPostalCode': in_postal_code,
'InRegion': in_region,
'InRateCenter': in_rate_center,
'InLata': in_lata,
'InLocality': in_locality,
'FaxEnabled': fax_enabled,
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return LocalPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of LocalInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return LocalPage(self._version, response, self._solution)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.LocalList>'
class LocalPage(Page):
""" """
def __init__(self, version, response, solution):
"""
Initialize the LocalPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The 34 character string that uniquely identifies your account.
:param country_code: The ISO Country code to lookup phone numbers for.
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalPage
"""
super(LocalPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of LocalInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
"""
return LocalInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
country_code=self._solution['country_code'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.LocalPage>'
class LocalInstance(InstanceResource):
""" """
def __init__(self, version, payload, account_sid, country_code):
"""
Initialize the LocalInstance
:returns: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.local.LocalInstance
"""
super(LocalInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'friendly_name': payload['friendly_name'],
'phone_number': payload['phone_number'],
'lata': payload['lata'],
'locality': payload['locality'],
'rate_center': payload['rate_center'],
'latitude': deserialize.decimal(payload['latitude']),
'longitude': deserialize.decimal(payload['longitude']),
'region': payload['region'],
'postal_code': payload['postal_code'],
'iso_country': payload['iso_country'],
'address_requirements': payload['address_requirements'],
'beta': payload['beta'],
'capabilities': payload['capabilities'],
}
# Context
self._context = None
self._solution = {'account_sid': account_sid, 'country_code': country_code}
@property
def friendly_name(self):
"""
:returns: The friendly_name
:rtype: unicode
"""
return self._properties['friendly_name']
@property
def phone_number(self):
"""
:returns: The phone_number
:rtype: unicode
"""
return self._properties['phone_number']
@property
def lata(self):
"""
:returns: The lata
:rtype: unicode
"""
return self._properties['lata']
@property
def locality(self):
"""
:returns: The locality
:rtype: unicode
"""
return self._properties['locality']
@property
def rate_center(self):
"""
:returns: The rate_center
:rtype: unicode
"""
return self._properties['rate_center']
@property
def latitude(self):
"""
:returns: The latitude
:rtype: unicode
"""
return self._properties['latitude']
@property
def longitude(self):
"""
:returns: The longitude
:rtype: unicode
"""
return self._properties['longitude']
@property
def region(self):
"""
:returns: The region
:rtype: unicode
"""
return self._properties['region']
@property
def postal_code(self):
"""
:returns: The postal_code
:rtype: unicode
"""
return self._properties['postal_code']
@property
def iso_country(self):
"""
:returns: The iso_country
:rtype: unicode
"""
return self._properties['iso_country']
@property
def address_requirements(self):
"""
:returns: The address_requirements
:rtype: unicode
"""
return self._properties['address_requirements']
@property
def beta(self):
"""
:returns: The beta
:rtype: bool
"""
return self._properties['beta']
@property
def capabilities(self):
"""
:returns: The capabilities
:rtype: unicode
"""
return self._properties['capabilities']
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.LocalInstance>'
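# Illustrative sketch (assumption, not generated code): these classes are
# normally reached through the REST client rather than constructed directly,
# e.g.
#
#   from twilio.rest import Client
#   client = Client(account_sid, auth_token)
#   for number in client.available_phone_numbers('US').local.list(
#           in_region='CA', limit=5):
#       print(number.phone_number)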
| 38.012959 | 118 | 0.640682 | 1,986 | 17,600 | 5.416918 | 0.106747 | 0.058282 | 0.018126 | 0.032534 | 0.692508 | 0.64947 | 0.63971 | 0.633575 | 0.621677 | 0.612103 | 0 | 0.006477 | 0.280682 | 17,600 | 462 | 119 | 38.095238 | 0.843286 | 0.407045 | 0 | 0.419192 | 1 | 0 | 0.099685 | 0.027003 | 0 | 0 | 0 | 0 | 0 | 1 | 0.121212 | false | 0 | 0.025253 | 0 | 0.267677 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6a7c74cf41954b62339ab3c8e3bcd0b900237231 | 341 | py | Python | frappe/www/robots.py | chentaoz/frappe | ee3c4943bf6177ad3b410cdb0d802af486751a65 | [
"MIT"
] | 5 | 2017-09-12T15:56:31.000Z | 2022-03-09T13:50:21.000Z | frappe/www/robots.py | chentaoz/frappe | ee3c4943bf6177ad3b410cdb0d802af486751a65 | [
"MIT"
] | 212 | 2017-08-16T13:03:18.000Z | 2020-10-06T12:26:21.000Z | frappe/www/robots.py | chentaoz/frappe | ee3c4943bf6177ad3b410cdb0d802af486751a65 | [
"MIT"
] | 14 | 2020-11-04T11:22:44.000Z | 2022-02-01T20:59:37.000Z | from __future__ import unicode_literals
import frappe
base_template_path = "templates/www/robots.txt"
def get_context(context):
robots_txt = (
frappe.db.get_single_value('Website Settings', 'robots_txt') or
(frappe.local.conf.robots_txt and frappe.read_file(frappe.local.conf.robots_txt)) or '')
return { 'robots_txt': robots_txt }
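# Illustrative sketch (assumption): the robots.txt body is resolved in this
# order — the "robots_txt" field of Website Settings, then a file whose path
# is given by the "robots_txt" key in site_config.json (the path value below
# is hypothetical), else an empty string:
#
#   { "robots_txt": "config/robots.txt" }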
| 28.416667 | 90 | 0.777126 | 50 | 341 | 4.96 | 0.56 | 0.254032 | 0.08871 | 0.169355 | 0.193548 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108504 | 341 | 11 | 91 | 31 | 0.815789 | 0 | 0 | 0 | 0 | 0 | 0.175953 | 0.070381 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0.25 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6a8070f53425f61a84d8414612a0ea546e5cfed8 | 2,090 | py | Python | geoflow1D/PhysicalPropertiesModule.py | HerminioTH/GeoFlow1D | 44a5c11e3297827b265c1ea44bb18256b074fa66 | [
"MIT"
] | 2 | 2020-02-10T11:23:16.000Z | 2020-07-01T20:28:57.000Z | geoflow1D/PhysicalPropertiesModule.py | HerminioTH/GeoFlow1D | 44a5c11e3297827b265c1ea44bb18256b074fa66 | [
"MIT"
] | null | null | null | geoflow1D/PhysicalPropertiesModule.py | HerminioTH/GeoFlow1D | 44a5c11e3297827b265c1ea44bb18256b074fa66 | [
"MIT"
] | null | null | null | from .UtilsModule import *
from .FieldsModule import *
import json
class PhysicalProperties(object):
def __init__(self, grid, folderName):
self.folderName = folderName
self.getFluidProps()
self.getSolidProps(grid)
def getFluidProps(self):
fluid = getJsonData(self.folderName + "fluid.json")
self.rho_f = fluid.get("WATER").get("Density")
self.mu = fluid.get("WATER").get("Viscosity")
self.cf = fluid.get("WATER").get("Compressibility")
def getSolidProps(self, grid):
solid = getJsonData(self.folderName + "solid.json")
self.k = ScalarField(grid.getNumberOfRegions())
self.phi = ScalarField(grid.getNumberOfRegions())
self.cs = ScalarField(grid.getNumberOfRegions())
self.rho_s = ScalarField(grid.getNumberOfRegions())
self.M = ScalarField(grid.getNumberOfRegions())
self.K = ScalarField(grid.getNumberOfRegions())
self.Q = ScalarField(grid.getNumberOfRegions())
self.biot = ScalarField(grid.getNumberOfRegions())
for region in grid.getRegions():
regionName = region.getName()
self.k.setValue(region, solid.get(regionName).get("Permeability"))
self.phi.setValue(region, solid.get(regionName).get("Porosity"))
self.cs.setValue(region, solid.get(regionName).get("Compressibility"))
self.rho_s.setValue(region, solid.get(regionName).get("Density"))
G = solid.get(regionName).get("ShearModulus")
nu = solid.get(regionName).get("PoissonsRatio")
CS = solid.get(regionName).get("Compressibility")
bulk = 2*G*(1 + nu)/(3*(1 - 2*nu))
pWave = bulk + 4*G/3.
alpha = 1 - CS*bulk
self.M.setValue(region, pWave)
self.K.setValue(region, bulk)
self.biot.setValue(region, alpha)
self.Q.setValue(region, 1./(self.cf*self.phi.getValue(region) + (alpha - self.phi.getValue(region))*CS))
self.rho = ScalarField(grid.getNumberOfRegions())
for region in grid.getRegions():
self.rho.setValue(region, self.phi.getValue(region)*self.rho_f + (1 - self.phi.getValue(region))*self.rho_s.getValue(region))
def getJsonData(data_file):
    with open(data_file, "r") as jsonFile:
        data = json.load(jsonFile)
    return data
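# Illustrative sketch (assumption, not part of the module): `grid` must
# expose getRegions()/getNumberOfRegions() and region.getName(), and the
# folder must hold fluid.json / solid.json with the fields read above.
# The grid factory and folder name below are hypothetical:
#
#   grid = buildGrid("examples/terzaghi/")
#   props = PhysicalProperties(grid, "examples/terzaghi/")
#   print(props.rho_f, props.mu)
#   for region in grid.getRegions():
#       print(props.K.getValue(region), props.biot.getValue(region))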
| 40.192308 | 128 | 0.719139 | 267 | 2,090 | 5.588015 | 0.247191 | 0.090483 | 0.199062 | 0.173592 | 0.299598 | 0.265416 | 0.077748 | 0.077748 | 0 | 0 | 0 | 0.005453 | 0.122488 | 2,090 | 51 | 129 | 40.980392 | 0.80807 | 0 | 0 | 0.043478 | 0 | 0 | 0.071292 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086957 | false | 0 | 0.065217 | 0 | 0.195652 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6a883cb764de8a5ef72540c2a9502f9758b1ae1e | 207 | py | Python | scripts/script_for_test_and_learn.py | zhaojw1998/Auto-mask-Music-Generative-Model-via-EC2-VAE-Disentanglement | 95782f88efe675205be79eb7c93b3a223c6bb8ef | [
"MIT"
] | null | null | null | scripts/script_for_test_and_learn.py | zhaojw1998/Auto-mask-Music-Generative-Model-via-EC2-VAE-Disentanglement | 95782f88efe675205be79eb7c93b3a223c6bb8ef | [
"MIT"
] | null | null | null | scripts/script_for_test_and_learn.py | zhaojw1998/Auto-mask-Music-Generative-Model-via-EC2-VAE-Disentanglement | 95782f88efe675205be79eb7c93b3a223c6bb8ef | [
"MIT"
] | null | null | null | import pretty_midi as pyd
import music21 as m21
import os
track_statistics={}
midi = 'results/presentation sample/Nottingham - 3.mid'
midi_data = pyd.PrettyMIDI(midi)
print(midi_data.instruments[1].notes) | 23 | 55 | 0.792271 | 31 | 207 | 5.16129 | 0.709677 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032609 | 0.111111 | 207 | 9 | 56 | 23 | 0.836957 | 0 | 0 | 0 | 0 | 0 | 0.221154 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.428571 | 0 | 0.428571 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
6a9177327e863a44a95ff92d53e1742c8bb89b64 | 828 | py | Python | example/13.Singel_motors/run_for_seconds.py | rundhall/PC-LEGO-SPIKE-Simulator | 5b2fae19293875b2f60d599940d77237700798d3 | [
"MIT"
] | null | null | null | example/13.Singel_motors/run_for_seconds.py | rundhall/PC-LEGO-SPIKE-Simulator | 5b2fae19293875b2f60d599940d77237700798d3 | [
"MIT"
] | null | null | null | example/13.Singel_motors/run_for_seconds.py | rundhall/PC-LEGO-SPIKE-Simulator | 5b2fae19293875b2f60d599940d77237700798d3 | [
"MIT"
] | null | null | null | '''run_for_seconds(seconds, speed=None)
Runs the motor for a specified number of seconds.
Parameters
seconds
The number of seconds for which the motor should run.
Type:float (decimal number)
Values:any number
Default:no default value
speed
The motor’s speed.
Type:integer (a positive or negative whole number, including 0)
Values:-100% to 100%
Default:If no value is specified, it will use the default speed that’s been set by set_default_speed().
Errors
TypeError
seconds is not a number or speed is not an integer.
RuntimeError
The motor has been disconnected from the Port.
Example
'''
from spike import Motor
import time
motor = Motor('A')
# Run clockwise for half a second at 100% speed
motor.run_for_seconds(0.5, 100)
time.sleep_ms(3000)
# Run clockwise for 3 more seconds at 100% speed
motor.run_for_seconds(3, 100) | 24.352941 | 103 | 0.781401 | 143 | 828 | 4.461538 | 0.475524 | 0.050157 | 0.061129 | 0.050157 | 0.0721 | 0 | 0 | 0 | 0 | 0 | 0 | 0.035562 | 0.150966 | 828 | 34 | 104 | 24.352941 | 0.871977 | 0.815217 | 0 | 0 | 0 | 0 | 0.006849 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
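# A negative speed runs the motor counterclockwise (sketch; same motor and
# port assumptions as the example above)
motor.run_for_seconds(2, -30)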
6a91d5f896f9590261f3ffbf0218c1175f712b64 | 916 | py | Python | route/www.py | apanly/python_learn_master | 93a214241812f77a006cc8350a7bad6c4eec6c89 | [
"BSD-3-Clause"
] | 5 | 2020-11-29T14:21:18.000Z | 2021-10-07T04:11:29.000Z | route/www.py | apanly/python_learn_master | 93a214241812f77a006cc8350a7bad6c4eec6c89 | [
"BSD-3-Clause"
] | null | null | null | route/www.py | apanly/python_learn_master | 93a214241812f77a006cc8350a7bad6c4eec6c89 | [
"BSD-3-Clause"
] | 2 | 2020-11-30T09:55:53.000Z | 2022-03-19T12:49:40.000Z | # -*- coding: utf-8 -*-
'''
专门为web程序准备的初始化入口
'''
from application import app
from common.components.helper.StaticPluginsHelper import StaticPluginsHelper
from common.components.helper.UtilHelper import UtilHelper
from common.services.GlobalUrlService import GlobalUrlService
from common.services.CommonConstant import CommonConstant
'''
toolbar
'''
# from flask_debugtoolbar import DebugToolbarExtension
# toolbar = DebugToolbarExtension(app)
'''
函数模板
'''
app.add_template_global(GlobalUrlService, 'GlobalUrlService')
app.add_template_global(StaticPluginsHelper, 'StaticPluginsHelper')
app.add_template_global(UtilHelper, 'UtilHelper')
app.add_template_global(CommonConstant, 'CommonConstant')
'''
统一拦截处理和统一错误处理
'''
from web.interceptors.AuthHome import *
from web.interceptors.AuthWWW import *
from web.interceptors.ErrorHandler import *
'''
蓝图功能,对所有的url进行蓝图功能配置
'''
from web.controllers.route import *
| 22.341463 | 76 | 0.80786 | 91 | 916 | 8.032967 | 0.373626 | 0.05472 | 0.076607 | 0.109439 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001203 | 0.092795 | 916 | 40 | 77 | 22.9 | 0.87846 | 0.14083 | 0 | 0 | 0 | 0 | 0.084406 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.692308 | 0 | 0.692308 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
6aab9a11ff3eb1e6982d436de4c21836b50a5fed | 752 | py | Python | src/Connection.py | duarteocarmo/technological_capabilities | 62a3911cf451c3f45afe093e7af8f2e76e0ac5a2 | [
"MIT"
] | 1 | 2018-01-29T14:06:10.000Z | 2018-01-29T14:06:10.000Z | src/Connection.py | duarteocarmo/technological_capabilities | 62a3911cf451c3f45afe093e7af8f2e76e0ac5a2 | [
"MIT"
] | 3 | 2020-03-24T17:38:08.000Z | 2021-06-02T00:29:26.000Z | src/Connection.py | duarteocarmo/technological_capabilities | 62a3911cf451c3f45afe093e7af8f2e76e0ac5a2 | [
"MIT"
] | null | null | null | from py2neo import Graph
from pandas import DataFrame
graph_url = "http://localhost:7474/db/data"
query = """ MATCH p1=(feed:Feedstock)<-[:CONTAINS]-(a1:Asset)-[:CONTAINS]->(proc:ProcessingTech)
MATCH p2=(proc:ProcessingTech)<-[:CONTAINS]-(a1:Asset)-[:CONTAINS]->(out:Output)
WITH feed.term AS Feedstock, proc.term AS Processing_Technology, out.term AS Output, count(p1) AS count
RETURN Feedstock, Processing_Technology, Output, count
ORDER BY count
DESC LIMIT 300"""
graph = Graph(graph_url)
# get data as a dump
# graph.run(query).dump()
# get data as dict
# output = graph.data(query)
# get data as Pd
a = DataFrame(graph.data(query))
print a
graph.run("CALL db.schema()").dump()
| 31.333333 | 116 | 0.668883 | 104 | 752 | 4.798077 | 0.442308 | 0.054108 | 0.054108 | 0.092184 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021346 | 0.19016 | 752 | 23 | 117 | 32.695652 | 0.79803 | 0.134309 | 0 | 0 | 0 | 0.076923 | 0.71938 | 0.303876 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.153846 | null | null | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6aaf513ab608fdd1a706d749f2efca7ca400cc01 | 1,477 | py | Python | plugins/ti.alloy/plugin.py | aaronksaunders/ci.alloy.adapter.one | baf4dd9b36411c5105ed09883062ef4068437890 | [
"Apache-2.0"
] | 1 | 2017-09-05T03:00:51.000Z | 2017-09-05T03:00:51.000Z | plugins/ti.alloy/plugin.py | aaronksaunders/ci.alloy.adapter.one | baf4dd9b36411c5105ed09883062ef4068437890 | [
"Apache-2.0"
] | null | null | null | plugins/ti.alloy/plugin.py | aaronksaunders/ci.alloy.adapter.one | baf4dd9b36411c5105ed09883062ef4068437890 | [
"Apache-2.0"
] | null | null | null |
import os, sys, subprocess, hashlib
def compile(config):
f = os.path.abspath(os.path.join(config['project_dir'], 'app'))
if os.path.exists(f):
print "[INFO] alloy app found at %s" % f
rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources'))
# FIXME path resolution
# FIXME - right now this works on OSX only
devicefamily = 'none'
simtype = 'none'
version = '0'
deploytype = 'development'
if config['platform']==u'ios':
version = config['iphone_version']
devicefamily = config['devicefamily']
deploytype = config['deploytype']
if config['platform']==u'android':
builder = config['android_builder']
version = builder.tool_api_level
deploytype = config['deploy_type']
if config['platform']==u'mobileweb':
builder = config['mobileweb_builder']
deploytype = config['deploytype']
cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s" % (config['platform'],version,simtype,devicefamily,deploytype)
cmd = ["/usr/local/bin/node","/usr/local/bin/alloy", "compile", f, "--no-colors", "--config", cfg]
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as ex:
print ex.output
print "[ERROR] Alloy compile failed"
sys.exit(ex.returncode)
| 43.441176 | 142 | 0.600542 | 164 | 1,477 | 5.353659 | 0.45122 | 0.034169 | 0.05467 | 0.058087 | 0.088838 | 0.088838 | 0.088838 | 0.088838 | 0.088838 | 0 | 0 | 0.000915 | 0.259986 | 1,477 | 33 | 143 | 44.757576 | 0.802379 | 0.041977 | 0 | 0.068966 | 0 | 0 | 0.267895 | 0.044649 | 0 | 0 | 0 | 0.030303 | 0 | 0 | null | null | 0 | 0.034483 | null | null | 0.103448 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6ab3df3af1f7a34f3bfafc8bf6af7056c511d0d9 | 10,753 | py | Python | examples/dataflow-python-examples/batch-examples/cookbook-examples/pipelines/data_lake_to_mart.py | sourcery-ai-bot/professional-services | 0dd87e18560894bc68c05b580c6c9f2322eabc47 | [
"Apache-2.0"
] | null | null | null | examples/dataflow-python-examples/batch-examples/cookbook-examples/pipelines/data_lake_to_mart.py | sourcery-ai-bot/professional-services | 0dd87e18560894bc68c05b580c6c9f2322eabc47 | [
"Apache-2.0"
] | null | null | null | examples/dataflow-python-examples/batch-examples/cookbook-examples/pipelines/data_lake_to_mart.py | sourcery-ai-bot/professional-services | 0dd87e18560894bc68c05b580c6c9f2322eabc47 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" data_lake_to_mart.py demonstrates a Dataflow pipeline which reads a
large BigQuery Table, joins in another dataset, and writes its contents to a
BigQuery table.
"""
import argparse
import logging
import os
import traceback
import apache_beam as beam
from apache_beam.io.gcp.bigquery import parse_table_schema_from_json
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.pvalue import AsDict
class DataLakeToDataMart:
"""A helper class which contains the logic to translate the file into
a format BigQuery will accept.
This example uses side inputs to join two datasets together.
"""
def __init__(self):
dir_path = os.path.dirname(os.path.realpath(__file__))
self.schema_str = ''
# This is the schema of the destination table in BigQuery.
schema_file = os.path.join(dir_path, 'resources',
'orders_denormalized.json')
with open(schema_file) as f:
data = f.read()
# Wrapping the schema in fields is required for the BigQuery API.
self.schema_str = '{"fields": ' + data + '}'
def get_orders_query(self):
"""This returns a query against a very large fact table. We are
using a fake orders dataset to simulate a fact table in a typical
data warehouse."""
return """SELECT
acct_number,
col_number,
col_number_1,
col_number_10,
col_number_100,
col_number_101,
col_number_102,
col_number_103,
col_number_104,
col_number_105,
col_number_106,
col_number_107,
col_number_108,
col_number_109,
col_number_11,
col_number_110,
col_number_111,
col_number_112,
col_number_113,
col_number_114,
col_number_115,
col_number_116,
col_number_117,
col_number_118,
col_number_119,
col_number_12,
col_number_120,
col_number_121,
col_number_122,
col_number_123,
col_number_124,
col_number_125,
col_number_126,
col_number_127,
col_number_128,
col_number_129,
col_number_13,
col_number_130,
col_number_131,
col_number_132,
col_number_133,
col_number_134,
col_number_135,
col_number_136,
col_number_14,
col_number_15,
col_number_16,
col_number_17,
col_number_18,
col_number_19,
col_number_2,
col_number_20,
col_number_21,
col_number_22,
col_number_23,
col_number_24,
col_number_25,
col_number_26,
col_number_27,
col_number_28,
col_number_29,
col_number_3,
col_number_30,
col_number_31,
col_number_32,
col_number_33,
col_number_34,
col_number_35,
col_number_36,
col_number_37,
col_number_38,
col_number_39,
col_number_4,
col_number_40,
col_number_41,
col_number_42,
col_number_43,
col_number_44,
col_number_45,
col_number_46,
col_number_47,
col_number_48,
col_number_49,
col_number_5,
col_number_50,
col_number_51,
col_number_52,
col_number_53,
col_number_54,
col_number_55,
col_number_56,
col_number_57,
col_number_58,
col_number_59,
col_number_6,
col_number_60,
col_number_61,
col_number_62,
col_number_63,
col_number_64,
col_number_65,
col_number_66,
col_number_67,
col_number_68,
col_number_69,
col_number_7,
col_number_70,
col_number_71,
col_number_72,
col_number_73,
col_number_74,
col_number_75,
col_number_76,
col_number_77,
col_number_78,
col_number_79,
col_number_8,
col_number_80,
col_number_81,
col_number_82,
col_number_83,
col_number_84,
col_number_85,
col_number_86,
col_number_87,
col_number_88,
col_number_89,
col_number_9,
col_number_90,
col_number_91,
col_number_92,
col_number_93,
col_number_94,
col_number_95,
col_number_96,
col_number_97,
col_number_98,
col_number_99,
col_number_num1,
date,
foo,
num1,
num2,
num3,
num5,
num6,
product_number,
quantity
FROM
`python-dataflow-example.example_data.orders` orders
LIMIT
10
"""
def add_account_details(self, row, account_details):
"""add_account_details joins two datasets together. Dataflow passes in the
a row from the orders dataset along with the entire account details dataset.
This works because the entire account details dataset can be passed in memory.
The function then looks up the account details, and adds all columns to a result
dictionary, which will be written to BigQuery."""
result = row.copy()
try:
result.update(account_details[row['acct_number']])
except KeyError as err:
traceback.print_exc()
logging.error("Account Not Found error: %s", err)
return result
def run(argv=None):
"""The main function which creates the pipeline and runs it."""
parser = argparse.ArgumentParser()
# Here we add some specific command line arguments we expect. S
# This defaults the output table in your BigQuery you'll have
# to create the example_data dataset yourself using bq mk temp
parser.add_argument('--output',
dest='output',
required=False,
help='Output BQ table to write results to.',
default='lake.orders_denormalized_sideinput')
# Parse arguments from the command line.
known_args, pipeline_args = parser.parse_known_args(argv)
# DataLakeToDataMart is a class we built in this script to hold the logic for
# transforming the file into a BigQuery table.
data_lake_to_data_mart = DataLakeToDataMart()
p = beam.Pipeline(options=PipelineOptions(pipeline_args))
schema = parse_table_schema_from_json(data_lake_to_data_mart.schema_str)
pipeline = beam.Pipeline(options=PipelineOptions(pipeline_args))
# This query returns details about the account, normalized into a
# different table. We will be joining the data in to the main orders dataset in order
# to create a denormalized table.
account_details_source = (
pipeline | 'Read Account Details from BigQuery ' >> beam.io.Read(
beam.io.BigQuerySource(
query="""
SELECT
acct_number,
acct_company_name,
acct_group_name,
acct_name,
acct_org_name,
address,
city,
state,
zip_code,
country
FROM
`python-dataflow-example.example_data.account`""",
# This next stage of the pipeline maps the acct_number to a single row of
# results from BigQuery. Mapping this way helps Dataflow move your data around
# to different workers. When later stages of the pipeline run, all results from
# a given account number will run on one worker.
use_standard_sql=True)) |
'Account Details' >> beam.Map(lambda row: (row['acct_number'], row)))
orders_query = data_lake_to_data_mart.get_orders_query()
(p
# Read the orders from BigQuery. This is the source of the pipeline. All further
# processing starts with rows read from the query results here.
| 'Read Orders from BigQuery ' >> beam.io.Read(
beam.io.BigQuerySource(query=orders_query, use_standard_sql=True))
# Here we pass in a side input, which is data that comes from outside our
# main source. The side input contains a map of states to their full name
| 'Join Data with sideInput' >> beam.Map(
data_lake_to_data_mart.add_account_details,
AsDict(account_details_source))
# This is the final stage of the pipeline, where we define the destination
# of the data. In this case we are writing to BigQuery.
| 'Write Data to BigQuery' >> beam.io.Write(
beam.io.BigQuerySink(
# The table name is a required argument for the BigQuery sink.
# In this case we use the value passed in from the command line.
known_args.output,
# Here we use the JSON schema read in from a JSON file.
# Specifying the schema allows the API to create the table correctly if it does not yet exist.
schema=schema,
# Creates the table in BigQuery if it does not yet exist.
create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
# Deletes all data in the BigQuery table before writing.
write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE)))
p.run().wait_until_finish()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
| 35.255738 | 107 | 0.591649 | 1,327 | 10,753 | 4.501884 | 0.335343 | 0.207901 | 0.00837 | 0.009374 | 0.088718 | 0.058587 | 0.015735 | 0.015735 | 0.015735 | 0 | 0 | 0.045533 | 0.354599 | 10,753 | 304 | 108 | 35.371711 | 0.815274 | 0.293221 | 0 | 0.017544 | 0 | 0 | 0.640525 | 0.019941 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017544 | false | 0 | 0.035088 | 0 | 0.065789 | 0.004386 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6ade9eae8d4476e2e4c4b2c1437ce75c2a2908da | 871 | py | Python | bloomon/utils/stream.py | belushkin/bloomon | 472dd48d297737335d114d770c27a6cac986c4e6 | [
"MIT"
] | null | null | null | bloomon/utils/stream.py | belushkin/bloomon | 472dd48d297737335d114d770c27a6cac986c4e6 | [
"MIT"
] | null | null | null | bloomon/utils/stream.py | belushkin/bloomon | 472dd48d297737335d114d770c27a6cac986c4e6 | [
"MIT"
] | null | null | null | import sys
class Stream(object):
"""
A class used to consume standard input by generator and then yield it line by line to the bouqet manager
Attributes
----------
manager : BouqetManager
Bouqet Manager instance
Methods
-------
streamReader()
Wrap and work with standard input
readStream()
Return stream generator
"""
def __init__(self, manager):
self.manager = manager
def streamReader(self):
""" Wrap and work with standard input
"""
for line in sys.stdin:
yield line
def readStream(self):
""" Returns generator of standard input
:return: input generator
:rtype: generator
"""
return (self.manager.manage(design.strip()) for design in self.streamReader())
| 22.921053 | 112 | 0.56946 | 90 | 871 | 5.466667 | 0.477778 | 0.105691 | 0.044715 | 0.060976 | 0.113821 | 0.113821 | 0 | 0 | 0 | 0 | 0 | 0 | 0.346728 | 871 | 37 | 113 | 23.540541 | 0.864675 | 0.463835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.111111 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
6ae38f84ef008552e4c222fb2a77a48eaf6b79c4 | 231 | py | Python | setup.py | zenador/sporebusbot | 2ccc214cd8da4f8d3931db7cabfc4fb8e5183a78 | [
"MIT"
] | null | null | null | setup.py | zenador/sporebusbot | 2ccc214cd8da4f8d3931db7cabfc4fb8e5183a78 | [
"MIT"
] | null | null | null | setup.py | zenador/sporebusbot | 2ccc214cd8da4f8d3931db7cabfc4fb8e5183a78 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(name='SporeBusBot',
version='1.0',
description='A Telegram bot for next bus timings in Singapore',
author='Zenador',
author_email='zenador9@gmail.com',
install_requires=['Flask>=0.10.1'],
)
| 23.1 | 64 | 0.74026 | 33 | 231 | 5.121212 | 0.878788 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033981 | 0.108225 | 231 | 9 | 65 | 25.666667 | 0.786408 | 0 | 0 | 0 | 0 | 0 | 0.4329 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.125 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6ae794da29c05e1b355f2d1d6ec88cd05da13856 | 1,802 | py | Python | cnn_block.py | mouhcineToumi/face-recognition | c203fd2faf5273c4d036f3d967398d1365d1ce6c | [
"Apache-2.0"
] | 1 | 2019-10-18T21:29:50.000Z | 2019-10-18T21:29:50.000Z | cnn_block.py | mouhcineToumi/face-recognition | c203fd2faf5273c4d036f3d967398d1365d1ce6c | [
"Apache-2.0"
] | null | null | null | cnn_block.py | mouhcineToumi/face-recognition | c203fd2faf5273c4d036f3d967398d1365d1ce6c | [
"Apache-2.0"
] | null | null | null | from keras.models import Sequential
from keras.layers import Conv2D, ZeroPadding2D, Activation, Input, concatenate
from keras.models import Model
from keras.layers.normalization import BatchNormalization
from keras.layers.pooling import MaxPooling2D, AveragePooling2D
from keras.layers.merge import Concatenate
from keras.layers.core import Lambda, Flatten, Dense
from keras.initializers import glorot_uniform
from keras.engine.topology import Layer
from keras import backend as K
K.set_image_data_format('channels_first')
import cv2
import os
import numpy as np
from numpy import genfromtxt
import pandas as pd
import tensorflow as tf
from fr_utils import *
from inception_blocks_v2 import *
# %matplotlib inline
# %load_ext autoreload
# %autoreload 2
#np.set_printoptions(threshold=np.nan)
def triplet_loss(y_true, y_pred, alpha = 0.2):
anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]
pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), axis=-1)
neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), axis=-1)
basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), alpha)
loss = tf.reduce_sum(tf.maximum(basic_loss, 0.0))
return loss
class CnnBlock:
"""docstring for CnnBlock"""
def __init__(self):
self.model = faceRecoModel( input_shape=(3, 96, 96) )
def load(self):
self.model.compile(optimizer = 'adam', loss = triplet_loss, metrics = ['accuracy'])
load_weights_from_FaceNet(self.model)
def encode_image(self, image):
return img_to_encoding(image, self.model)
def encoding_folder(image):
dic = []
path = "images/"+path
print(path)
l = os.listdir(path)
for file in l:
dic.append(self.encode_image(cv2.imread(path+file)))
return dic
| 29.064516 | 86 | 0.733629 | 260 | 1,802 | 4.934615 | 0.446154 | 0.070148 | 0.058457 | 0.030398 | 0.060795 | 0.060795 | 0.060795 | 0.060795 | 0.060795 | 0 | 0 | 0.014647 | 0.166482 | 1,802 | 61 | 87 | 29.540984 | 0.839547 | 0.063263 | 0 | 0 | 0 | 0 | 0.020433 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.119048 | false | 0 | 0.428571 | 0.02381 | 0.642857 | 0.02381 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
6aeca2dc8b7e36d1aa799b8a4d1e1a5b5b81f2e4 | 242 | py | Python | generator/generate_SDL_sensor.py | vaiorabbit/sdl2-bindings | 67b94873b41d78ae7b84907cc250fe7181093996 | [
"Zlib"
] | 6 | 2015-09-29T01:17:06.000Z | 2022-01-19T17:57:08.000Z | generator/generate_SDL_sensor.py | vaiorabbit/sdl2-bindings | 67b94873b41d78ae7b84907cc250fe7181093996 | [
"Zlib"
] | 1 | 2022-01-19T22:33:42.000Z | 2022-02-02T08:50:13.000Z | generator/generate_SDL_sensor.py | vaiorabbit/sdl2-bindings | 67b94873b41d78ae7b84907cc250fe7181093996 | [
"Zlib"
] | 1 | 2021-05-23T20:13:05.000Z | 2021-05-23T20:13:05.000Z | import sdl2_parser, sdl2_generator
if __name__ == "__main__":
ctx = sdl2_parser.ParseContext('./SDL2/SDL_sensor.h')
sdl2_parser.execute(ctx)
sdl2_generator.sanitize(ctx)
sdl2_generator.generate(ctx, module_name = 'sensor')
| 24.2 | 57 | 0.735537 | 31 | 242 | 5.225806 | 0.516129 | 0.185185 | 0.197531 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033816 | 0.144628 | 242 | 9 | 58 | 26.888889 | 0.748792 | 0 | 0 | 0 | 0 | 0 | 0.136364 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a7c26edef2097c5da17c88e3567809a35be9165 | 253 | py | Python | docs/demos/multi_page_basics/pages/outlook.py | ruxi/dash-labs | 991f8e479886672bb24dba9cf878dfd748777730 | [
"MIT"
] | 110 | 2021-04-16T14:41:54.000Z | 2022-03-24T22:29:41.000Z | docs/demos/multi_page_basics/pages/outlook.py | ruxi/dash-labs | 991f8e479886672bb24dba9cf878dfd748777730 | [
"MIT"
] | 59 | 2021-04-16T10:42:34.000Z | 2022-03-21T18:43:25.000Z | docs/demos/multi_page_basics/pages/outlook.py | ruxi/dash-labs | 991f8e479886672bb24dba9cf878dfd748777730 | [
"MIT"
] | 28 | 2021-04-16T16:26:32.000Z | 2022-03-28T17:32:42.000Z | import dash
dash.register_page(
    __name__,
    title="Forward Outlook",
    description="This is the forward outlook",  # should accept callable too
    path="/forward-outlook",
    image="birds.jpeg",
)


def layout():
    return "Forward outlook"
| 18.071429 | 76 | 0.679842 | 30 | 253 | 5.566667 | 0.766667 | 0.335329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.205534 | 253 | 13 | 77 | 19.461538 | 0.830846 | 0.102767 | 0 | 0 | 0 | 0 | 0.368889 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | true | 0 | 0.1 | 0.1 | 0.3 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a842a126a57f57ff2b86e1bdce6f75af329fa1a | 2,297 | py | Python | optimus/outliers/mad.py | atwoodjw/Optimus | 938463cec41a6683d2077c9afc7d6ba05c3b993f | [
"Apache-2.0"
] | null | null | null | optimus/outliers/mad.py | atwoodjw/Optimus | 938463cec41a6683d2077c9afc7d6ba05c3b993f | [
"Apache-2.0"
] | null | null | null | optimus/outliers/mad.py | atwoodjw/Optimus | 938463cec41a6683d2077c9afc7d6ba05c3b993f | [
"Apache-2.0"
] | null | null | null | from pyspark.sql import functions as F
from optimus.helpers.filters import dict_filter
from optimus.helpers.constants import RELATIVE_ERROR
class MAD:
    """
    Handle outliers using MAD (median absolute deviation).
    """

    def __init__(self, df, col_name, threshold, relative_error=RELATIVE_ERROR):
        """
        :param df:
        :param col_name:
        """
        self.df = df
        self.col_name = col_name
        self.threshold = threshold
        self.relative_error = relative_error

    def whiskers(self):
        """
        Get the whiskers used to define outliers
        :return:
        """
        mad_value = self.df.cols.mad(self.col_name, self.relative_error, more=True)
        lower_bound = mad_value["median"] - self.threshold * mad_value["mad"]
        upper_bound = mad_value["median"] + self.threshold * mad_value["mad"]

        return {"lower_bound": lower_bound, "upper_bound": upper_bound}

    def drop(self):
        col_name = self.col_name
        upper_bound, lower_bound = dict_filter(self.whiskers(), ["upper_bound", "lower_bound"])
        return self.df.rows.drop((F.col(col_name) > upper_bound) | (F.col(col_name) < lower_bound))

    def select(self):
        """
        Select outliers rows using the selected column
        :return:
        """
        col_name = self.col_name
        upper_bound, lower_bound = dict_filter(self.whiskers(), ["upper_bound", "lower_bound"])
        return self.df.rows.select((F.col(col_name) > upper_bound) | (F.col(col_name) < lower_bound))

    def count(self):
        """
        Count the outliers rows using the selected column
        :return:
        """
        return self.select().count()

    def non_outliers_count(self):
        """
        Count non outliers rows using the selected column
        :return:
        """
        return self.drop().count()

    def info(self):
        """
        Get whiskers, iqrs and outliers and non outliers count
        :return:
        """
        upper_bound, lower_bound = dict_filter(self.whiskers(),
                                               ["upper_bound", "lower_bound"])
        return {"count_outliers": self.count(), "count_non_outliers": self.non_outliers_count(),
                "lower_bound": lower_bound,
                "upper_bound": upper_bound}
| 31.040541 | 101 | 0.598172 | 274 | 2,297 | 4.777372 | 0.19708 | 0.069519 | 0.091673 | 0.091673 | 0.501146 | 0.501146 | 0.501146 | 0.470588 | 0.409473 | 0.26738 | 0 | 0 | 0.287331 | 2,297 | 73 | 102 | 31.465753 | 0.799633 | 0.148019 | 0 | 0.125 | 0 | 0 | 0.09122 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.21875 | false | 0 | 0.09375 | 0 | 0.53125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
0a8a4d64badfe359c54f89c27f2cd4f00d47dfdb | 260 | py | Python | E1.py | adilsonfuta/Scripty-Python | c4bf58bd9e48901fa95e180b0f3c01d9c162151d | [
"MIT"
] | null | null | null | E1.py | adilsonfuta/Scripty-Python | c4bf58bd9e48901fa95e180b0f3c01d9c162151d | [
"MIT"
] | null | null | null | E1.py | adilsonfuta/Scripty-Python | c4bf58bd9e48901fa95e180b0f3c01d9c162151d | [
"MIT"
] | null | null | null | # n = int(input('Enter the value'))
# print('The square {}'.format(n**2))
# print('The predecessor {} successor {}'.format(n+1, n-1))
dist = int(input('Enter the distance value'))
comb = int(input('Enter the fuel value'))
print('The average consumption {}'.format(dist/comb))
| 23.636364 | 53 | 0.657692 | 42 | 260 | 4.071429 | 0.452381 | 0.140351 | 0.210526 | 0.22807 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013043 | 0.115385 | 260 | 10 | 54 | 26 | 0.730435 | 0.446154 | 0 | 0 | 0 | 0 | 0.471429 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a8a9efa490507cac7adbbc734db8798a6c271d4 | 10,782 | py | Python | numba/cuda/tests/cudapy/test_gufunc.py | rs2/numba | ee78bfe3e66439197905551a451ea264704a3cdd | [
"BSD-2-Clause"
] | null | null | null | numba/cuda/tests/cudapy/test_gufunc.py | rs2/numba | ee78bfe3e66439197905551a451ea264704a3cdd | [
"BSD-2-Clause"
] | null | null | null | numba/cuda/tests/cudapy/test_gufunc.py | rs2/numba | ee78bfe3e66439197905551a451ea264704a3cdd | [
"BSD-2-Clause"
] | null | null | null | from __future__ import print_function, absolute_import
from timeit import default_timer as time
import numpy as np
import numpy.core.umath_tests as ut
from numba import void, float32, float64
from numba import guvectorize
from numba import cuda
from numba import unittest_support as unittest
from numba.cuda.testing import skip_on_cudasim
non_stream_speedups = []
stream_speedups = []
@skip_on_cudasim('ufunc API unsupported in the simulator')
class TestCUDAGufunc(unittest.TestCase):
    def test_gufunc_small(self):

        @guvectorize([void(float32[:, :], float32[:, :], float32[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore
        gufunc.max_blocksize = 512

        matrix_ct = 2
        A = np.arange(matrix_ct * 2 * 4, dtype=np.float32).reshape(matrix_ct, 2, 4)
        B = np.arange(matrix_ct * 4 * 5, dtype=np.float32).reshape(matrix_ct, 4, 5)

        ts = time()
        C = gufunc(A, B)
        tcuda = time() - ts

        ts = time()
        Gold = ut.matrix_multiply(A, B)
        tcpu = time() - ts

        non_stream_speedups.append(tcpu / tcuda)

        print(C, Gold)

        self.assertTrue(np.allclose(C, Gold))

    def test_gufunc_auto_transfer(self):

        @guvectorize([void(float32[:, :], float32[:, :], float32[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore
        gufunc.max_blocksize = 512

        matrix_ct = 2
        A = np.arange(matrix_ct * 2 * 4, dtype=np.float32).reshape(matrix_ct, 2, 4)
        B = np.arange(matrix_ct * 4 * 5, dtype=np.float32).reshape(matrix_ct, 4, 5)

        dB = cuda.to_device(B)

        ts = time()
        C = gufunc(A, dB).copy_to_host()
        tcuda = time() - ts

        ts = time()
        Gold = ut.matrix_multiply(A, B)
        tcpu = time() - ts

        non_stream_speedups.append(tcpu / tcuda)

        print(C, Gold)

        self.assertTrue(np.allclose(C, Gold))

    def test_gufunc(self):

        @guvectorize([void(float32[:, :], float32[:, :], float32[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore
        gufunc.max_blocksize = 512

        matrix_ct = 1001  # an odd number to test thread/block division in CUDA
        A = np.arange(matrix_ct * 2 * 4, dtype=np.float32).reshape(matrix_ct, 2, 4)
        B = np.arange(matrix_ct * 4 * 5, dtype=np.float32).reshape(matrix_ct, 4, 5)

        ts = time()
        C = gufunc(A, B)
        tcuda = time() - ts

        ts = time()
        Gold = ut.matrix_multiply(A, B)
        tcpu = time() - ts

        non_stream_speedups.append(tcpu / tcuda)

        self.assertTrue(np.allclose(C, Gold))

    def test_gufunc_hidim(self):

        @guvectorize([void(float32[:, :], float32[:, :], float32[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore
        gufunc.max_blocksize = 512

        matrix_ct = 100  # an odd number to test thread/block division in CUDA
        A = np.arange(matrix_ct * 2 * 4, dtype=np.float32).reshape(4, 25, 2, 4)
        B = np.arange(matrix_ct * 4 * 5, dtype=np.float32).reshape(4, 25, 4, 5)

        ts = time()
        C = gufunc(A, B)
        tcuda = time() - ts

        ts = time()
        Gold = ut.matrix_multiply(A, B)
        tcpu = time() - ts

        non_stream_speedups.append(tcpu / tcuda)

        self.assertTrue(np.allclose(C, Gold))

    def test_gufunc_new_axis(self):

        @guvectorize([void(float64[:, :], float64[:, :], float64[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore

        X = np.random.randn(10, 3, 3)
        Y = np.random.randn(3, 3)
        gold = ut.matrix_multiply(X, Y)

        res1 = gufunc(X, Y)
        np.testing.assert_allclose(gold, res1)

        res2 = gufunc(X, np.tile(Y, (10, 1, 1)))
        np.testing.assert_allclose(gold, res2)

    def test_gufunc_adjust_blocksize(self):

        @guvectorize([void(float32[:, :], float32[:, :], float32[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore
        gufunc.max_blocksize = 512

        matrix_ct = 1001  # an odd number to test thread/block division in CUDA
        A = np.arange(matrix_ct * 2 * 4, dtype=np.float32).reshape(matrix_ct, 2, 4)
        B = np.arange(matrix_ct * 4 * 5, dtype=np.float32).reshape(matrix_ct, 4, 5)

        gufunc.max_blocksize = 32
        C = gufunc(A, B)
        Gold = ut.matrix_multiply(A, B)
        self.assertTrue(np.allclose(C, Gold))

    def test_gufunc_stream(self):

        @guvectorize([void(float32[:, :], float32[:, :], float32[:, :])],
                     '(m,n),(n,p)->(m,p)',
                     target='cuda')
        def matmulcore(A, B, C):
            m, n = A.shape
            n, p = B.shape

            for i in range(m):
                for j in range(p):
                    C[i, j] = 0
                    for k in range(n):
                        C[i, j] += A[i, k] * B[k, j]

        gufunc = matmulcore
        gufunc.max_blocksize = 512

        #cuda.driver.flush_pending_free()
        matrix_ct = 1001  # an odd number to test thread/block division in CUDA
        A = np.arange(matrix_ct * 2 * 4, dtype=np.float32).reshape(matrix_ct, 2, 4)
        B = np.arange(matrix_ct * 4 * 5, dtype=np.float32).reshape(matrix_ct, 4, 5)

        ts = time()
        stream = cuda.stream()
        dA = cuda.to_device(A, stream)
        dB = cuda.to_device(B, stream)

        dC = cuda.device_array(shape=(1001, 2, 5), dtype=A.dtype, stream=stream)
        dC = gufunc(dA, dB, out=dC, stream=stream)
        C = dC.copy_to_host(stream=stream)
        stream.synchronize()

        tcuda = time() - ts

        ts = time()
        Gold = ut.matrix_multiply(A, B)
        tcpu = time() - ts

        stream_speedups.append(tcpu / tcuda)

        self.assertTrue(np.allclose(C, Gold))

    def test_copy(self):

        @guvectorize([void(float32[:], float32[:])],
                     '(x)->(x)',
                     target='cuda')
        def copy(A, B):
            for i in range(B.size):
                B[i] = A[i]

        A = np.arange(10, dtype=np.float32) + 1
        B = np.zeros_like(A)
        copy(A, out=B)
        self.assertTrue(np.allclose(A, B))

    def test_copy_odd(self):

        @guvectorize([void(float32[:], float32[:])],
                     '(x)->(x)',
                     target='cuda')
        def copy(A, B):
            for i in range(B.size):
                B[i] = A[i]

        A = np.arange(11, dtype=np.float32) + 1
        B = np.zeros_like(A)
        copy(A, out=B)
        self.assertTrue(np.allclose(A, B))

    def test_copy2d(self):

        @guvectorize([void(float32[:, :], float32[:, :])],
                     '(x, y)->(x, y)',
                     target='cuda')
        def copy2d(A, B):
            for x in range(B.shape[0]):
                for y in range(B.shape[1]):
                    B[x, y] = A[x, y]

        A = np.arange(30, dtype=np.float32).reshape(5, 6) + 1
        B = np.zeros_like(A)
        copy2d(A, out=B)
        self.assertTrue(np.allclose(A, B))

    def test_nopython_flag(self):

        def foo(A, B):
            pass

        # nopython = True is fine
        guvectorize([void(float32[:], float32[:])], '(x)->(x)', target='cuda',
                    nopython=True)(foo)

        # nopython = False is bad
        with self.assertRaises(TypeError) as raises:
            guvectorize([void(float32[:], float32[:])], '(x)->(x)',
                        target='cuda', nopython=False)(foo)
        self.assertEqual("nopython flag must be True", str(raises.exception))

    def test_invalid_flags(self):
        # Check invalid flags
        def foo(A, B):
            pass

        with self.assertRaises(TypeError) as raises:
            guvectorize([void(float32[:], float32[:])], '(x)->(x)',
                        target='cuda', what1=True, ever2=False)(foo)
        head = "The following target options are not supported:"
        msg = str(raises.exception)
        self.assertEqual(msg[:len(head)], head)
        items = msg[len(head):].strip().split(',')
        items = [i.strip("'\" ") for i in items]
        self.assertEqual(set(['what1', 'ever2']), set(items))


if __name__ == '__main__':
    unittest.main()
| 31.526316 | 80 | 0.455203 | 1,340 | 10,782 | 3.577612 | 0.123881 | 0.046725 | 0.043805 | 0.056946 | 0.718607 | 0.693158 | 0.677305 | 0.677305 | 0.677305 | 0.656654 | 0 | 0.037135 | 0.405583 | 10,782 | 341 | 81 | 31.618768 | 0.710875 | 0.028473 | 0 | 0.72549 | 0 | 0 | 0.034779 | 0 | 0 | 0 | 0 | 0 | 0.062745 | 1 | 0.094118 | false | 0.007843 | 0.035294 | 0 | 0.133333 | 0.011765 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0a9af480935217232e6723366082474cfdec69fb | 954 | py | Python | tests/test_Merchant.py | fraser-langton/Quandoo | 3a5e1241b645129d805213d01221ede8f2b79aa2 | [
"MIT"
] | 1 | 2019-08-08T11:05:28.000Z | 2019-08-08T11:05:28.000Z | tests/test_Merchant.py | fraser-langton/Quandoo | 3a5e1241b645129d805213d01221ede8f2b79aa2 | [
"MIT"
] | 1 | 2021-01-31T23:16:09.000Z | 2021-03-05T01:33:49.000Z | tests/test_Merchant.py | fraser-langton/Quandoo | 3a5e1241b645129d805213d01221ede8f2b79aa2 | [
"MIT"
] | 1 | 2020-08-19T09:06:42.000Z | 2020-08-19T09:06:42.000Z | import unittest
from quandoo.QuandooModel import QuandooDatetime
class TestMerchant(unittest.TestCase):

    def get_customers(self, offset=0, limit=100, modified_since: QuandooDatetime = None,
                      modified_until: QuandooDatetime = None):
        pass

    def get_available_times(self, pax: int, qdt: QuandooDatetime, duration=2, area_id=None):
        pass

    def is_available(self, pax: int, qdt: QuandooDatetime, duration=2, area_id=None):
        pass

    def get_reviews(self, offset=0, limit=10):
        pass

    def create_reservation(self, customer, pax: int, qdt: QuandooDatetime, area_id=None, order_id=None, extra_info=None,
                           reservation_tags=[]):
        pass

    def create_reservation_enquiry(self, customer, pax: int, start_qdt: QuandooDatetime, end_qdt: QuandooDatetime,
                                   message: str):
        pass

    def get_reservation_tags(self):
        pass
| 30.774194 | 120 | 0.655136 | 110 | 954 | 5.5 | 0.418182 | 0.069421 | 0.054545 | 0.119008 | 0.178512 | 0.178512 | 0.178512 | 0.178512 | 0.178512 | 0.178512 | 0 | 0.012766 | 0.261006 | 954 | 30 | 121 | 31.8 | 0.84539 | 0 | 0 | 0.35 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.35 | false | 0.35 | 0.1 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
0a9dc336313219fac5d5a71a889ccb04d278c2d0 | 11,089 | py | Python | test/dpt_tests/dpt_float_test.py | kistlin/xknx | a402c4609a8f3d5d96d83af271db52cb9006d6ec | [
"MIT"
] | 1 | 2022-03-06T14:42:39.000Z | 2022-03-06T14:42:39.000Z | test/dpt_tests/dpt_float_test.py | kistlin/xknx | a402c4609a8f3d5d96d83af271db52cb9006d6ec | [
"MIT"
] | 11 | 2022-03-14T22:32:38.000Z | 2022-03-31T22:35:08.000Z | test/dpt_tests/dpt_float_test.py | kistlin/xknx | a402c4609a8f3d5d96d83af271db52cb9006d6ec | [
"MIT"
] | null | null | null | """Unit test for KNX 2 and 4 byte float objects."""
import math
import struct
from unittest.mock import patch
import pytest
from xknx.dpt import (
    DPT2ByteFloat,
    DPT4ByteFloat,
    DPTElectricCurrent,
    DPTElectricPotential,
    DPTEnthalpy,
    DPTFrequency,
    DPTHumidity,
    DPTLux,
    DPTPartsPerMillion,
    DPTPhaseAngleDeg,
    DPTPower,
    DPTTemperature,
    DPTVoltage,
)
from xknx.exceptions import ConversionError


class TestDPTFloat:
    """Test class for KNX 2 & 4 byte/octet float object."""

    # ####################################################################
    # DPT2ByteFloat
    #
    def test_value_from_documentation(self):
        """Test parsing and streaming of DPT2ByteFloat -30.00. Example from the internet[tm]."""
        assert DPT2ByteFloat.to_knx(-30.00) == (0x8A, 0x24)
        assert DPT2ByteFloat.from_knx((0x8A, 0x24)) == -30.00

    def test_value_taken_from_live_thermostat(self):
        """Test parsing and streaming of DPT2ByteFloat 16.96."""
        assert DPT2ByteFloat.to_knx(16.96) == (0x06, 0xA0)
        assert DPT2ByteFloat.from_knx((0x06, 0xA0)) == 16.96

    def test_zero_value(self):
        """Test parsing and streaming of DPT2ByteFloat zero value."""
        assert DPT2ByteFloat.to_knx(0.00) == (0x00, 0x00)
        assert DPT2ByteFloat.from_knx((0x00, 0x00)) == 0.00

    def test_room_temperature(self):
        """Test parsing and streaming of DPT2ByteFloat 21.00. Room temperature."""
        assert DPT2ByteFloat.to_knx(21.00) == (0x0C, 0x1A)
        assert DPT2ByteFloat.from_knx((0x0C, 0x1A)) == 21.00

    def test_high_temperature(self):
        """Test parsing and streaming of DPT2ByteFloat 500.00, 499.84, 500.16. Testing rounding issues."""
        assert DPT2ByteFloat.to_knx(500.00) == (0x2E, 0x1A)
        assert round(abs(DPT2ByteFloat.from_knx((0x2E, 0x1A)) - 499.84), 7) == 0
        assert round(abs(DPT2ByteFloat.from_knx((0x2E, 0x1B)) - 500.16), 7) == 0
        assert DPT2ByteFloat.to_knx(499.84) == (0x2E, 0x1A)
        assert DPT2ByteFloat.to_knx(500.16) == (0x2E, 0x1B)

    def test_minor_negative_temperature(self):
        """Test parsing and streaming of DPT2ByteFloat -10.00. Testing negative values."""
        assert DPT2ByteFloat.to_knx(-10.00) == (0x84, 0x18)
        assert DPT2ByteFloat.from_knx((0x84, 0x18)) == -10.00

    def test_very_cold_temperature(self):
        """
        Test parsing and streaming of DPT2ByteFloat -1000.00, -999.68, -1000.32.

        Testing rounding issues of negative values.
        """
        assert DPT2ByteFloat.to_knx(-1000.00) == (0xB1, 0xE6)
        assert DPT2ByteFloat.from_knx((0xB1, 0xE6)) == -999.68
        assert DPT2ByteFloat.from_knx((0xB1, 0xE5)) == -1000.32
        assert DPT2ByteFloat.to_knx(-999.68) == (0xB1, 0xE6)
        assert DPT2ByteFloat.to_knx(-1000.32) == (0xB1, 0xE5)

    def test_max(self):
        """Test parsing and streaming of DPT2ByteFloat with maximum value."""
        assert DPT2ByteFloat.to_knx(DPT2ByteFloat.value_max) == (0x7F, 0xFF)
        assert DPT2ByteFloat.from_knx((0x7F, 0xFF)) == DPT2ByteFloat.value_max

    def test_min(self):
        """Test parsing and streaming of DPT2ByteFloat with minimum value."""
        assert DPT2ByteFloat.to_knx(DPT2ByteFloat.value_min) == (0xF8, 0x00)
        assert DPT2ByteFloat.from_knx((0xF8, 0x00)) == DPT2ByteFloat.value_min

    def test_close_to_max(self):
        """Test parsing and streaming of DPT2ByteFloat with maximum value -1."""
        assert DPT2ByteFloat.to_knx(670433.28) == (0x7F, 0xFE)
        assert DPT2ByteFloat.from_knx((0x7F, 0xFE)) == 670433.28

    def test_close_to_min(self):
        """Test parsing and streaming of DPT2ByteFloat with minimum value +1."""
        assert DPT2ByteFloat.to_knx(-670760.96) == (0xF8, 0x01)
        assert DPT2ByteFloat.from_knx((0xF8, 0x01)) == -670760.96

    def test_to_knx_min_exceeded(self):
        """Test parsing of DPT2ByteFloat with wrong value (underflow)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.to_knx(DPT2ByteFloat.value_min - 1)

    def test_to_knx_max_exceeded(self):
        """Test parsing of DPT2ByteFloat with wrong value (overflow)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.to_knx(DPT2ByteFloat.value_max + 1)

    def test_to_knx_wrong_parameter(self):
        """Test parsing of DPT2ByteFloat with wrong value (string)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.to_knx("fnord")

    def test_from_knx_wrong_parameter(self):
        """Test parsing of DPT2ByteFloat with wrong value (wrong number of bytes)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.from_knx((0xF8, 0x01, 0x23))

    def test_from_knx_wrong_parameter2(self):
        """Test parsing of DPT2ByteFloat with wrong value (second parameter is a string)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.from_knx((0xF8, "0x23"))

    #
    # DPTTemperature
    #
    def test_temperature_settings(self):
        """Test attributes of DPTTemperature."""
        assert DPTTemperature.value_min == -273
        assert DPTTemperature.value_max == 670760
        assert DPTTemperature.unit == "°C"
        assert DPTTemperature.resolution == 0.01

    def test_temperature_assert_min_exceeded(self):
        """Testing parsing of DPTTemperature with wrong value."""
        with pytest.raises(ConversionError):
            DPTTemperature.to_knx(-274)

    def test_temperature_assert_min_exceeded_from_knx(self):
        """Testing parsing of DPTTemperature with wrong value."""
        with pytest.raises(ConversionError):
            DPTTemperature.from_knx((0xB1, 0xE6))  # -1000

    #
    # DPTLux
    #
    def test_lux_settings(self):
        """Test attributes of DPTLux."""
        assert DPTLux.value_min == 0
        assert DPTLux.value_max == 670760
        assert DPTLux.unit == "lx"
        assert DPTLux.resolution == 0.01

    def test_lux_assert_min_exceeded(self):
        """Test parsing of DPTLux with wrong value."""
        with pytest.raises(ConversionError):
            DPTLux.to_knx(-1)

    #
    # DPTHumidity
    #
    def test_humidity_settings(self):
        """Test attributes of DPTHumidity."""
        assert DPTHumidity.value_min == 0
        assert DPTHumidity.value_max == 670760
        assert DPTHumidity.unit == "%"
        assert DPTHumidity.resolution == 0.01

    def test_humidity_assert_min_exceeded(self):
        """Test parsing of DPTHumidity with wrong value."""
        with pytest.raises(ConversionError):
            DPTHumidity.to_knx(-1)

    #
    # DPTEnthalpy
    #
    def test_enthalpy_settings(self):
        """Test attributes of DPTEnthalpy."""
        assert DPTEnthalpy.unit == "H"

    #
    # DPTPartsPerMillion
    #
    def test_partspermillion_settings(self):
        """Test attributes of DPTPartsPerMillion."""
        assert DPTPartsPerMillion.unit == "ppm"

    #
    # DPTVoltage
    #
    def test_voltage_settings(self):
        """Test attributes of DPTVoltage."""
        assert DPTVoltage.unit == "mV"

    # ####################################################################
    # DPT4ByteFloat
    #
    def test_4byte_float_values_from_power_meter(self):
        """Test parsing DPT4ByteFloat value from power meter."""
        assert DPT4ByteFloat.from_knx((0x43, 0xC6, 0x80, 00)) == 397
        assert DPT4ByteFloat.to_knx(397) == (0x43, 0xC6, 0x80, 00)
        assert DPT4ByteFloat.from_knx((0x42, 0x38, 0x00, 00)) == 46
        assert DPT4ByteFloat.to_knx(46) == (0x42, 0x38, 0x00, 00)

    def test_14_033(self):
        """Test parsing DPTFrequency unit."""
        assert DPTFrequency.unit == "Hz"

    def test_14_055(self):
        """Test DPTPhaseAngleDeg object."""
        assert DPT4ByteFloat.from_knx((0x42, 0xEF, 0x00, 0x00)) == 119.5
        assert DPT4ByteFloat.to_knx(119.5) == (0x42, 0xEF, 0x00, 0x00)
        assert DPTPhaseAngleDeg.unit == "°"

    def test_14_057(self):
        """Test DPT4ByteFloat object."""
        assert DPT4ByteFloat.from_knx((0x3F, 0x71, 0xEB, 0x86)) == 0.9450001
        assert DPT4ByteFloat.to_knx(0.945000052452) == (0x3F, 0x71, 0xEB, 0x86)
        assert DPT4ByteFloat.unit == ""

    def test_4byte_float_values_from_voltage_meter(self):
        """Test parsing DPT4ByteFloat from voltage meter."""
        assert DPT4ByteFloat.from_knx((0x43, 0x65, 0xE3, 0xD7)) == 229.89
        assert DPT4ByteFloat.to_knx(229.89) == (0x43, 0x65, 0xE3, 0xD7)

    def test_4byte_float_zero_value(self):
        """Test parsing and streaming of DPT4ByteFloat zero value."""
        assert DPT4ByteFloat.from_knx((0x00, 0x00, 0x00, 0x00)) == 0.00
        assert DPT4ByteFloat.to_knx(0.00) == (0x00, 0x00, 0x00, 0x00)

    def test_4byte_float_special_value(self):
        """Test parsing and streaming of DPT4ByteFloat special value."""
        assert math.isnan(DPT4ByteFloat.from_knx((0x7F, 0xC0, 0x00, 0x00)))
        assert DPT4ByteFloat.to_knx(float("nan")) == (0x7F, 0xC0, 0x00, 0x00)

        assert math.isinf(DPT4ByteFloat.from_knx((0x7F, 0x80, 0x00, 0x00)))
        assert DPT4ByteFloat.to_knx(float("inf")) == (0x7F, 0x80, 0x00, 0x00)

        assert DPT4ByteFloat.from_knx((0xFF, 0x80, 0x00, 0x00)) == float("-inf")
        assert DPT4ByteFloat.to_knx(float("-inf")) == (0xFF, 0x80, 0x00, 0x00)

        assert DPT4ByteFloat.from_knx((0x80, 0x00, 0x00, 0x00)) == float("-0")
        assert DPT4ByteFloat.to_knx(float("-0")) == (0x80, 0x00, 0x00, 0x00)

    def test_4byte_float_to_knx_wrong_parameter(self):
        """Test parsing of DPT4ByteFloat with wrong value (string)."""
        with pytest.raises(ConversionError):
            DPT4ByteFloat.to_knx("fnord")

    def test_4byte_float_from_knx_wrong_parameter(self):
        """Test parsing of DPT4ByteFloat with wrong value (wrong number of bytes)."""
        with pytest.raises(ConversionError):
            DPT4ByteFloat.from_knx((0xF8, 0x01, 0x23))

    def test_4byte_float_from_knx_wrong_parameter2(self):
        """Test parsing of DPT4ByteFloat with wrong value (second parameter is a string)."""
        with pytest.raises(ConversionError):
            DPT4ByteFloat.from_knx((0xF8, "0x23", 0x00, 0x00))

    def test_4byte_float_from_knx_unpack_error(self):
        """Test DPT4ByteFloat parsing with unpack error."""
        with patch("struct.unpack") as unpack_mock:
            unpack_mock.side_effect = struct.error()
            with pytest.raises(ConversionError):
                DPT4ByteFloat.from_knx((0x01, 0x23, 0x02, 0x02))

    #
    # DPTElectricCurrent
    #
    def test_electric_current_settings(self):
        """Test attributes of DPTElectricCurrent."""
        assert DPTElectricCurrent.unit == "A"

    #
    # DPTElectricPotential
    #
    def test_electric_potential_settings(self):
        """Test attributes of DPTElectricPotential."""
        assert DPTElectricPotential.unit == "V"

    #
    # DPTPower
    #
    def test_power_settings(self):
        """Test attributes of DPTPower."""
        assert DPTPower.unit == "W"
| 38.908772 | 106 | 0.649472 | 1,290 | 11,089 | 5.420155 | 0.149612 | 0.040046 | 0.055778 | 0.051487 | 0.55964 | 0.422483 | 0.361985 | 0.254863 | 0.164188 | 0.130149 | 0 | 0.090814 | 0.224457 | 11,089 | 284 | 107 | 39.045775 | 0.721977 | 0.220399 | 0 | 0.082278 | 0 | 0 | 0.008011 | 0 | 0 | 0 | 0.072824 | 0 | 0.462025 | 1 | 0.253165 | false | 0 | 0.037975 | 0 | 0.297468 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0ab3855122a966be72b6a48e34c590ce1431679e | 598 | py | Python | models/User.py | IT-Creastation/radar-sat-API | ebcf12ab4fb66e826e0f30888515ba6193951cf6 | [
"MIT"
] | null | null | null | models/User.py | IT-Creastation/radar-sat-API | ebcf12ab4fb66e826e0f30888515ba6193951cf6 | [
"MIT"
] | null | null | null | models/User.py | IT-Creastation/radar-sat-API | ebcf12ab4fb66e826e0f30888515ba6193951cf6 | [
"MIT"
] | null | null | null | from DB.database import Base
from sqlalchemy import Column, Integer, String, Float
from sqlalchemy.orm import relationship
class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    email = Column(String, unique=True, index=True)
    password = Column(String)
    # TODO: satellite should be an enum instead of String :p
    satellite = Column(String)
    download_image_from = Column(String)
    latitude = Column(Float)
    longitude = Column(Float)
    cloud_coverage = Column(Integer)
    images = relationship("Image", back_populates="user")
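
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Field values are illustrative; persisting requires a configured SQLAlchemy session.
# user = User(email="user@example.com", password="<hashed>", satellite="SENTINEL-2",
#             download_image_from="2021-01-01", latitude=48.2, longitude=16.4,
#             cloud_coverage=20)
# session.add(user); session.commit()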
| 28.47619 | 60 | 0.720736 | 74 | 598 | 5.702703 | 0.567568 | 0.113744 | 0.061611 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.187291 | 598 | 20 | 61 | 29.9 | 0.868313 | 0.090301 | 0 | 0 | 0 | 0 | 0.02583 | 0 | 0 | 0 | 0 | 0.05 | 0 | 1 | 0 | false | 0.071429 | 0.214286 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
0aba1c3bb2854ba97bd2e2cc8f39e8521576b412 | 8,739 | py | Python | sdk/python/pulumi_azure_nextgen/network/v20191201/route_filter.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/network/v20191201/route_filter.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/network/v20191201/route_filter.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['RouteFilter']
class RouteFilter(pulumi.CustomResource):
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 route_filter_name: Optional[pulumi.Input[str]] = None,
                 rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteFilterRuleArgs']]]]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Route Filter Resource.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] id: Resource ID.
        :param pulumi.Input[str] location: Resource location.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] route_filter_name: The name of the route filter.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RouteFilterRuleArgs']]]] rules: Collection of RouteFilterRules contained within a route filter.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        """
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['id'] = id
            __props__['location'] = location
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['route_filter_name'] = route_filter_name
            __props__['rules'] = rules
            __props__['tags'] = tags
            __props__['etag'] = None
            __props__['ipv6_peerings'] = None
            __props__['name'] = None
            __props__['peerings'] = None
            __props__['provisioning_state'] = None
            __props__['type'] = None
        alias_opts = pulumi.ResourceOptions(aliases=[
            pulumi.Alias(type_="azure-nextgen:network:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/latest:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20161201:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20170301:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20170601:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20170801:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20170901:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20171001:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20171101:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20180101:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20180201:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20180401:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20180601:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20180701:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20180801:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20181001:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20181101:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20181201:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20190201:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20190401:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20190601:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20190701:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20190801:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20190901:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20191101:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20200301:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20200401:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20200501:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20200601:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20200701:RouteFilter"),
            pulumi.Alias(type_="azure-nextgen:network/v20200801:RouteFilter")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(RouteFilter, __self__).__init__(
            'azure-nextgen:network/v20191201:RouteFilter',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'RouteFilter':
        """
        Get an existing RouteFilter resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        return RouteFilter(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        A unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter(name="ipv6Peerings")
    def ipv6_peerings(self) -> pulumi.Output[Sequence['outputs.ExpressRouteCircuitPeeringResponse']]:
        """
        A collection of references to express route circuit ipv6 peerings.
        """
        return pulumi.get(self, "ipv6_peerings")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Resource location.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def peerings(self) -> pulumi.Output[Sequence['outputs.ExpressRouteCircuitPeeringResponse']]:
        """
        A collection of references to express route circuit peerings.
        """
        return pulumi.get(self, "peerings")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[str]:
        """
        The provisioning state of the route filter resource.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter
    def rules(self) -> pulumi.Output[Optional[Sequence['outputs.RouteFilterRuleResponse']]]:
        """
        Collection of RouteFilterRules contained within a route filter.
        """
        return pulumi.get(self, "rules")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type.
        """
        return pulumi.get(self, "type")

    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
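
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Resource names and the resource group are illustrative placeholders.
# route_filter = RouteFilter("example-filter",
#                            resource_group_name="example-rg",
#                            location="westeurope")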
| 49.372881 | 2,117 | 0.674448 | 976 | 8,739 | 5.815574 | 0.1875 | 0.067653 | 0.107118 | 0.109232 | 0.487139 | 0.418957 | 0.363284 | 0.113108 | 0.041226 | 0.041226 | 0 | 0.035663 | 0.210665 | 8,739 | 176 | 2,118 | 49.653409 | 0.787185 | 0.171644 | 0 | 0.169811 | 1 | 0 | 0.293639 | 0.217688 | 0 | 0 | 0 | 0 | 0 | 1 | 0.122642 | false | 0.009434 | 0.075472 | 0.018868 | 0.320755 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0aba56fd9fded29cf6515cb58b81e92f26f4314e | 375 | py | Python | monitoring/monitorlib/locality.py | skyguide-ansp/dss | 4762d9a0c6a8f0e946e88fb43fb27c532af5bae9 | [
"Apache-2.0"
] | 2 | 2022-02-13T19:13:16.000Z | 2022-02-17T14:52:05.000Z | monitoring/monitorlib/locality.py | skyguide-ansp/dss | 4762d9a0c6a8f0e946e88fb43fb27c532af5bae9 | [
"Apache-2.0"
] | 1 | 2021-11-29T21:53:39.000Z | 2021-11-29T21:53:39.000Z | monitoring/monitorlib/locality.py | skyguide-ansp/dss | 4762d9a0c6a8f0e946e88fb43fb27c532af5bae9 | [
"Apache-2.0"
] | 1 | 2022-02-16T20:17:38.000Z | 2022-02-16T20:17:38.000Z | from enum import Enum
class Locality(str, Enum):
    """Operating locations and their respective regulation and technical variations."""

    CHE = 'CHE'
    """Switzerland"""

    @property
    def is_uspace_applicable(self) -> bool:
        return self in {Locality.CHE}

    @property
    def allow_same_priority_intersections(self) -> bool:
        return self in set()
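
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# loc = Locality("CHE")
# assert loc.is_uspace_applicable                  # U-space rules apply in Switzerland
# assert not loc.allow_same_priority_intersections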
| 23.4375 | 87 | 0.672 | 44 | 375 | 5.613636 | 0.681818 | 0.089069 | 0.11336 | 0.145749 | 0.161943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.229333 | 375 | 15 | 88 | 25 | 0.854671 | 0.205333 | 0 | 0.222222 | 0 | 0 | 0.010909 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.222222 | false | 0 | 0.111111 | 0.222222 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 2 |
0abaed6c0912e5df4f4d8cedcbb64a6eab48dd4e | 566 | py | Python | programmers/lv2/12909.py | KLumy/Basic-Algorithm | e52e4200c1955a9062569814ff3418dd06666845 | [
"MIT"
] | 1 | 2021-01-22T15:58:32.000Z | 2021-01-22T15:58:32.000Z | programmers/lv2/12909.py | KLumy/Basic-Algorithm | e52e4200c1955a9062569814ff3418dd06666845 | [
"MIT"
] | null | null | null | programmers/lv2/12909.py | KLumy/Basic-Algorithm | e52e4200c1955a9062569814ff3418dd06666845 | [
"MIT"
] | null | null | null | def solution(s: str) -> bool:
    stack = []
    left = 0
    right = 0
    for c in s:
        if not stack:
            if c == ")":
                return False
            stack.append(c)
            left += 1
            continue
        if c == ")":
            if stack[-1] == "(":
                stack.pop()
                right += 1
            else:
                return False
        else:
            stack.append(c)
            left += 1
    return left == right


if __name__ == "__main__":
    # i = "(())()"
    i = ")()("
    print(solution(i))
| 20.214286 | 32 | 0.355124 | 56 | 566 | 3.446429 | 0.446429 | 0.031088 | 0.124352 | 0.165803 | 0.176166 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021429 | 0.5053 | 566 | 27 | 33 | 20.962963 | 0.667857 | 0.021201 | 0 | 0.416667 | 0 | 0 | 0.027174 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041667 | false | 0 | 0 | 0 | 0.166667 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0abe56338575d1f8297900758c58773beea85885 | 3,046 | py | Python | extensions/base.py | Tang142857/MyEditor | 2d532eecfa6c48719cf6db99495a910ddd0ff52c | [
"MulanPSL-1.0"
] | null | null | null | extensions/base.py | Tang142857/MyEditor | 2d532eecfa6c48719cf6db99495a910ddd0ff52c | [
"MulanPSL-1.0"
] | null | null | null | extensions/base.py | Tang142857/MyEditor | 2d532eecfa6c48719cf6db99495a910ddd0ff52c | [
"MulanPSL-1.0"
] | null | null | null | """
Base extension for MyEditor
please create extension by using following classes
please override all the member function as possible as you can
copyright: DFSA Software Develop Center
@author: tang142857
"""
import importlib # for load extension function
class BaseExtension(object):
    """
    Extension class; all extensions inherit from this class.
    Please override every member function that is not protected.
    Please use the same init function, e.g. core_editor.
    Please add the functions you need to the child class; they will be added
    to the public interface (surely except protected functions).
    Need to override: on_load, un_load (core extensions use pass).
    """

    def __init__(self, interface):
        self.accessor = interface

    def _get_element(self, path):
        return self.accessor(path)

    def on_load(self, **arg):
        pass

    def un_load(self):
        pass


class BaseInterface(object):
    """
    Unified interface provided to apply. Notably, this base interface does
    not need to be subclassed.
    The interface has two jobs: 1. hold a reference to the extension so it is
    not garbage-collected; 2. expose a calling interface to the outside
    (binding events and the like).
    """

    def __init__(self, extension_obj):
        self._extension = extension_obj
        # point to extension's object, so that it won't disappear

    def on_load(self):
        """On load the extension, usually called by apply to activate it."""
        self._extension.on_load()


# extension service classes end
# following are the manage functions (just base functions, wrapped by others)
def _create_interface(extension_object):
    """
    Create public interface for apply
    :return: public interface: BaseInterface
    """
    public_interface = BaseInterface(extension_object)
    attributes_list = dir(extension_object)
    attributes_list.remove('on_load')
    attributes_list.remove('un_load')
    # needn't expose load/unload (usually they are only called by manage)
    for attribute_name in attributes_list:
        if not attribute_name.startswith('_'):
            attribute = getattr(extension_object, attribute_name)
            if callable(attribute):
                setattr(public_interface, attribute_name, attribute)
    return public_interface


def manage(kind: str, name: str, **args):
    """
    Import the extension's lib and build its interface; on_load is not called
    here (apply must do that).
    :kind: load or unload
    load: name: extension's name, accessor = get_element
    unload: name: extension's name, extensions_object = extensions_object
    :return: extension's interface, or None (for unload)
    """
    if kind == 'load':
        package_path = 'extensions'  # extension lib
        try:
            model_object = importlib.import_module('.'.join((package_path, name, 'main')))
            extension_object = model_object.Extension(args['accessor'])
            extension_interface = _create_interface(extension_object)
            return extension_interface
        except ImportError as msg:
            print(f'Load extension {name} failed, please check your extension.')
            print(msg)
            return None
    elif kind == 'unload':
        pass  # TODO unload extensions
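
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Assumes a package extensions/<name>/main.py defining an Extension class;
# `get_element` stands in for the accessor the host editor would supply.
# interface = manage('load', 'my_extension', accessor=get_element)
# if interface is not None:
#     interface.on_load()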
| 30.767677 | 105 | 0.688116 | 370 | 3,046 | 5.510811 | 0.367568 | 0.023541 | 0.01079 | 0.012751 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003422 | 0.232436 | 3,046 | 98 | 106 | 31.081633 | 0.868691 | 0.435982 | 0 | 0.075 | 0 | 0 | 0.066625 | 0 | 0 | 0 | 0 | 0.010204 | 0 | 1 | 0.2 | false | 0.075 | 0.075 | 0.025 | 0.425 | 0.05 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
0acc3fca5a756434d83792f42f5ede9d08449581 | 125 | py | Python | PESTO-test/client-test/test.py | AIT-IES/PESTO | ebd6f5e57060f62b138fa4c1fa5a9aaacce8cf56 | [
"BSD-2-Clause"
] | 1 | 2018-02-27T10:49:49.000Z | 2018-02-27T10:49:49.000Z | PESTO-test/client-test/test.py | AIT-IES/PESTO | ebd6f5e57060f62b138fa4c1fa5a9aaacce8cf56 | [
"BSD-2-Clause"
] | null | null | null | PESTO-test/client-test/test.py | AIT-IES/PESTO | ebd6f5e57060f62b138fa4c1fa5a9aaacce8cf56 | [
"BSD-2-Clause"
] | null | null | null | import socket
hostname = socket.gethostname()
print('Executed test.py on ' + hostname)
input('Press Enter to continue..') | 17.857143 | 41 | 0.728 | 16 | 125 | 5.6875 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.144 | 125 | 7 | 42 | 17.857143 | 0.850467 | 0 | 0 | 0 | 0 | 0 | 0.357143 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.25 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0ad0b1e123feb1b29d3fdc68db7d9512f548165a | 225 | py | Python | settings.py | ghostishev/async-image-uploader | 1e7b54d35dc3cb620d88a370f08fea7fa1ba6e39 | [
"BSD-2-Clause"
] | null | null | null | settings.py | ghostishev/async-image-uploader | 1e7b54d35dc3cb620d88a370f08fea7fa1ba6e39 | [
"BSD-2-Clause"
] | null | null | null | settings.py | ghostishev/async-image-uploader | 1e7b54d35dc3cb620d88a370f08fea7fa1ba6e39 | [
"BSD-2-Clause"
] | null | null | null | PORT = 80
DEBUG = False
MYSQL_HOST = 'localhost'
MYSQL_DATABASE = 'images'
MYSQL_LOGIN = 'root'
MYSQL_PASSWORD = 'root'
SELF_HOST = 'http://localhost:8000'
try:
    from settings_local import *
except ImportError:
    pass
| 16.071429 | 35 | 0.72 | 29 | 225 | 5.37931 | 0.793103 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032258 | 0.173333 | 225 | 13 | 36 | 17.307692 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0.195556 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.181818 | 0.181818 | 0 | 0.181818 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
0ad56ecd3d835d64fd15387cf4880690c31b2a2c | 151 | py | Python | Chapter 1/Ch1_Q20_sumofdigits.py | inshaal/CBSE_NCERT_SOLUTIONS | 0804c2b42e80ccf42ad7dc4d91998848529e216d | [
"Unlicense"
] | null | null | null | Chapter 1/Ch1_Q20_sumofdigits.py | inshaal/CBSE_NCERT_SOLUTIONS | 0804c2b42e80ccf42ad7dc4d91998848529e216d | [
"Unlicense"
] | null | null | null | Chapter 1/Ch1_Q20_sumofdigits.py | inshaal/CBSE_NCERT_SOLUTIONS | 0804c2b42e80ccf42ad7dc4d91998848529e216d | [
"Unlicense"
] | null | null | null | # Find the sum of all digits entered by the user.
# Ported to Python 3: input() returns a string and / is float division,
# so the value is converted with int() and // is used for integer division.
a = int(input("Enter a number : "))
k = a
s = 0
while k > 0:
    c = k % 10
    s = s + c
    k = k // 10
print("Sum is", s)
| 15.1 | 44 | 0.556291 | 34 | 151 | 2.470588 | 0.617647 | 0.047619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057143 | 0.304636 | 151 | 9 | 45 | 16.777778 | 0.742857 | 0.278146 | 0 | 0 | 0 | 0 | 0.242424 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0ad8eb26ae02e1c606d81d36ad60e84c40aeddbc | 280 | py | Python | tests/functional/postgres/factories.py | seandstewart/norma | 41f4d4974496627b6422ef337fde6a923e0f9f53 | [
"MIT"
] | 4 | 2021-07-20T17:18:30.000Z | 2021-10-04T00:35:31.000Z | tests/functional/sqlite/factories.py | seandstewart/norma | 41f4d4974496627b6422ef337fde6a923e0f9f53 | [
"MIT"
] | null | null | null | tests/functional/sqlite/factories.py | seandstewart/norma | 41f4d4974496627b6422ef337fde6a923e0f9f53 | [
"MIT"
] | null | null | null | import factory
from examples.pg.db import model
class PostFactory(factory.Factory):
    class Meta:
        model = model.Post

    title = factory.Faker("catch_phrase")
    subtitle = factory.Faker("bs")
    tagline = factory.Faker("bs")
    body = factory.Faker("paragraph")
| 20 | 41 | 0.678571 | 34 | 280 | 5.558824 | 0.588235 | 0.253968 | 0.148148 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.203571 | 280 | 13 | 42 | 21.538462 | 0.847534 | 0 | 0 | 0 | 0 | 0 | 0.089286 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.222222 | 0 | 0.888889 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
0ae2c6ab73bdf924b72092277877e1ae2d29810e | 1,398 | py | Python | src/compas_mobile_robot_reloc/three_pts_localization.py | gramaziokohler/total_station_robot_localization | aa4baa070d4d93a8058a0286615580ed8f95567f | [
"MIT"
] | null | null | null | src/compas_mobile_robot_reloc/three_pts_localization.py | gramaziokohler/total_station_robot_localization | aa4baa070d4d93a8058a0286615580ed8f95567f | [
"MIT"
] | 27 | 2020-12-22T13:20:52.000Z | 2022-03-04T23:04:51.000Z | src/compas_mobile_robot_reloc/three_pts_localization.py | gramaziokohler/total_station_robot_localization | aa4baa070d4d93a8058a0286615580ed8f95567f | [
"MIT"
] | 1 | 2021-03-26T02:59:59.000Z | 2021-03-26T02:59:59.000Z | """Three points method for robot relocalization"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compas.geometry import Frame
from compas.geometry import Transformation
from compas_mobile_robot_reloc.utils import TYPE_CHECKING
if TYPE_CHECKING:
    from typing import List
    from typing import Union

    from compas.geometry import Point


def _pts_to_frame(pts):  # type: (Point) -> Frame
    return Frame.from_points(*pts)


def _coerce_frame(frame_or_pts):  # type: (Union[List[Point], Frame]) -> Frame
    if isinstance(frame_or_pts, Frame):
        return frame_or_pts
    return _pts_to_frame(frame_or_pts)


def three_pts_localization(
    rcs_coords, wcs_coords
):  # type: (List[Point], List[Point]) -> Frame
    """Get the robot base frame in WCS using three points method.

    Parameters
    ----------
    rcs_coords
        List of the RCS coordinates used for measurements.
    wcs_coords
        List of the WCS coordinates used for measurements.

    Returns
    -------
    The base frame of the robot in WCS.
    """
    recorded_frame_rcs = _coerce_frame(rcs_coords)
    recorded_frame_wcs = _coerce_frame(wcs_coords)

    T = Transformation.from_frame_to_frame(recorded_frame_rcs, recorded_frame_wcs)

    wcs_robot_base = Frame.worldXY()
    wcs_robot_base.transform(T)

    return wcs_robot_base
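
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Coordinates are illustrative: three non-collinear points measured in both the
# robot coordinate system (RCS) and the world coordinate system (WCS).
# from compas.geometry import Point
# rcs = [Point(0, 0, 0), Point(1, 0, 0), Point(0, 1, 0)]
# wcs = [Point(2, 3, 0), Point(3, 3, 0), Point(2, 4, 0)]
# robot_base_frame = three_pts_localization(rcs, wcs)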
| 25.888889 | 82 | 0.731044 | 192 | 1,398 | 4.984375 | 0.276042 | 0.041797 | 0.041797 | 0.075235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.194564 | 1,398 | 53 | 83 | 26.377358 | 0.849911 | 0.30329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0 | 0.36 | 0.04 | 0.64 | 0.04 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
0ae951c8df68eef9a7a736e9ae051e71ae1229b7 | 191 | py | Python | govuk_forms/__init__.py | lgarvey/django-govuk-forms | 22a7be12c8fa4026eb4605042b419c65e557266c | [
"MIT"
] | null | null | null | govuk_forms/__init__.py | lgarvey/django-govuk-forms | 22a7be12c8fa4026eb4605042b419c65e557266c | [
"MIT"
] | null | null | null | govuk_forms/__init__.py | lgarvey/django-govuk-forms | 22a7be12c8fa4026eb4605042b419c65e557266c | [
"MIT"
] | null | null | null | VERSION = (0, 6)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Ministry of Justice'
__email__ = 'dev@digital.justice.gov.uk'
default_app_config = 'govuk_forms.apps.FormsAppConfig'
| 27.285714 | 54 | 0.743455 | 25 | 191 | 5.08 | 0.88 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011696 | 0.104712 | 191 | 6 | 55 | 31.833333 | 0.730994 | 0 | 0 | 0 | 0 | 0 | 0.403141 | 0.298429 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
0aec97e90762772f5a4114a70a05b523ef848a52 | 152 | py | Python | django_settings/__init__.py | ildus/django-settings | a1e76ad60f7e918a609f2ab4667eb9f782455c61 | [
"BSD-3-Clause"
] | null | null | null | django_settings/__init__.py | ildus/django-settings | a1e76ad60f7e918a609f2ab4667eb9f782455c61 | [
"BSD-3-Clause"
] | null | null | null | django_settings/__init__.py | ildus/django-settings | a1e76ad60f7e918a609f2ab4667eb9f782455c61 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
VERSION = (1, 0, 0, 'beta')
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = 'Kuba Janoszek'
| 25.333333 | 69 | 0.578947 | 21 | 152 | 3.809524 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.053846 | 0.144737 | 152 | 5 | 70 | 30.4 | 0.561538 | 0.138158 | 0 | 0 | 0 | 0 | 0.139535 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7c0f4fa77be65ae8c60485e122db87b396a3c57a | 2,853 | py | Python | envec/multipart.py | jwodder/envec | 31b9d1db048dc1a96c42cd69a3c38ccb89446e1c | [
"MIT"
] | null | null | null | envec/multipart.py | jwodder/envec | 31b9d1db048dc1a96c42cd69a3c38ccb89446e1c | [
"MIT"
] | null | null | null | envec/multipart.py | jwodder/envec | 31b9d1db048dc1a96c42cd69a3c38ccb89446e1c | [
"MIT"
] | null | null | null | from enum import Enum
import itertools
import json
import warnings
from ._util import cheap_repr, for_json
class CardClass(Enum):
    normal = 1
    split = 2
    flip = 3
    double_faced = 4
    BFM = 5

    def for_json(self):
        return self.name


class MultipartDB:
    DEFAULT_DATAFILE = 'data/multipart.json'

    def __init__(self, infile=None):
        if infile is None:
            infile = open(self.DEFAULT_DATAFILE)
        with infile:
            data = json.load(infile)
        self.sourcefile = infile.name
        self.byName = {}
        self.byClass = {}
        for cclass in CardClass:
            if cclass == CardClass.normal:
                continue
            classed = data.get(cclass.name, [])
            self.byClass[cclass] = classed
            for entry in classed:
                entry["cardClass"] = cclass
                for name in (entry["primary"], entry["secondary"]):
                    if name in self.byName:
                        warnings.warn('%s: name appears more than once in'
                                      ' multipart file; subsequent appearance'
                                      ' ignored' % (name,))
                    else:
                        self.byName[name] = entry

    def cardClass(self, name):
        try:
            entry = self.byName[name]
        except KeyError:
            return CardClass.normal
        else:
            return entry["cardClass"]

    def isPrimary(self, name):
        try:
            return self.byName[name]["primary"] == name
        except KeyError:
            return False

    def isSecondary(self, name):
        try:
            return self.byName[name]["secondary"] == name
        except KeyError:
            return False

    def isSplit(self, name):
        return self.cardClass(name) == CardClass.split

    def isFlip(self, name):
        return self.cardClass(name) == CardClass.flip

    def isDouble(self, name):
        return self.cardClass(name) == CardClass.double_faced

    def isMultipart(self, name):
        return self.cardClass(name) != CardClass.normal

    def primaries(self):
        for entry in self:
            yield entry["primary"]

    def secondaries(self):
        for entry in self:
            yield entry["secondary"]

    def alternate(self, name):
        try:
            entry = self.byName[name]
        except KeyError:
            return None
        else:
            return entry["secondary" if entry["primary"] == name else "primary"]

    def __iter__(self):
        return itertools.chain.from_iterable(self.byClass.values())

    def __len__(self):
        return sum(map(len, self.byClass.values()))

    def __repr__(self):
        return cheap_repr(self)

    def for_json(self):
        return for_json(vars(self), trim=True)
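
# --- Hedged usage sketch (added for illustration; not part of the original file) ---
# Assumes data/multipart.json exists; the card names are illustrative.
# db = MultipartDB()
# db.isSplit("Fire")      # True if "Fire" is half of a split card in the data
# db.alternate("Fire")    # name of the other half, e.g. "Ice", or None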
| 27.699029 | 80 | 0.546442 | 301 | 2,853 | 5.086379 | 0.285714 | 0.047028 | 0.045722 | 0.062704 | 0.314827 | 0.2887 | 0.246897 | 0.065317 | 0.065317 | 0.065317 | 0 | 0.00274 | 0.360322 | 2,853 | 102 | 81 | 27.970588 | 0.836164 | 0 | 0 | 0.228916 | 0 | 0 | 0.065896 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.192771 | false | 0 | 0.060241 | 0.108434 | 0.554217 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 2 |
7c10f9fb139fb2a927d49d0974f7ce00da8a221b | 238 | py | Python | download-package/PythonExercises/ex013.py | MCLeitao/Python-Exercises | b24758ac6f95584ef03c320cc442c12a6fad2bd9 | [
"MIT"
] | null | null | null | download-package/PythonExercises/ex013.py | MCLeitao/Python-Exercises | b24758ac6f95584ef03c320cc442c12a6fad2bd9 | [
"MIT"
] | null | null | null | download-package/PythonExercises/ex013.py | MCLeitao/Python-Exercises | b24758ac6f95584ef03c320cc442c12a6fad2bd9 | [
"MIT"
] | null | null | null | # Make an algorithm that reads an employee's salary and shows his new salary, with a 15% increase
n1 = float(input('Enter the employee`s salary: US$ '))
n2 = n1 * 1.15
print('The new salary, with 15% increase, is US${:.2f}'.format(n2))
| 39.666667 | 97 | 0.693277 | 43 | 238 | 3.837209 | 0.651163 | 0.109091 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060914 | 0.172269 | 238 | 5 | 98 | 47.6 | 0.77665 | 0.39916 | 0 | 0 | 0 | 0 | 0.574468 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7c12815fd85eaeafddc49e66b55743a0ae0df4b0 | 583 | py | Python | bolsa/migrations/0003_auto_20210502_0947.py | IgorAlmeeida/pet-site | 265c1d622548093c4b58679efe5f8b8f3c7ebb84 | [
"MIT"
] | null | null | null | bolsa/migrations/0003_auto_20210502_0947.py | IgorAlmeeida/pet-site | 265c1d622548093c4b58679efe5f8b8f3c7ebb84 | [
"MIT"
] | null | null | null | bolsa/migrations/0003_auto_20210502_0947.py | IgorAlmeeida/pet-site | 265c1d622548093c4b58679efe5f8b8f3c7ebb84 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.3 on 2021-05-02 12:47
from django.db import migrations
class Migration(migrations.Migration):
    dependencies = [
        ('bolsa', '0002_auto_20201027_0152'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='person',
            name='function',
        ),
        migrations.RemoveField(
            model_name='person',
            name='institution',
        ),
        migrations.DeleteModel(
            name='Function',
        ),
        migrations.DeleteModel(
            name='Institution',
        ),
    ]
| 20.821429 | 47 | 0.540309 | 50 | 583 | 6.2 | 0.62 | 0.135484 | 0.167742 | 0.193548 | 0.258065 | 0.258065 | 0 | 0 | 0 | 0 | 0 | 0.081579 | 0.348199 | 583 | 27 | 48 | 21.592593 | 0.734211 | 0.077187 | 0 | 0.47619 | 1 | 0 | 0.145522 | 0.04291 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.047619 | 0 | 0.190476 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7c156c9cc53fba70f7e1c6a60d6f43236fb144e4 | 171 | py | Python | setup.py | peternewman/kodespel | 01e92bf674e96f286aa470c90a62a47d3a0d4013 | [
"BSD-2-Clause"
] | null | null | null | setup.py | peternewman/kodespel | 01e92bf674e96f286aa470c90a62a47d3a0d4013 | [
"BSD-2-Clause"
] | 2 | 2022-02-27T17:30:22.000Z | 2022-02-28T02:17:01.000Z | setup.py | peternewman/kodespel | 01e92bf674e96f286aa470c90a62a47d3a0d4013 | [
"BSD-2-Clause"
] | 1 | 2022-02-27T06:56:51.000Z | 2022-02-27T06:56:51.000Z | from glob import glob
from setuptools import setup
dict_files = glob("dict/*.dict")
setup(
    packages=["kodespel"],
    data_files=[('share/kodespel', dict_files)],
)
| 17.1 | 48 | 0.695906 | 22 | 171 | 5.272727 | 0.5 | 0.155172 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.152047 | 171 | 9 | 49 | 19 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0.192982 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
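A data_files target with a relative path such as 'share/kodespel' is installed relative to the interpreter's data prefix (sys.prefix, or the active virtualenv). A hedged sketch of how installed code could locate those dictionaries at runtime; the exact layout can vary by platform and installer:

    import glob
    import os
    import sysconfig

    data_root = sysconfig.get_paths()["data"]  # typically sys.prefix
    dict_dir = os.path.join(data_root, "share", "kodespel")
    installed_dicts = glob.glob(os.path.join(dict_dir, "*.dict"))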
7c1d31a5986e1afeaeebd972ab24e8d06ced1557 | 340 | py | Python | src/pysparkbundle/PySparkBundleTest.py | daipe-ai/pyspark-bundle | ffbb0f8835b69646a235ae4d7d1f4eaa03a05ea0 | [
"MIT"
] | null | null | null | src/pysparkbundle/PySparkBundleTest.py | daipe-ai/pyspark-bundle | ffbb0f8835b69646a235ae4d7d1f4eaa03a05ea0 | [
"MIT"
] | null | null | null | src/pysparkbundle/PySparkBundleTest.py | daipe-ai/pyspark-bundle | ffbb0f8835b69646a235ae4d7d1f4eaa03a05ea0 | [
"MIT"
] | null | null | null | import unittest
from pyfonycore.bootstrap import bootstrapped_container
from injecta.testing.services_tester import test_services
class PySparkBundleTest(unittest.TestCase):
    def test_init(self):
        container = bootstrapped_container.init("test")
        test_services(container)


if __name__ == "__main__":
    unittest.main()
| 22.666667 | 57 | 0.770588 | 37 | 340 | 6.702703 | 0.567568 | 0.169355 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.152941 | 340 | 14 | 58 | 24.285714 | 0.861111 | 0 | 0 | 0 | 0 | 0 | 0.035294 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.333333 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
7c2e6fc9e4a7e8e800aa3c395324c5f43f51ba04 | 2,161 | py | Python | examples/pyrigidbody3dmeshcat.py | erwincoumans/pyrigidbody3d | aedf0168b5ec6bca99fa7589f69a2e28680ba078 | [
"Apache-2.0"
] | 14 | 2021-04-25T21:47:41.000Z | 2021-08-09T18:12:17.000Z | examples/pyrigidbody3dmeshcat.py | erwincoumans/py_rigidbody_3d | aedf0168b5ec6bca99fa7589f69a2e28680ba078 | [
"Apache-2.0"
] | 1 | 2021-09-19T23:22:00.000Z | 2021-09-19T23:22:00.000Z | examples/pyrigidbody3dmeshcat.py | erwincoumans/py_rigidbody_3d | aedf0168b5ec6bca99fa7589f69a2e28680ba078 | [
"Apache-2.0"
] | 2 | 2021-06-06T12:46:17.000Z | 2021-08-08T19:22:32.000Z |
import sys
import copy
import numpy as np
from pyrigidbody3d import geometry
from pyrigidbody3d import rigidbody
from pyrigidbody3d import world
# real-time updates are a bit choppy
import meshcat
import meshcat.geometry as g
import meshcat.transformations as tf
import math
import time
SIMULATION_TIME_STEP = 1. / 60.#240.
NUM_SOLVER_ITERATIONS = 20
RADIUS=0.5
physics_world = world.World(NUM_SOLVER_ITERATIONS)
physics_world.gravity = np.array([0.0, -2.0, -9.8])
vis = meshcat.Visualizer().open()
#physics plane
plane = geometry.Plane()
plane_id = rigidbody.RigidBody(inv_mass=0.0, collision_shape=plane)
physics_world.bodies.append(plane_id)
#rendering plane
ground = g.Box([10,10,0.01])
vis['ground'].set_object(ground,g.MeshLambertMaterial( color=0xffffff, wireframe=False))
#physics sphere
sphere = geometry.Sphere(RADIUS)
sphere_id = rigidbody.RigidBody(inv_mass=1.0, collision_shape=sphere)
sphere_id.world_pose.position = np.array([0., 0., 2.6])
physics_world.bodies.append(sphere_id)
#rendering sphere
sphere = g.Sphere([RADIUS])
vis['sphere'].set_object(sphere,g.MeshPhongMaterial(color=0x5555ff, wireframe=True))
dt = SIMULATION_TIME_STEP
#todo: convert the sphere orientation quaternion to mat3x3
mat4 = tf.rotation_matrix(0, [0, 0, 1])
mat4[:3, 3] = sphere_id.world_pose.position
vis['sphere'].set_transform(mat4)
#real-time updates are a bit choppy, so record an animation instead
#for _ in range(200):
# physics_world.step(dt)
# mat4[:3, 3] = sphere_id.world_pose.position
# vis['sphere'].set_transform(mat4)
# time.sleep(0.5*SIMULATION_TIME_STEP)
from meshcat.animation import Animation
sphere_id.world_pose.position = np.array([0., 0., 2.6])
anim = Animation()
for frame_index in range(200):
    physics_world.step(dt)
    mat4 = sphere_id.world_pose.matrix()
    with anim.at_frame(vis, frame_index) as frame:
        frame["sphere"].set_transform(mat4)
# `set_animation` actually sends the animation to the
# viewer. By default, the viewer will play the animation
# right away. To avoid that, you can also pass `play=False`.
vis.set_animation(anim)#, play=False)
| 27.705128 | 88 | 0.765849 | 335 | 2,161 | 4.802985 | 0.340299 | 0.034804 | 0.040398 | 0.052828 | 0.266004 | 0.186451 | 0.186451 | 0.151647 | 0.114357 | 0.114357 | 0 | 0.03622 | 0.118464 | 2,161 | 77 | 89 | 28.064935 | 0.808399 | 0.260065 | 0 | 0.139535 | 0 | 0 | 0.01519 | 0 | 0 | 0 | 0.010127 | 0.012987 | 0 | 1 | 0 | false | 0 | 0.325581 | 0 | 0.325581 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
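The script demonstrates the record-then-play pattern spelled out in its comments: rather than driving set_transform in real time (the commented-out loop), every simulation step is written into an Animation frame and the whole clip is sent to the viewer once. The pattern in isolation, using only the meshcat calls that appear above (the helper function and its parameters are illustrative):

    from meshcat.animation import Animation

    def record_animation(vis, step_fn, get_transform, n_frames=200, path="sphere"):
        anim = Animation()
        for i in range(n_frames):
            step_fn()                                  # advance the simulation one step
            with anim.at_frame(vis, i) as frame:
                frame[path].set_transform(get_transform())
        vis.set_animation(anim)  # one round-trip; pass play=False to keep the viewer paused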
7c31faa9ed0c050238935c2cd0550db979239d3d | 1,345 | py | Python | testflows/database/argparser.py | testflows/TestFlows-Database | 2f32738c1265d8d37dfd12082268fe38373f1ec2 | [
"Apache-2.0"
] | null | null | null | testflows/database/argparser.py | testflows/TestFlows-Database | 2f32738c1265d8d37dfd12082268fe38373f1ec2 | [
"Apache-2.0"
] | null | null | null | testflows/database/argparser.py | testflows/TestFlows-Database | 2f32738c1265d8d37dfd12082268fe38373f1ec2 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Katteli Inc.
# TestFlows.com Open-Source Software Testing Framework (http://testflows.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from testflows._core.cli.arg.type import key_value as key_value_type
def argparser(parser):
    parser.add_argument("--database", dest="_database", metavar="name=value", nargs="+",
        help="""database output handler options, default handler: 'testflows.database.clickhouse'.
Options are specific to each output handler. For the default ClickHouse handler
the following options can be specified:
'host=<hostname>'
'database=<database>'
'user=<user>'
'password=<password>'.
For example: '--database host=localhost'
""", type=key_value_type, required=False)
| 48.035714 | 115 | 0.687732 | 173 | 1,345 | 5.300578 | 0.612717 | 0.065431 | 0.028353 | 0.034896 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007634 | 0.220818 | 1,345 | 27 | 116 | 49.814815 | 0.867366 | 0.465428 | 0 | 0 | 0 | 0 | 0.659091 | 0.106534 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0.083333 | 0.083333 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
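--database collects repeated name=value pairs through the key_value type imported from testflows internals, which is not shown in this record. A minimal stand-in with the same observable behavior (the exact splitting rule used by testflows is an assumption):

    import argparse

    def key_value(arg):
        # split "name=value" on the first "=" only
        name, sep, value = arg.partition("=")
        if not sep:
            raise argparse.ArgumentTypeError("expected name=value, got %r" % arg)
        return name, value

    parser = argparse.ArgumentParser()
    parser.add_argument("--database", dest="_database", metavar="name=value",
                        nargs="+", type=key_value, required=False)
    args = parser.parse_args(["--database", "host=localhost", "user=default"])
    # args._database == [("host", "localhost"), ("user", "default")]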
7c35c7a10bf76be0b5ac1b837504e653aeddd6cc | 1,063 | py | Python | CSV/test/demo.py | philip-shen/note_python | db0ad84af25464a22ac52e348960107c81e74a56 | [
"MIT"
] | null | null | null | CSV/test/demo.py | philip-shen/note_python | db0ad84af25464a22ac52e348960107c81e74a56 | [
"MIT"
] | 11 | 2021-02-08T20:45:23.000Z | 2022-03-12T01:00:11.000Z | CSV/test/demo.py | philip-shen/note_python | db0ad84af25464a22ac52e348960107c81e74a56 | [
"MIT"
] | null | null | null | '''Test GetCsvColumn'''
import sys
sys.path.append('../lib')
from GetCsvColumn import CsvFile, EXCLUDE
csvfilename = 'demo.csv'
csvfile = CsvFile(csvfilename)
# example 1: get a column by its header
print('example 1:{}'.format(csvfile.get_column('Name')))
# example 2: get a column filtered by another column
#print 'example 2:', csvfile.get_column('Name', Gender='M')
# example 3: get a column filtered by other columns
print('example 3:{}'.format(csvfile.get_column('Name', Gender='M', Age=9)))
# example 4: exclusive filters
print('example 4:{}'.format(csvfile.get_column('Name', Gender=EXCLUDE('M'))))
# example 5: get a column filtered by other column with multi-criteria
print('example 5:{}'.format(csvfile.get_column('Name', Age=[8, 9, 13])))
# example 6: get a column exclusively filtered by other column with multi-criteria
print('example 6:{}'.format(csvfile.get_column('Name', Age=EXCLUDE([8, 9, 13]))))
# example 7: get multiple columns for unpacking
no, name = csvfile.get_column('No', 'Name')
print('example 7:{}, {}'.format(no, name)) | 35.433333 | 82 | 0.711195 | 159 | 1,063 | 4.710692 | 0.314465 | 0.11215 | 0.149533 | 0.160214 | 0.441923 | 0.380507 | 0.133511 | 0.133511 | 0.133511 | 0 | 0 | 0.024625 | 0.121355 | 1,063 | 30 | 83 | 35.433333 | 0.777302 | 0.413923 | 0 | 0 | 0 | 0 | 0.193126 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
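The demo drives a GetCsvColumn library that is not included in this record. Judging only from the calls above, keyword arguments filter rows by column value, EXCLUDE inverts a filter, scalar or list criteria are both accepted, and passing several column names returns parallel lists. A minimal sketch of such an interface; every implementation detail here is a guess:

    import csv

    class EXCLUDE(object):
        def __init__(self, values):
            self.values = values if isinstance(values, list) else [values]

    class CsvFile(object):
        def __init__(self, filename):
            with open(filename) as f:
                self.rows = list(csv.DictReader(f))

        def _matches(self, row, filters):
            for column, wanted in filters.items():
                value = row[column]
                if isinstance(wanted, EXCLUDE):
                    if value in [str(v) for v in wanted.values]:
                        return False
                else:
                    allowed = wanted if isinstance(wanted, list) else [wanted]
                    if value not in [str(v) for v in allowed]:
                        return False
            return True

        def get_column(self, *names, **filters):
            columns = [[row[n] for row in self.rows if self._matches(row, filters)]
                       for n in names]
            return columns[0] if len(columns) == 1 else tuple(columns)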
7c38e564ad38d479034e84652bd7483293320f3c | 445 | py | Python | src/applications/player/admin.py | luisito666/M2-API-REST | 238837c2cbd0e9aadcce29def0dd9935b888047b | [
"MIT"
] | null | null | null | src/applications/player/admin.py | luisito666/M2-API-REST | 238837c2cbd0e9aadcce29def0dd9935b888047b | [
"MIT"
] | 3 | 2021-04-08T19:14:52.000Z | 2022-03-12T01:05:15.000Z | src/applications/player/admin.py | luisito666/M2-API-REST | 238837c2cbd0e9aadcce29def0dd9935b888047b | [
"MIT"
] | 1 | 2020-12-25T20:34:09.000Z | 2020-12-25T20:34:09.000Z | from django.contrib import admin
# Locals Models
from .models import Player, Guild
class PlayerAdmin(admin.ModelAdmin):
    list_display = ("id", "account_id", "name", "level", "exp", "last_play", "ip")
    search_fields = ["name"]


class GuildAdmin(admin.ModelAdmin):
    list_display = ("id", "name", "master", "level", "exp")
    search_fields = ["name"]
admin.site.register(Player, PlayerAdmin)
admin.site.register(Guild, GuildAdmin)
| 22.25 | 82 | 0.689888 | 54 | 445 | 5.574074 | 0.518519 | 0.106312 | 0.126246 | 0.172757 | 0.186047 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148315 | 445 | 19 | 83 | 23.421053 | 0.794195 | 0.029213 | 0 | 0.2 | 0 | 0 | 0.146512 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
7c3f767ad1d940f91c4df9be4cf792c43c534374 | 3,357 | py | Python | src/audio/__init__.py | vincent-lg/cocomud | a2c174f15b024d834266e0bef3d0404732d34a47 | [
"BSD-3-Clause"
] | 3 | 2016-10-13T01:39:20.000Z | 2017-08-01T15:58:12.000Z | src/audio/__init__.py | vincent-lg/cocomud | a2c174f15b024d834266e0bef3d0404732d34a47 | [
"BSD-3-Clause"
] | 11 | 2018-11-27T16:13:11.000Z | 2019-12-29T11:34:54.000Z | src/audio/__init__.py | vlegoff/cocomud | a2c174f15b024d834266e0bef3d0404732d34a47 | [
"BSD-3-Clause"
] | 2 | 2017-08-02T19:36:57.000Z | 2017-10-21T04:13:34.000Z | # Copyright (c) 2016-2020, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Audio library, to play sounds in various formats.
This package includes:
- The `pybass` library, to play sounds using `BASS`, a thitd-party library
with its own license.
- A wrapper around the `pybass` library to make it easy to use `BASS` from
CocoMUD.
Notice that, even though this package could be useful in other projects,
it is not released as a separate library and doesn't attempt to be used
outside of CocoMUD. Hence, for instance, the wrapper is built with
Python 3.6 in mind and doesn't have any particular checks to be
compatible with other Python versions.
Wrapper usage:
1. From within CocoMUD, just import the wrapper:
       from audio import audiolib
2. `audiolib` is an instance of `AudioLib`, the wrapper used to
   communicate with `pybass`. Therefore, `audiolib` could be considered
   as a standalone, although you could in theory create other objects
   of this class.
3. You can then use the methods of the `audiolib` instance to play sounds
   (see below).

Example:
    from audio import audiolib
    audiolib.play("path/of/my/file.ogg")

    # Or alternatively, to keep a handle on the played sound
    import time
    sound = audiolib.play("path/of/my/file.ogg")
    time.sleep(2)  # notice that the sound will keep on playing while Python pauses
    sound.pause()
    sound.stop()
    sound.volume = 80  # 80% of volume
    sound.play()  # unpause the sound
Frequently used methods:
- `AudioLib.play(str)`: play a sound, given the file path to access it. `BASS` supports WAV, MP3, OGG and other formats (see the full documentation).
- `AudioLib.stop()`: stop all sounds that are currently playing.
- `Sound.play()`: start playing or unpause a sound.
- `Sound.pause()`: pause a sound.
- `Sound.stop()`: stop a sound.
"""
from audio.wrapper import AudioLib
audiolib = AudioLib()
| 42.493671 | 150 | 0.753649 | 507 | 3,357 | 4.990138 | 0.435897 | 0.009486 | 0.014229 | 0.018182 | 0.094071 | 0.075099 | 0.075099 | 0.053755 | 0.053755 | 0.053755 | 0 | 0.006922 | 0.182306 | 3,357 | 78 | 151 | 43.038462 | 0.914754 | 0.965147 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
7c48e343d45ec8573355bb94653bd9a76a47cbfb | 403 | py | Python | admincommand/utils.py | johnnoone/django-admincommand | 8b362506beee0031d12edee69be1ccc8b2d741b0 | [
"BSD-3-Clause"
] | 14 | 2018-09-24T12:21:43.000Z | 2021-02-13T01:04:33.000Z | admincommand/utils.py | johnnoone/django-admincommand | 8b362506beee0031d12edee69be1ccc8b2d741b0 | [
"BSD-3-Clause"
] | 3 | 2019-09-04T15:10:29.000Z | 2022-02-17T16:06:37.000Z | admincommand/utils.py | johnnoone/django-admincommand | 8b362506beee0031d12edee69be1ccc8b2d741b0 | [
"BSD-3-Clause"
] | 10 | 2018-11-17T14:09:12.000Z | 2021-05-31T11:18:43.000Z | def generate_instance_name(name):
    out = name[0].lower()
    for char in name[1:]:
        if char.isupper():
            out += "_%s" % char.lower()
        else:
            out += char
    return out


def generate_human_name(name):
    out = name[0]
    for char in name[1:]:
        if char.isupper():
            out += " %s" % char.lower()
        else:
            out += char
    return out
| 21.210526 | 39 | 0.488834 | 51 | 403 | 3.764706 | 0.333333 | 0.114583 | 0.114583 | 0.15625 | 0.791667 | 0.625 | 0.625 | 0.625 | 0.625 | 0.625 | 0 | 0.015873 | 0.37469 | 403 | 18 | 40 | 22.388889 | 0.746032 | 0 | 0 | 0.625 | 1 | 0 | 0.014888 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0 | 0 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
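Both helpers walk the name character by character. For typical CamelCase class names, the instance-name conversion is equivalent to a single regular expression; this is a reference sketch, not the library's implementation:

    import re

    def generate_instance_name_re(name):
        # insert "_" before every uppercase letter that is not the first character
        return re.sub(r"(?<=.)([A-Z])", r"_\1", name).lower()

    assert generate_instance_name_re("AdminCommand") == "admin_command"
    assert generate_instance_name_re("ABCommand") == "a_b_command"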
7c4d064dc2593a9ca874948865e37f3de94e6ad1 | 830 | py | Python | moto/awslambda/urls.py | gvlproject/moto | b1c51faaf5dbf79a76eca29724b7d22b87e27502 | [
"Apache-2.0"
] | 2 | 2019-07-10T14:44:12.000Z | 2020-06-08T17:26:29.000Z | moto/awslambda/urls.py | gvlproject/moto | b1c51faaf5dbf79a76eca29724b7d22b87e27502 | [
"Apache-2.0"
] | 5 | 2018-04-25T21:04:20.000Z | 2018-11-02T19:59:27.000Z | moto/awslambda/urls.py | gvlproject/moto | b1c51faaf5dbf79a76eca29724b7d22b87e27502 | [
"Apache-2.0"
] | 12 | 2017-09-06T22:11:15.000Z | 2021-05-28T17:22:31.000Z | from __future__ import unicode_literals
from .responses import LambdaResponse
url_bases = [
    "https?://lambda.(.+).amazonaws.com",
]

response = LambdaResponse()

url_paths = {
    r'{0}/(?P<api_version>[^/]+)/functions/?$': response.root,
    r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/?$': response.function,
    r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/versions/?$': response.versions,
    r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invocations/?$': response.invoke,
    r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invoke-async/?$': response.invoke_async,
    r'{0}/(?P<api_version>[^/]+)/tags/(?P<resource_arn>.+)': response.tag,
    r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/policy/?$': response.policy
}
| 43.684211 | 110 | 0.625301 | 103 | 830 | 4.786408 | 0.330097 | 0.028398 | 0.070994 | 0.170385 | 0.434077 | 0.365112 | 0.365112 | 0.365112 | 0.365112 | 0.365112 | 0 | 0.009198 | 0.083133 | 830 | 18 | 111 | 46.111111 | 0.638633 | 0 | 0 | 0 | 0 | 0 | 0.59759 | 0.59759 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.133333 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
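Each url_paths key is a regex template: moto substitutes the matched base URL for {0} before compiling, so the named groups capture the API version and function name. A simplified demonstration of that resolution step (the base pattern here is a trimmed stand-in; moto's real dispatcher does more work):

    import re

    base = r"https?://lambda\.([^.]+)\.amazonaws\.com"
    template = r'{0}/(?P<api_version>[^/]+)/functions/(?P<function_name>[\w_-]+)/invocations/?$'
    pattern = re.compile(template.format(base))

    m = pattern.match(
        "https://lambda.us-east-1.amazonaws.com/2015-03-31/functions/my-func/invocations")
    assert m.group("api_version") == "2015-03-31"
    assert m.group("function_name") == "my-func"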
7c4f00621dee11534b0882a146bdc169febc1be7 | 305 | py | Python | example/comments/urls.py | singhkumarpratik/django-vote | ca9b182a941c6a9b3426fe4b9e2d0cbf72737232 | [
"BSD-3-Clause"
] | null | null | null | example/comments/urls.py | singhkumarpratik/django-vote | ca9b182a941c6a9b3426fe4b9e2d0cbf72737232 | [
"BSD-3-Clause"
] | null | null | null | example/comments/urls.py | singhkumarpratik/django-vote | ca9b182a941c6a9b3426fe4b9e2d0cbf72737232 | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from django.urls import path, include

from .views import CommentView, comment_vote

app_name = "home"

urlpatterns = [
    path("", CommentView.as_view(), name="comment"),
    path("<int:comment_id>/vote", comment_vote, name="comment-vote"),
]
| 27.727273 | 69 | 0.737705 | 42 | 305 | 5.238095 | 0.47619 | 0.136364 | 0.127273 | 0.181818 | 0.218182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131148 | 305 | 10 | 70 | 30.5 | 0.830189 | 0 | 0 | 0 | 0 | 0 | 0.144262 | 0.068852 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.444444 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
7c57f3b88150663987cd02f85ec294c6ccb337d6 | 1,034 | py | Python | datasource/impl/string.py | YAmikep/datasource | 6c8d72bd299aa0a9e2880228f0f39d2b8721b146 | [
"MIT"
] | 1 | 2018-06-16T11:33:56.000Z | 2018-06-16T11:33:56.000Z | datasource/impl/string.py | YAmikep/datasource | 6c8d72bd299aa0a9e2880228f0f39d2b8721b146 | [
"MIT"
] | 1 | 2020-03-24T17:32:45.000Z | 2020-03-24T17:32:45.000Z | datasource/impl/string.py | YAmikep/datasource | 6c8d72bd299aa0a9e2880228f0f39d2b8721b146 | [
"MIT"
] | 2 | 2018-06-16T11:37:34.000Z | 2020-07-30T17:56:54.000Z | # Python stdlib
# Let's use only StringIO because cStringIO seems to duplicate the data in memory
# when using getvalue() to create a new stream.
# try:
# from cStringIO import StringIO as _StringIO
# except ImportError:
# from StringIO import StringIO as _StringIO
from StringIO import StringIO as _StringIO
# Internal
from ..interface import DataSourceInterface
from ..utils import helpers
class StringDataSource(DataSourceInterface):
    def __init__(self, data, **kwargs):
        self._data = data
        self._readers = []

    @property
    def is_loaded(self):
        return True

    def load(self):
        pass

    def size(self, *args, **kwargs):
        return len(self._data)

    def get_reader(self):
        r = _StringIO(self._data)
        self._readers.append(r)
        return r

    def __repr__(self):
        name = self.__class__.__name__
        return u'<{}: {}>'.format(name, helpers.truncate(self._data, size=50))

    def __del__(self):
        for r in self._readers:
            r.close()
| 24.046512 | 81 | 0.655706 | 128 | 1,034 | 5.039063 | 0.5 | 0.062016 | 0.074419 | 0.111628 | 0.111628 | 0.111628 | 0 | 0 | 0 | 0 | 0 | 0.002584 | 0.251451 | 1,034 | 42 | 82 | 24.619048 | 0.830749 | 0.257253 | 0 | 0 | 0 | 0 | 0.01054 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.291667 | false | 0.041667 | 0.125 | 0.083333 | 0.625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
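Usage sketch for the class above (Python 2, matching the StringIO import in the file): every get_reader() call hands back an independent file-like view over the same string, and __del__ closes all of them when the data source is collected.

    source = StringDataSource("line1\nline2\n")
    r1 = source.get_reader()
    r2 = source.get_reader()
    assert r1.readline() == "line1\n"
    assert r2.read() == "line1\nline2\n"   # r2's position is independent of r1
    assert source.size() == len("line1\nline2\n")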
7c6fd77e382a83086992837874c663bdc784cae7 | 209 | py | Python | train.py | YoruCathy/GarbageNet | dca173e20b9c72c957aa535e2b583d2294164840 | [
"Apache-2.0"
] | 3 | 2020-05-23T12:27:27.000Z | 2022-03-21T03:26:28.000Z | train.py | YoruCathy/GarbageNet | dca173e20b9c72c957aa535e2b583d2294164840 | [
"Apache-2.0"
] | null | null | null | train.py | YoruCathy/GarbageNet | dca173e20b9c72c957aa535e2b583d2294164840 | [
"Apache-2.0"
] | null | null | null | from GarbageClassification import GarbageClassification
gc = GarbageClassification(backbone="MobileNet", gpu="1", logname="realcosinelr")
gc.set_environment()
pipeline = gc.prepare_pipeline()
gc.train(pipeline) | 34.833333 | 79 | 0.827751 | 22 | 209 | 7.772727 | 0.681818 | 0.116959 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005076 | 0.057416 | 209 | 6 | 80 | 34.833333 | 0.862944 | 0 | 0 | 0 | 0 | 0 | 0.104762 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7c710e29c596661609aa58056050ea566b6bc45c | 1,358 | py | Python | bokeh/core/templates.py | areaweb/bokeh | 9d131e45d626a912e85aee5b2647139c194dc893 | [
"BSD-3-Clause"
] | null | null | null | bokeh/core/templates.py | areaweb/bokeh | 9d131e45d626a912e85aee5b2647139c194dc893 | [
"BSD-3-Clause"
] | 1 | 2017-01-12T00:37:38.000Z | 2017-01-12T00:37:38.000Z | bokeh/core/templates.py | areaweb/bokeh | 9d131e45d626a912e85aee5b2647139c194dc893 | [
"BSD-3-Clause"
] | null | null | null | ''' Provide Jinja2 templates used by Bokeh to embed Bokeh models
(e.g. plots, widgets, layouts) in various ways.
.. bokeh-jinja:: bokeh.core.templates.AUTOLOAD_JS
.. bokeh-jinja:: bokeh.core.templates.AUTOLOAD_NB_JS
.. bokeh-jinja:: bokeh.core.templates.AUTOLOAD_TAG
.. bokeh-jinja:: bokeh.core.templates.CSS_RESOURCES
.. bokeh-jinja:: bokeh.core.templates.DOC_JS
.. bokeh-jinja:: bokeh.core.templates.FILE
.. bokeh-jinja:: bokeh.core.templates.JS_RESOURCES
.. bokeh-jinja:: bokeh.core.templates.NOTEBOOK_LOAD
.. bokeh-jinja:: bokeh.core.templates.PLOT_DIV
.. bokeh-jinja:: bokeh.core.templates.SCRIPT_TAG
'''
from __future__ import absolute_import
import json
from jinja2 import Environment, PackageLoader, Markup
_env = Environment(loader=PackageLoader('bokeh.core', '_templates'))
_env.filters['json'] = lambda obj: Markup(json.dumps(obj))
JS_RESOURCES = _env.get_template("js_resources.html")
CSS_RESOURCES = _env.get_template("css_resources.html")
SCRIPT_TAG = _env.get_template("script_tag.html")
PLOT_DIV = _env.get_template("plot_div.html")
DOC_JS = _env.get_template("doc_js.js")
FILE = _env.get_template("file.html")
NOTEBOOK_LOAD = _env.get_template("notebook_load.html")
AUTOLOAD_JS = _env.get_template("autoload_js.js")
AUTOLOAD_NB_JS = _env.get_template("autoload_nb_js.js")
AUTOLOAD_TAG = _env.get_template("autoload_tag.html")
| 30.863636 | 68 | 0.776141 | 197 | 1,358 | 5.055838 | 0.248731 | 0.099398 | 0.198795 | 0.190763 | 0.37751 | 0.216867 | 0.076305 | 0 | 0 | 0 | 0 | 0.001609 | 0.084683 | 1,358 | 43 | 69 | 31.581395 | 0.799678 | 0.443299 | 0 | 0 | 0 | 0 | 0.228916 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
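The 'json' filter registered above lets any template serialize a Python object inline, e.g. to hand configuration to JavaScript. The same idea in a standalone sketch (separate Environment so it does not depend on bokeh's packaged templates; on newer Jinja2 releases Markup is imported from markupsafe instead):

    import json
    from jinja2 import Environment, Markup

    env = Environment()
    env.filters['json'] = lambda obj: Markup(json.dumps(obj))

    template = env.from_string("var config = {{ cfg | json }};")
    print(template.render(cfg={"width": 300, "tools": ["pan", "zoom"]}))
    # var config = {"width": 300, "tools": ["pan", "zoom"]};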
7c76945d43490926170dfd59e1cedf31f10a3f68 | 9,710 | py | Python | core/storage/translation/gae_models_test.py | JeeveshGarg/oppia | 8d4efdb2b1521eb6432ddc12e7bed55c8e90db06 | [
"Apache-2.0"
] | 1 | 2021-12-17T15:21:23.000Z | 2021-12-17T15:21:23.000Z | core/storage/translation/gae_models_test.py | JeeveshGarg/oppia | 8d4efdb2b1521eb6432ddc12e7bed55c8e90db06 | [
"Apache-2.0"
] | null | null | null | core/storage/translation/gae_models_test.py | JeeveshGarg/oppia | 8d4efdb2b1521eb6432ddc12e7bed55c8e90db06 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for MachineTranslation models."""
from __future__ import annotations
from core import feconf
from core.platform import models
from core.tests import test_utils
MYPY = False
if MYPY: # pragma: no cover
    from mypy_imports import base_models
    from mypy_imports import translation_models

(base_models, translation_models) = models.Registry.import_models(
    [models.NAMES.base_model, models.NAMES.translation])
class EntityTranslationsModelTest(test_utils.GenericTestBase):
"""Unit tests for EntityTranslationsModel class."""
def test_create_new_model(self) -> None:
enitity_translation_model = (
translation_models.EntityTranslationsModel.create_new(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id', 1, 'hi', {
'123': {
'content_value': 'Hello world!',
'needs_update': False
}
})
)
self.assertEqual(enitity_translation_model.entity_type, 'exploration')
self.assertEqual(enitity_translation_model.entity_id, 'exp_id')
self.assertEqual(enitity_translation_model.entity_version, 1)
self.assertEqual(enitity_translation_model.language_code, 'hi')
self.assertEqual(
enitity_translation_model.translations['123']['content_value'],
'Hello world!')
self.assertEqual(
enitity_translation_model.translations['123']['needs_update'],
False)
def test_get_model_method_returns_correctly(self) -> None:
translation_models.EntityTranslationsModel.create_new(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id', 1, 'hi', {
'123': {
'content_value': 'Hello world!',
'needs_update': False
}
}
).put()
enitity_translation_model = (
translation_models.EntityTranslationsModel.get_model(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id', 1, 'hi'))
self.assertEqual(enitity_translation_model.entity_type, 'exploration')
self.assertEqual(enitity_translation_model.entity_id, 'exp_id')
self.assertEqual(enitity_translation_model.entity_version, 1)
self.assertEqual(enitity_translation_model.language_code, 'hi')
self.assertEqual(
enitity_translation_model.translations['123']['content_value'],
'Hello world!')
self.assertEqual(
enitity_translation_model.translations['123']['needs_update'],
False)
def test_get_all_for_entity_returns_correctly(self) -> None:
translation_models.EntityTranslationsModel.create_new(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id', 1, 'en', {
'123': {
'content_value': 'Hey I am Jhon.',
'needs_update': False
}
}
).put()
translation_models.EntityTranslationsModel.create_new(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id2', 2, 'hi', {
'123': {
'content_value': 'Hello world!',
'needs_update': False
}
}
).put()
translation_models.EntityTranslationsModel.create_new(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id', 1, 'hi', {
'123': {
'content_value': 'Hey I am Nikhil.',
'needs_update': False
}
}
).put()
enitity_translation_models = (
translation_models.EntityTranslationsModel.get_all_for_entity(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id', 1))
self.assertEqual(len(enitity_translation_models), 2)
enitity_translation_models = (
translation_models.EntityTranslationsModel.get_all_for_entity(
feconf.TranslatableEntityType.EXPLORATION, 'exp_id2', 2))
self.assertEqual(len(enitity_translation_models), 1)
def test_get_export_policy_not_applicable(self) -> None:
self.assertEqual(
translation_models.EntityTranslationsModel.get_export_policy(),
{
'created_on': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'deleted': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'last_updated': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'entity_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'entity_type': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'entity_version': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'language_code': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'translations': base_models.EXPORT_POLICY.NOT_APPLICABLE
}
)
def test_get_deletion_policy_not_applicable(self) -> None:
self.assertEqual(
translation_models.EntityTranslationsModel.get_deletion_policy(),
base_models.DELETION_POLICY.NOT_APPLICABLE)
def test_get_model_association_to_user_not_corresponding_to_user(
self
) -> None:
self.assertEqual(
(
translation_models.EntityTranslationsModel
.get_model_association_to_user()
),
base_models.MODEL_ASSOCIATION_TO_USER.NOT_CORRESPONDING_TO_USER)
class MachineTranslationModelTests(test_utils.GenericTestBase):
def test_create_model(self) -> None:
model_id = translation_models.MachineTranslationModel.create(
source_language_code='en',
target_language_code='es',
source_text='hello world',
translated_text='hola mundo'
)
# Ruling out the possibility of None for mypy type checking.
assert model_id is not None
translation_model = (
translation_models.MachineTranslationModel.get(model_id))
self.assertEqual(translation_model.translated_text, 'hola mundo')
def test_create_model_with_same_source_target_language_codes_returns_none(
self
) -> None:
model_id = translation_models.MachineTranslationModel.create(
source_language_code='en',
target_language_code='en',
source_text='hello world',
translated_text='hello world'
)
self.assertIsNone(model_id)
def test_get_machine_translation_with_existing_translation(self) -> None:
translation_models.MachineTranslationModel.create(
source_language_code='en',
target_language_code='es',
source_text='hello world',
translated_text='hola mundo'
)
translation = (
translation_models.MachineTranslationModel
.get_machine_translation(
source_language_code='en',
target_language_code='es',
source_text='hello world',
)
)
self.assertIsNotNone(translation)
# Ruling out the possibility of None for mypy type checking.
assert translation is not None
self.assertEqual(translation.translated_text, 'hola mundo')
def test_get_machine_translation_with_no_existing_translation_returns_none(
self
) -> None:
translation = (
translation_models.MachineTranslationModel
.get_machine_translation(
source_language_code='en',
target_language_code='fr',
source_text='hello world',
)
)
self.assertIsNone(translation)
def test_get_deletion_policy_not_applicable(self) -> None:
self.assertEqual(
translation_models.MachineTranslationModel.get_deletion_policy(),
base_models.DELETION_POLICY.NOT_APPLICABLE)
def test_get_model_association_to_user_not_corresponding_to_user(
self
) -> None:
self.assertEqual(
(
translation_models.MachineTranslationModel
.get_model_association_to_user()
),
base_models.MODEL_ASSOCIATION_TO_USER.NOT_CORRESPONDING_TO_USER)
def test_get_export_policy_not_applicable(self) -> None:
self.assertEqual(
translation_models.MachineTranslationModel.get_export_policy(),
{
'created_on': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'deleted': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'hashed_source_text': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'last_updated': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'source_text': base_models.EXPORT_POLICY.NOT_APPLICABLE,
'source_language_code':
base_models.EXPORT_POLICY.NOT_APPLICABLE,
'target_language_code':
base_models.EXPORT_POLICY.NOT_APPLICABLE,
'translated_text': base_models.EXPORT_POLICY.NOT_APPLICABLE
}
)
| 40.970464 | 79 | 0.644181 | 958 | 9,710 | 6.179541 | 0.174322 | 0.074662 | 0.070608 | 0.076014 | 0.769764 | 0.749155 | 0.67348 | 0.643074 | 0.602196 | 0.602196 | 0 | 0.007112 | 0.276004 | 9,710 | 236 | 80 | 41.144068 | 0.834993 | 0.083419 | 0 | 0.572165 | 0 | 0 | 0.081041 | 0 | 0 | 0 | 0 | 0 | 0.139175 | 1 | 0.06701 | false | 0 | 0.036082 | 0 | 0.113402 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
7c8547ca6fed96593c6254663ccdb483039a110a | 1,057 | py | Python | p3w_05.0c.2.py | davidhawkes11/p3w | 68dd75da167de8c057b46fc5ebce53239b70706f | [
"MIT"
] | 3 | 2016-05-27T03:18:02.000Z | 2018-12-19T06:15:20.000Z | p3w_05.0c.2.py | davidhawkes11/p3w | 68dd75da167de8c057b46fc5ebce53239b70706f | [
"MIT"
] | null | null | null | p3w_05.0c.2.py | davidhawkes11/p3w | 68dd75da167de8c057b46fc5ebce53239b70706f | [
"MIT"
] | 34 | 2016-05-27T03:25:04.000Z | 2021-05-03T02:02:04.000Z | # Code to instruct the computer to generate dice roll values and display output to the screen.
# Input: number, an integer variable
# Output: a vertical list of randomly generated integers in the range specified.
# The above text is commentary. The actual program starts below:
import random # provides this program's access to the Python 3.5 builtin 'random' module.
print ("Python 3.0 Workbook\nStudent Work Booklet\nStudent Activity p3w_05.0c.2\n")
print ("A program to instruct the computer to generate 100 dice roll values of a 12 sided dice and display output to the screen.\n" )
print ("This is best solved using a while loop:\n")
counter = 1 # initialises the pretest while loop to 1
while counter <= 100: # sets the upper limit of the loop
print("Dice roll value", counter ," is ", random.randint(1, 12)) # prints the value to the screen
counter = counter + 1 # increments the counter
print ("\nProgram successfully terminated." )
| 62.176471 | 134 | 0.677389 | 153 | 1,057 | 4.673203 | 0.529412 | 0.027972 | 0.046154 | 0.058741 | 0.162238 | 0.162238 | 0 | 0 | 0 | 0 | 0 | 0.029337 | 0.258278 | 1,057 | 16 | 135 | 66.0625 | 0.882653 | 0.445601 | 0 | 0 | 1 | 0.111111 | 0.516995 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.111111 | 0.555556 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 2 |
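The workbook solves this with a pretest while loop and a manually incremented counter. For comparison only (not part of the workbook activity), the same 100 lines of output can be produced with a for loop over range, which removes the counter bookkeeping:

    import random

    for counter in range(1, 101):   # 1 through 100 inclusive
        print("Dice roll value", counter, " is ", random.randint(1, 12))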
7c8f6ea62fcb74700f7356ed4b937a3aaa1c7092 | 1,401 | py | Python | python/paddle/v2/__init__.py | OleNet/Paddle | 59271d643b13b13346889d12355611b9a2ce4e31 | [
"Apache-2.0"
] | 1 | 2016-10-07T20:40:11.000Z | 2016-10-07T20:40:11.000Z | python/paddle/v2/__init__.py | OleNet/Paddle | 59271d643b13b13346889d12355611b9a2ce4e31 | [
"Apache-2.0"
] | 1 | 2017-05-26T18:33:00.000Z | 2017-05-26T18:33:00.000Z | python/paddle/v2/__init__.py | OleNet/Paddle | 59271d643b13b13346889d12355611b9a2ce4e31 | [
"Apache-2.0"
] | 1 | 2016-10-07T00:50:53.000Z | 2016-10-07T00:50:53.000Z | # Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import optimizer
import layer
import activation
import parameters
import trainer
import event
import data_type
import topology
import data_feeder
import networks
import evaluator
from . import dataset
from . import reader
from . import plot
import attr
import pooling
import inference
import py_paddle.swig_paddle as api
import minibatch
import plot
__all__ = [
    'optimizer', 'layer', 'activation', 'parameters', 'init', 'trainer',
    'event', 'data_type', 'attr', 'pooling', 'data_feeder', 'dataset', 'reader',
    'topology', 'networks', 'infer', 'plot', 'evaluator'
]
def init(**kwargs):
    args = []
    for key in kwargs.keys():
        args.append('--%s=%s' % (key, str(kwargs[key])))
    api.initPaddle(*args)
infer = inference.infer
batch = minibatch.batch
| 26.433962 | 80 | 0.735189 | 193 | 1,401 | 5.284974 | 0.53886 | 0.058824 | 0.02549 | 0.031373 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006849 | 0.16631 | 1,401 | 52 | 81 | 26.942308 | 0.866438 | 0.415418 | 0 | 0.060606 | 0 | 0 | 0.167702 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0 | 0.636364 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
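init(**kwargs) turns keyword arguments into command-line style flags for the compiled core. The conversion step on its own, as a worked example (initPaddle itself needs the compiled backend, so only the flag building is shown):

    def build_flags(**kwargs):
        return ['--%s=%s' % (key, str(value)) for key, value in kwargs.items()]

    assert sorted(build_flags(use_gpu=False, trainer_count=2)) == \
        ['--trainer_count=2', '--use_gpu=False']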
7c9d336d6718c32029b72a24770e2ea59a55dcee | 863 | py | Python | Quality/AtomicComputation/reputationsystem/sink.py | CityPulse/CP_Resourcemanagement | aa670fa89d5e086a98ade3ccc152518be55abf2e | [
"MIT"
] | 2 | 2016-11-03T14:57:45.000Z | 2019-05-13T13:21:08.000Z | Quality/AtomicComputation/reputationsystem/sink.py | CityPulse/CP_Resourcemanagement | aa670fa89d5e086a98ade3ccc152518be55abf2e | [
"MIT"
] | null | null | null | Quality/AtomicComputation/reputationsystem/sink.py | CityPulse/CP_Resourcemanagement | aa670fa89d5e086a98ade3ccc152518be55abf2e | [
"MIT"
] | 1 | 2020-07-23T11:27:15.000Z | 2020-07-23T11:27:15.000Z | import sys
class Sink(object):
"""docstring for Sink"""
def __init__(self):
self.metrics = {}
self.reputation = 1.0
self.reputationsystem = None
# self.timestamp = None #to be set by a (all) QoIMetric
def qoiMetricAdded(self, qoiMetricName, initValue):
""" called when a new qoiMetric is added to the reputation system """
pass
def startup(self):
"""This method is called immediately before the reputation system starts"""
raw_input("enter to stop")
if self.reputationsystem != None:
self.reputationsystem.feed.stop1()
sys.exit()
def update(self, qoiMetric):
""" qoiMetric = tuple (absoluter Wert, Bewertung) """
self.metrics[qoiMetric.name] = qoiMetric
print "%s updated to %f (%f)" % (qoiMetric.name, qoiMetric.absoluteValue, qoiMetric.ratedValue)
def persist(self, observationIdList):
print "persist called in Sink"
| 29.758621 | 97 | 0.713789 | 110 | 863 | 5.554545 | 0.581818 | 0.0982 | 0.07856 | 0.091653 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004172 | 0.16686 | 863 | 29 | 98 | 29.758621 | 0.845619 | 0.060255 | 0 | 0 | 0 | 0 | 0.095563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.055556 | 0.055556 | null | null | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
7c9ec5ef031873a5acc30cb0b66578148b0a0314 | 648 | py | Python | tests/test_rough_e2e.py | dmarx/pytti-core | 6d41082ac7ca797b682e777b65e70bb2095b24c0 | [
"MIT"
] | null | null | null | tests/test_rough_e2e.py | dmarx/pytti-core | 6d41082ac7ca797b682e777b65e70bb2095b24c0 | [
"MIT"
] | null | null | null | tests/test_rough_e2e.py | dmarx/pytti-core | 6d41082ac7ca797b682e777b65e70bb2095b24c0 | [
"MIT"
] | null | null | null | """
Broad strokes, end-to-end testing because something is better than nothing,
which is what we have right now.
"""
from hydra import initialize, initialize_config_module, initialize_config_dir, compose
from omegaconf import OmegaConf
CONFIG_BASE_PATH = "config"
CONFIG_DEFAULTS = "default.yaml"
def test_import():
    from pytti.workhorse import _main as render_frames

    assert True


def test_simple():
    from pytti.workhorse import _main as render_frames

    c_o = "_test.yaml"
    with initialize(config_path=CONFIG_BASE_PATH):
        cfg = compose(config_name=CONFIG_DEFAULTS, overrides=[f"conf={c_o}"])
        render_frames(cfg)
| 24.923077 | 86 | 0.753086 | 91 | 648 | 5.120879 | 0.549451 | 0.103004 | 0.060086 | 0.103004 | 0.180258 | 0.180258 | 0.180258 | 0.180258 | 0 | 0 | 0 | 0 | 0.166667 | 648 | 25 | 87 | 25.92 | 0.862963 | 0.166667 | 0 | 0.153846 | 0 | 0 | 0.071429 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 1 | 0.153846 | false | 0 | 0.384615 | 0 | 0.538462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
7ca47e9fd557afe55f4aab9443df7597c4da1e41 | 8,651 | py | Python | gnome_plugins/uploader_plugin.py | sugar-activities/4434-activity | c51fb2a1ba3b79e5111a037a673c2f596d256205 | [
"MIT"
] | null | null | null | gnome_plugins/uploader_plugin.py | sugar-activities/4434-activity | c51fb2a1ba3b79e5111a037a673c2f596d256205 | [
"MIT"
] | null | null | null | gnome_plugins/uploader_plugin.py | sugar-activities/4434-activity | c51fb2a1ba3b79e5111a037a673c2f596d256205 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2011 Walter Bender
# Copyright (c) 2010 Jamie Boisture
# Copyright (c) 2011 Collabora Ltd. <http://www.collabora.co.uk/>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#!/usr/bin/python
try:
    import pycurl
    import xmlrpclib
    _UPLOAD_AVAILABLE = True
except ImportError as e:
    print "Import Error: %s. Project upload is disabled." % (e)
    _UPLOAD_AVAILABLE = False
import os
import gtk
from .plugin import Plugin
from util.menubuilder import MenuBuilder, MENUBAR
from gettext import gettext as _
class Uploader_plugin(Plugin):
    MAX_FILE_SIZE = 950000
    UPLOAD_SERVER = 'http://turtleartsite.appspot.com'

    def __init__(self, parent, upload_server=None, max_file_size=None):
        self._parent = parent
        self.uploading = False
        if upload_server is None:
            self._upload_server = self.UPLOAD_SERVER
        else:
            self._upload_server = upload_server
        if max_file_size is None:
            self._max_file_size = self.MAX_FILE_SIZE
        else:
            self._max_file_size = max_file_size

    def set_tw(self, turtleart_window):
        self.tw = turtleart_window

    def get_menu(self):
        if _('Upload') in MENUBAR:
            menu, upload_menu = MENUBAR[_('Upload')]
        else:
            upload_menu = None
            menu = gtk.Menu()
        MenuBuilder.make_menu_item(menu, _('Upload to Web'),
                                   self.do_upload_to_web)
        if upload_menu is not None:
            return None  # We don't have to add it since it already exists
        else:
            upload_menu = MenuBuilder.make_sub_menu(menu, _('Upload'))
            return upload_menu

    def enabled(self):
        return _UPLOAD_AVAILABLE

    def do_upload_to_web(self, widget=None):
        if self.uploading:
            return

        self.uploading = False
        self.pop_up = gtk.Window()
        self.pop_up.set_default_size(600, 400)
        self.pop_up.connect('delete_event', self._stop_uploading)
        table = gtk.Table(8, 1, False)
        self.pop_up.add(table)

        login_label = gtk.Label(_('You must have an account at \
http://turtleartsite.sugarlabs.org to upload your project.'))
        table.attach(login_label, 0, 1, 0, 1)
        self.login_message = gtk.Label('')
        table.attach(self.login_message, 0, 1, 1, 2)

        self.Hbox1 = gtk.HBox()
        table.attach(self.Hbox1, 0, 1, 2, 3, xpadding=5, ypadding=3)
        self.username_entry = gtk.Entry()
        username_label = gtk.Label(_('Username:') + ' ')
        username_label.set_size_request(150, 25)
        username_label.set_alignment(1.0, 0.5)
        self.username_entry.set_size_request(450, 25)
        self.Hbox1.add(username_label)
        self.Hbox1.add(self.username_entry)

        self.Hbox2 = gtk.HBox()
        table.attach(self.Hbox2, 0, 1, 3, 4, xpadding=5, ypadding=3)
        self.password_entry = gtk.Entry()
        password_label = gtk.Label(_('Password:') + ' ')
        self.password_entry.set_visibility(False)
        password_label.set_size_request(150, 25)
        password_label.set_alignment(1.0, 0.5)
        self.password_entry.set_size_request(450, 25)
        self.Hbox2.add(password_label)
        self.Hbox2.add(self.password_entry)

        self.Hbox3 = gtk.HBox()
        table.attach(self.Hbox3, 0, 1, 4, 5, xpadding=5, ypadding=3)
        self.title_entry = gtk.Entry()
        title_label = gtk.Label(_('Title:') + ' ')
        title_label.set_size_request(150, 25)
        title_label.set_alignment(1.0, 0.5)
        self.title_entry.set_size_request(450, 25)
        self.Hbox3.add(title_label)
        self.Hbox3.add(self.title_entry)

        self.Hbox4 = gtk.HBox()
        table.attach(self.Hbox4, 0, 1, 5, 6, xpadding=5, ypadding=3)
        self.description_entry = gtk.TextView()
        description_label = gtk.Label(_('Description:') + ' ')
        description_label.set_size_request(150, 25)
        description_label.set_alignment(1.0, 0.5)
        self.description_entry.set_wrap_mode(gtk.WRAP_WORD)
        self.description_entry.set_size_request(450, 50)
        self.Hbox4.add(description_label)
        self.Hbox4.add(self.description_entry)

        self.Hbox5 = gtk.HBox()
        table.attach(self.Hbox5, 0, 1, 6, 7, xpadding=5, ypadding=3)
        self.submit_button = gtk.Button(_('Submit to Web'))
        self.submit_button.set_size_request(300, 25)
        self.submit_button.connect('pressed', self._do_remote_logon)
        self.Hbox5.add(self.submit_button)
        self.cancel_button = gtk.Button(_('Cancel'))
        self.cancel_button.set_size_request(300, 25)
        self.cancel_button.connect('pressed', self._stop_uploading)
        self.Hbox5.add(self.cancel_button)
        self.pop_up.show_all()

    def _stop_uploading(self, widget, event=None):
        """ Hide the popup when the upload is complete """
        self.uploading = False
        self.pop_up.hide()

    def _do_remote_logon(self, widget):
        """ Log into the upload server """
        import socket
        username = self.username_entry.get_text()
        password = self.password_entry.get_text()
        server = xmlrpclib.ServerProxy(self._upload_server + '/call/xmlrpc')
        logged_in = None
        try:
            logged_in = server.login_remote(username, password)
        except socket.gaierror as e:
            print "Login failed %s" % e
        if logged_in:
            upload_key = logged_in
            self._do_submit_to_web(upload_key)
        else:
            self.login_message.set_text(_('Login failed'))

    def _do_submit_to_web(self, key):
        """ Submit project to the server """
        title = self.title_entry.get_text()
        description = self.description_entry.get_buffer().get_text(
            *self.description_entry.get_buffer().get_bounds())
        tafile, imagefile = self.tw.save_for_upload(title)

        # Set a maximum file size for the image to be uploaded.
        if int(os.path.getsize(imagefile)) > self._max_file_size:
            import Image

            while int(os.path.getsize(imagefile)) > self._max_file_size:
                big_file = Image.open(imagefile)
                # resize() takes a (width, height) tuple; the original passed
                # two separate ints, which raises a TypeError.
                smaller_file = big_file.resize(
                    (int(0.9 * big_file.size[0]),
                     int(0.9 * big_file.size[1])),
                    Image.ANTIALIAS)
                smaller_file.save(imagefile, quality=100)

        c = pycurl.Curl()
        c.setopt(c.POST, 1)
        c.setopt(c.FOLLOWLOCATION, 1)
        c.setopt(c.URL, self._upload_server + '/upload')
        c.setopt(c.HTTPHEADER, ["Expect:"])
        c.setopt(c.HTTPPOST, [('file', (c.FORM_FILE, tafile)),
                              ('newimage', (c.FORM_FILE, imagefile)),
                              ('small_image', (c.FORM_FILE, imagefile)),
                              ('title', title),
                              ('description', description),
                              ('upload_key', key),
                              ('_formname', 'image_create')])
        c.perform()
        error_code = c.getinfo(c.HTTP_CODE)
        c.close()  # close() is a method; the original was missing the parens
        os.remove(imagefile)
        os.remove(tafile)
        if error_code == 400:
            self.login_message.set_text(_('Failed to upload!'))
        else:
            self.pop_up.hide()
            self.uploading = False


if __name__ == "__main__":
    # TODO: create test data...
    u = Uploader_plugin(None)
    if u.enabled():
        print "Uploader is enabled... trying to upload"
        u.do_upload_to_web()
        gtk.main()
    else:
        print "Uploader is not enabled... exiting"
| 38.793722 | 79 | 0.626517 | 1,131 | 8,651 | 4.587975 | 0.259063 | 0.018501 | 0.02698 | 0.014454 | 0.164964 | 0.109462 | 0.062054 | 0.034689 | 0.015417 | 0 | 0 | 0.025958 | 0.26968 | 8,651 | 222 | 80 | 38.968468 | 0.795347 | 0.151659 | 0 | 0.084848 | 0 | 0 | 0.057524 | 0 | 0 | 0 | 0 | 0.004505 | 0 | 0 | null | null | 0.060606 | 0.066667 | null | null | 0.024242 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 2 |
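Two details of _do_submit_to_web are worth isolating: the resize loop shrinks the screenshot by 10% per pass until it fits under the size cap, and pycurl's HTTPPOST with FORM_FILE entries performs the multipart upload. The size-cap loop as a self-contained sketch using the modern Pillow import (the original uses the legacy 'import Image' form; LANCZOS is Pillow's current name for the old ANTIALIAS filter):

    import os
    from PIL import Image

    def shrink_to_fit(path, max_bytes, factor=0.9):
        # repeatedly downscale the image in place until it fits under max_bytes
        while os.path.getsize(path) > max_bytes:
            img = Image.open(path)
            new_size = (int(factor * img.size[0]), int(factor * img.size[1]))
            img.resize(new_size, Image.LANCZOS).save(path, quality=100)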
7cb52b86ac3d4c2591309c4f083b28218c8d6a57 | 4,219 | py | Python | clinic/admin.py | Pranavraut033/Patient-Management | 4c48087d8a2aa095e63200af5232f738513333ce | [
"Apache-2.0"
] | null | null | null | clinic/admin.py | Pranavraut033/Patient-Management | 4c48087d8a2aa095e63200af5232f738513333ce | [
"Apache-2.0"
] | null | null | null | clinic/admin.py | Pranavraut033/Patient-Management | 4c48087d8a2aa095e63200af5232f738513333ce | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from .models import *
import nested_admin
com_info = ('Personal information', {'fields': [
    'profile', 'first_name', 'middle_name', 'last_name',
    'gender', 'dob', 'blood_type', 'email', ]})
'''
- Clinic -
'''
class CBranchAssistantDoctorInline(nested_admin.NestedTabularInline):
    model = BranchAssistantDoctor
    extra = 2
class CBranchAddressInline(nested_admin.NestedTabularInline):
    model = BranchAddress
    extra = 1
class CBranchPhoneInline(nested_admin.NestedTabularInline):
    model = BranchPhone
    extra = 1
class CBranchTimmingInline(nested_admin.NestedTabularInline):
    model = BranchTimming
    extra = 5
class CBranchEmployeeInline(nested_admin.NestedTabularInline):
    model = BranchEmployee
    extra = 2
class CBranchInline(nested_admin.NestedStackedInline):
    model = Branch
    extra = 0
    inlines = [CBranchEmployeeInline, CBranchPhoneInline, CBranchAssistantDoctorInline, CBranchAddressInline, CBranchTimmingInline, ]
class ClinicPhoneInline(nested_admin.NestedTabularInline):
    model = ClinicPhone
    extra = 1
class ClinicAdmin(nested_admin.NestedModelAdmin):
    inlines = [ClinicPhoneInline, CBranchInline]
'''
- end -
- BranchAdmin -
'''
class BranchAssistantDoctorInline(admin.TabularInline):
    model = BranchAssistantDoctor
class BranchAddressInline(admin.TabularInline):
    model = BranchAddress
    extra = 1
class BranchPhoneInline(admin.TabularInline):
    model = BranchPhone
    extra = 1
class BranchTimmingInline(admin.TabularInline):
    model = BranchTimming
    extra = 5
class BranchEmployeeInline(admin.TabularInline):
    model = BranchEmployee
    extra = 3
class BranchAdmin(admin.ModelAdmin):
    inlines = [BranchAssistantDoctorInline, BranchEmployeeInline, BranchPhoneInline, BranchAddressInline, BranchTimmingInline, ]
    model = BranchEmployee
    extra = 3
'''
- end -
- Generic Person admin -
'''
class PersonPhoneInline(nested_admin.NestedTabularInline):
    model = PersonPhone
class PersonAddressInline(nested_admin.NestedTabularInline):
    model = PersonAddress
class EmergencyContactPhoneInline(nested_admin.NestedTabularInline):
    model = EmergencyContactPhone
    extra = 0
class EmergencyContactInline(nested_admin.NestedTabularInline):
    model = EmergencyContact
    extra = 1
    inlines = [EmergencyContactPhoneInline]
class PersonAdmin(nested_admin.NestedModelAdmin):
    inlines = [PersonAddressInline, PersonPhoneInline, EmergencyContactInline, ]
'''
- End -
- Others -
'''
class DoctorAdmin(PersonAdmin):
    fieldsets = [
        ('Login information', {'fields': ['username', 'password', ]}),
        ('Extra', {'fields': ['qual', 'bio', 'speciality', ]}),
        com_info,
    ]
    list_display = ('username', 'full_name', 'email', 'speciality', 'reg_time')
class PatientAdmin(PersonAdmin):
    fieldsets = [
        com_info,
        ('Extra', {'fields': ['clinic', 'occupation', 'med_info']}),
    ]
    list_display = ('full_name', 'email', 'reg_time')
'''
- End -
- Case -
'''
class VisitDrugInline(nested_admin.NestedTabularInline):
    model = VisitDrug
    extra = 0
class VisitComplaintInLine(nested_admin.NestedTabularInline):
    model = VisitComplaint
    extra = 1
class VisitExaminationInline(nested_admin.NestedTabularInline):
    model = VisitExamination
    extra = 1
class CaseVisitInline(nested_admin.NestedTabularInline):
    model = CaseVisit
    extra = 1
    inlines = [VisitDrugInline, VisitComplaintInLine, VisitExaminationInline]
class CaseDiseaseInline(nested_admin.NestedTabularInline):
    model = CaseDisease
    extra = 1
class CaseReportInline(nested_admin.NestedTabularInline):
    model = CaseReport
    extra = 0
class CaseAdmin(nested_admin.NestedModelAdmin):
    fieldsets = [
        ('Details', {'fields': ['title', 'doctor', 'patient', 'date', ]}),
        ('Extra', {'fields': ['refer', ]})
    ]
    inlines = [CaseDiseaseInline, CaseReportInline, CaseVisitInline]
    list_display = ('title', 'patient', 'date')
'''
- End -
'''
admin.site.register(Address)
admin.site.register(Phone)
admin.site.register(Disease)
admin.site.register(Drug)
admin.site.register(CaseAppointment)
admin.site.register(Case, CaseAdmin)
admin.site.register(Person, PersonAdmin)
admin.site.register(Doctor, DoctorAdmin)
admin.site.register(Patient, PatientAdmin)
admin.site.register(Branch, BranchAdmin)
admin.site.register(Clinic, ClinicAdmin)
| 25.883436 | 130 | 0.767954 | 388 | 4,219 | 8.257732 | 0.293814 | 0.072097 | 0.149813 | 0.174782 | 0.053059 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00536 | 0.115667 | 4,219 | 162 | 131 | 26.04321 | 0.853391 | 0 | 0 | 0.327273 | 0 | 0 | 0.078499 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.009091 | 0.027273 | 0 | 0.790909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
7cb68869cd0695083bd6764bba9110da90e347a3 | 428 | py | Python | tests/conftest.py | hugovk/tldextract | 9932bb5058f1fdf90c03819ede8bb2a55cf406da | [
"BSD-3-Clause"
] | 1,200 | 2015-01-02T17:49:58.000Z | 2022-03-31T13:32:09.000Z | tests/conftest.py | hugovk/tldextract | 9932bb5058f1fdf90c03819ede8bb2a55cf406da | [
"BSD-3-Clause"
] | 192 | 2015-02-09T15:53:48.000Z | 2022-03-19T20:45:11.000Z | tests/conftest.py | hugovk/tldextract | 9932bb5058f1fdf90c03819ede8bb2a55cf406da | [
"BSD-3-Clause"
] | 186 | 2015-01-10T15:41:45.000Z | 2022-03-17T15:30:22.000Z | """py.test standard config file."""
import logging
import pytest
import tldextract.cache
@pytest.fixture(autouse=True)
def reset_log_level():
"""Automatically reset log level verbosity between tests. Generally want
test output the Unix way: silence is golden."""
tldextract.cache._DID_LOG_UNABLE_TO_CACHE = ( # pylint: disable=protected-access
False
)
logging.getLogger().setLevel(logging.WARN)
| 25.176471 | 85 | 0.733645 | 54 | 428 | 5.685185 | 0.759259 | 0.09772 | 0.084691 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.168224 | 428 | 16 | 86 | 26.75 | 0.86236 | 0.415888 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | true | 0 | 0.333333 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
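autouse=True makes the fixture wrap every test collected under this conftest without being requested by name. The same idea in a generic setup/teardown form, shown as a sketch for contrast with the setup-only fixture above:

    import logging
    import pytest

    @pytest.fixture(autouse=True)
    def quiet_logging():
        root = logging.getLogger()
        previous = root.level
        root.setLevel(logging.WARN)   # silence before the test
        yield
        root.setLevel(previous)       # restore afterwards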
7cbaa828f828e845f3e1e24817269f2c94a74fda | 186 | py | Python | letsmeet/locations/urls.py | letsmeet-click/letsmeet.click | e9332fd37b0f9c289453828dcecb12b4203dc439 | [
"MIT"
] | 15 | 2015-12-20T20:29:29.000Z | 2020-01-21T11:49:14.000Z | letsmeet/locations/urls.py | letsmeet-click/letsmeet.click | e9332fd37b0f9c289453828dcecb12b4203dc439 | [
"MIT"
] | 61 | 2015-12-29T10:23:56.000Z | 2018-06-03T19:58:30.000Z | letsmeet/locations/urls.py | letsmeet-click/letsmeet.click | e9332fd37b0f9c289453828dcecb12b4203dc439 | [
"MIT"
] | 9 | 2015-12-27T14:48:19.000Z | 2017-10-07T13:40:20.000Z | from django.conf.urls import url
from .views import (
    LocationSearchView,
)

urlpatterns = [
    url(r'^create-location/$', LocationSearchView.as_view(), name='location_create'),
]
| 18.6 | 85 | 0.715054 | 21 | 186 | 6.238095 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145161 | 186 | 9 | 86 | 20.666667 | 0.823899 | 0 | 0 | 0 | 0 | 0 | 0.177419 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.285714 | 0 | 0.285714 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
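A hedged usage sketch: once this URLconf is included in the project, the named pattern can be reversed instead of hard-coding the path (the mount point is an assumption):

# Hypothetical usage; assumes this urls.py is included at the project root.
from django.urls import reverse

url_path = reverse('location_create')  # '/create-location/'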
7cbf49d54a84cdf89a8d101c441a55626212e2d4 | 519 | py | Python | day02/solution.py | andrewyang96/AdventOfCode2017 | 665d7869fb8677f41c07ca2177b4fe3ea3356fec | [
"MIT"
] | null | null | null | day02/solution.py | andrewyang96/AdventOfCode2017 | 665d7869fb8677f41c07ca2177b4fe3ea3356fec | [
"MIT"
] | null | null | null | day02/solution.py | andrewyang96/AdventOfCode2017 | 665d7869fb8677f41c07ca2177b4fe3ea3356fec | [
"MIT"
] | null | null | null | import itertools
from typing import List
def checksum(spreadsheet: List[List[int]]) -> int:
    return sum(map(lambda row: max(row) - min(row), spreadsheet))

def checksum2(spreadsheet: List[List[int]]) -> int:
    def process_row(row: List[int]) -> int:
        for x, y in itertools.combinations(row, 2):
            if x > y and x % y == 0:
                return x // y
            elif x < y and y % x == 0:
                return y // x
        raise RuntimeError()
    return sum(map(process_row, spreadsheet))
| 32.4375 | 65 | 0.578035 | 73 | 519 | 4.082192 | 0.410959 | 0.033557 | 0.100671 | 0.147651 | 0.167785 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010929 | 0.294798 | 519 | 15 | 66 | 34.6 | 0.803279 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.230769 | false | 0 | 0.153846 | 0.076923 | 0.692308 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
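A quick sanity check against the puzzle's published examples (the values come from the Advent of Code 2017 day 2 problem statement, not from this repository):

assert checksum([[5, 1, 9, 5], [7, 5, 3], [2, 4, 6, 8]]) == 18   # 8 + 4 + 6
assert checksum2([[5, 9, 2, 8], [9, 4, 7, 3], [3, 8, 6, 5]]) == 9  # 4 + 3 + 2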
7cc2268128342cc54bf1dc01edf96a10e63de6b2 | 472 | py | Python | template_dynamicloader/environment.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | [
"MIT"
] | null | null | null | template_dynamicloader/environment.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | [
"MIT"
] | null | null | null | template_dynamicloader/environment.py | lesspointless/Shakal-NG | eee491af94527228735c2bca7644605effd74b37 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import jinja2.exceptions
from .loader_filesystem import DynamicLoaderMixin
class Environment(DynamicLoaderMixin, jinja2.Environment):
    def get_template(self, name, *args, **kwargs):
        try:
            return super(Environment, self).get_template(self.get_visitors_template(name), *args, **kwargs)
        except jinja2.exceptions.TemplateNotFound:
            return super(Environment, self).get_template(name, *args, **kwargs)
| 31.466667 | 98 | 0.777542 | 55 | 472 | 6.472727 | 0.509091 | 0.092697 | 0.117978 | 0.146067 | 0.207865 | 0.207865 | 0 | 0 | 0 | 0 | 0 | 0.009501 | 0.108051 | 472 | 14 | 99 | 33.714286 | 0.836105 | 0.044492 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.333333 | 0 | 0.777778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
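The override implements a fallback lookup: it first asks the mixin for a visitor-specific template name and, if that template does not exist, falls back to the requested name. A minimal illustration of the same pattern with plain Jinja2 (the template names here are hypothetical):

# Illustration only; not part of the Shakal-NG module above.
import jinja2

env = jinja2.Environment(loader=jinja2.DictLoader({'base.html': 'default skin'}))
try:
    template = env.get_template('skins/dark/base.html')  # visitor-specific name
except jinja2.exceptions.TemplateNotFound:
    template = env.get_template('base.html')  # fall back to the default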
7cdbbcb509c9593655c04c240ef3694f6b11d943 | 2,022 | py | Python | minos/examples/regressor_demo.py | qorrect/sisy | 4c279f3a47109395d57521b5c8144b18693737fc | [
"Apache-2.0"
] | 6 | 2017-09-15T03:14:10.000Z | 2019-12-03T04:15:21.000Z | minos/examples/regressor_demo.py | qorrect/sisy | 4c279f3a47109395d57521b5c8144b18693737fc | [
"Apache-2.0"
] | 2 | 2017-09-21T01:49:42.000Z | 2017-09-23T16:33:01.000Z | minos/examples/regressor_demo.py | qorrect/sisy | 4c279f3a47109395d57521b5c8144b18693737fc | [
"Apache-2.0"
] | null | null | null | import logging
from pprint import pprint
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout
from keras.datasets import boston_housing
import keras.metrics as metrics
import math
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.preprocessing import StandardScaler
from minos.experiment.experiment import ExperimentSettings
from minos.experiment.ga import run_ga_search_experiment
from minos.experiment.training import Training, EpochStoppingCondition
from minos.model.model import Objective, Optimizer, Metric
from minos.model.parameter import int_param, float_param
from minos.train.utils import SimpleBatchIterator, CpuEnvironment
from minos.train.utils import GpuEnvironment
from minos.utils import load_best_model
import numpy as np
(X_train, y_train), (X_test, y_test) = boston_housing.load_data()
def base_model():
    model = Sequential()
    model.add(Dense(14, input_dim=13, init='normal', activation='relu'))
    model.add(Dense(7, init='normal', activation='relu'))
    model.add(Dense(1, init='normal'))
    model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mse'])
    return model
seed = 7
np.random.seed(seed)
scale = StandardScaler()
X_train = scale.fit_transform(X_train)
X_test = scale.transform(X_test)  # transform only: refitting the scaler on test data would leak information
clf = KerasRegressor(build_fn=base_model, nb_epoch=100, batch_size=1, verbose=2)
history = clf.fit(X_train, y_train)
res = clf.predict(X_test)
print(y_test)
print(res)
print(history.history.keys())
# summarize history for accuracy
plt.plot(history.history['loss'])
#plt.plot(history.history['val_acc'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show() | 28.885714 | 82 | 0.766568 | 291 | 2,022 | 5.206186 | 0.381443 | 0.047525 | 0.036964 | 0.055446 | 0.256106 | 0.220462 | 0.220462 | 0.171617 | 0.171617 | 0.171617 | 0 | 0.006685 | 0.112265 | 2,022 | 70 | 83 | 28.885714 | 0.837326 | 0.045994 | 0 | 0.235294 | 0 | 0 | 0.074247 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.372549 | null | null | 0.078431 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 2 |
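Because KerasRegressor is a scikit-learn estimator, the demo extends naturally to sklearn's model-selection utilities; a hedged sketch reusing the objects defined above:

# Hypothetical extension of the demo: 10-fold cross-validation on the
# training data through the scikit-learn wrapper.
from sklearn.model_selection import KFold, cross_val_score

kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
scores = cross_val_score(clf, X_train, y_train, cv=kfold,
                         scoring='neg_mean_squared_error')
print('MSE: %.2f (+/- %.2f)' % (-scores.mean(), scores.std()))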
7cdbe5c5563fc4649f21b2c662a3f27ea1cdec9c | 12,317 | py | Python | src/zero_sum/plots.py | sbhambr1/MarkovGameSolvers | cd5366faf2082e467dc0ef04c52c5b5b000e9176 | [
"MIT"
] | null | null | null | src/zero_sum/plots.py | sbhambr1/MarkovGameSolvers | cd5366faf2082e467dc0ef04c52c5b5b000e9176 | [
"MIT"
] | null | null | null | src/zero_sum/plots.py | sbhambr1/MarkovGameSolvers | cd5366faf2082e467dc0ef04c52c5b5b000e9176 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
strategies = np.array([
[
#gamma = 0.5
[-9.563336572173805, -8.193748914742143, -10.270220524396596, -3.0000000000000004, -7.553846153846153, -7.904142011834319, -3.0, -3.0, -3.0, -3.0],
[-7.378487640724603, -3.3197512739857147, -7.496142688168831, -3.0000000000000004, -3.0, -3.0, -3.0, -3.0, -3.0, -3.0],
[-9.314285714285715, -6.0, -11.8, -6.0, -6.0, -6.0, -6.0, -6.0, -6.0, -6.0]
],
[
#gamma = 0.55
[-10.524329851693846, -9.121195380360348, -11.382794510864285, -3.333333333333334, -8.308123249299719, -8.767977779347031, -3.333333333333334, -3.333333333333334, -3.333333333333334, -3.333333333333334],
[-7.853847828218031, -3.71793284163171, -7.966237505840968, -3.333333333333334, -3.333333333333333, -3.333333333333333, -3.333333333333334, -3.333333333333334, -3.333333333333334, -3.333333333333334],
[-10.028985507246377, -6.666666666666666, -13.11111111111111, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666, -6.666666666666666]
],
[
#gamma = 0.6
[-11.719684181565645, -10.273338406900049, -12.76628614916286, -3.75, -9.231481481481477, -9.840534979423865, -3.75, -3.75, -3.75, -3.75],
[-8.42965572232598, -4.21204039850597, -8.528273788767603, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982, -3.7499999999999982],
[-10.911764705882351, -7.499999999999999, -14.441176470588236, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999, -7.499999999999999]
],
[
#gamma = 0.65
[-13.246274902675406, -11.742746583844642, -14.533245644719841, -4.285714285714285, -10.388807069219439, -11.206747339173734, -4.285714285714285, -4.285714285714285, -4.285714285714285, -4.285714285714285],
[-9.145905020699661, -4.841618667247477, -9.218958552423087, -4.285714285714285, -4.285714285714285, -4.285714285714285, -4.285714285714283, -4.285714285714283, -4.285714285714283, -4.285714285714283],
[-12.03411513859275, -8.57142857142857, -15.616204690831555, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857, -8.57142857142857]
],
[
#gamma = 0.7
[-15.26280962470669, -13.681019910677742, -16.868493443177165, -4.999999999999998, -11.883720930232553, -13.004326663061104, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998],
[-10.068193838520614, -5.671752735193775, -10.099054988853647, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998, -4.999999999999998],
[-13.515151515151512, -9.999999999999996, -17.15151515151515, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996, -9.999999999999996]
],
[
#gamma = 0.75
[-18.048619027086353, -16.354914089347076, -20.098144329896904, -5.999999999999999, -13.893333333333327, -15.47199999999999, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998],
[-11.364085664816024, -6.829408299891047, -11.362958194659226, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998, -5.999999999999998],
[-15.569230769230767, -11.999999999999996, -19.261538461538457, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996, -11.999999999999996]
],
[
#gamma = 0.8
[-22.14552188552189, -20.28181818181818, -24.856363636363632, -7.500000000000002, -16.749999999999993, -19.062499999999993, -7.5, -7.500000000000002, -7.500000000000002, -7.500000000000002],
[-13.227691215343736, -8.540503875101978, -13.175865235686418, -7.5, -7.500000000000001, -7.5, -7.499999999999998, -7.5, -7.5, -7.5],
[-18.625, -15.0, -22.375, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0, -15.0]
],
[
#gamma = 0.85
[-28.76278844268961, -26.61680527433105, -32.56131830251732, -9.999999999999993, -21.169811320754697, -24.752580989676016, -9.999999999999993, -9.999999999999993, -9.999999999999993, -9.999999999999993],
[-16.183356468130675, -11.33189687650437, -16.033301790463963, -9.999999999999993, -9.999999999999991, -9.999999999999993, -9.999999999999993, -9.999999999999993, -9.999999999999993, -9.999999999999993],
[-23.68253968253967, -19.999999999999986, -27.49206349206348, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986, -19.999999999999986]
],
[
#gamma = 0.9
[-41.27742867847752, -38.58932362753994, -47.172156505914224, -14.999999999999755, -29.095238095237843, -35.13605442176845, -14.999999999999753, -14.999999999999753, -14.999999999999753, -14.999999999999753],
[-21.789898957859354, -16.75709624029196, -21.448166972857727, -14.99999999999974, -14.99999999999974, -14.999999999999744, -14.999999999999735, -14.999999999999744, -14.999999999999744, -14.999999999999744],
[-33.74193548387047, -29.999999999999503, -37.61290322580595, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503, -29.999999999999503]
],
[
#gamma = 0.95
[-74.330382553884, -70.25959327963282, -85.68377649107512, -29.99999408538547, -49.09676827893381, -60.80124278465696, -29.999994085385474, -29.99999408538546, -29.999994085385453, -29.999994085385445],
[-37.67557701062915, -32.430971145564975, -36.94165998316571, -29.999994085385467, -29.999994085385474, -29.99999408538546, -29.999994085385474, -29.99999408538546, -29.999994085385453, -29.999994085385445],
[-63.80326685929545, -59.99998817077086, -67.73769308880364, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086, -59.99998817077086]
]
])
gamma = np.array([0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9,0.95])
#Plot 1
V0_URS = []
V0_OPT = []
V0_MMP = []
for i in range(len(strategies)):
    V0_URS.append(strategies[i][0][0])
    V0_OPT.append(strategies[i][1][0])
    V0_MMP.append(strategies[i][2][0])
plt.plot(gamma, np.asarray(V0_URS), marker='o')
plt.plot(gamma, np.asarray(V0_OPT), marker='x')
plt.plot(gamma, np.asarray(V0_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S0.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 2
V1_URS = []
V1_OPT = []
V1_MMP = []
for i in range(len(strategies)):
    V1_URS.append(strategies[i][0][1])
    V1_OPT.append(strategies[i][1][1])
    V1_MMP.append(strategies[i][2][1])
plt.plot(gamma, np.asarray(V1_URS), marker='o')
plt.plot(gamma, np.asarray(V1_OPT), marker='x')
plt.plot(gamma, np.asarray(V1_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S1.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 3
V2_URS = []
V2_OPT = []
V2_MMP = []
for i in range(len(strategies)):
    V2_URS.append(strategies[i][0][2])
    V2_OPT.append(strategies[i][1][2])
    V2_MMP.append(strategies[i][2][2])
plt.plot(gamma, np.asarray(V2_URS), marker='o')
plt.plot(gamma, np.asarray(V2_OPT), marker='x')
plt.plot(gamma, np.asarray(V2_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S2.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 4
V3_URS = []
V3_OPT = []
V3_MMP = []
for i in range(len(strategies)):
    V3_URS.append(strategies[i][0][3])
    V3_OPT.append(strategies[i][1][3])
    V3_MMP.append(strategies[i][2][3])
plt.plot(gamma, np.asarray(V3_URS), marker='o')
plt.plot(gamma, np.asarray(V3_OPT), marker='x')
plt.plot(gamma, np.asarray(V3_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S3.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 5
V4_URS = []
V4_OPT = []
V4_MMP = []
for i in range(len(strategies)):
    V4_URS.append(strategies[i][0][4])
    V4_OPT.append(strategies[i][1][4])
    V4_MMP.append(strategies[i][2][4])
plt.plot(gamma, np.asarray(V4_URS), marker='o')
plt.plot(gamma, np.asarray(V4_OPT), marker='x')
plt.plot(gamma, np.asarray(V4_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S4.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 6
V5_URS = []
V5_OPT = []
V5_MMP = []
for i in range(len(strategies)):
    V5_URS.append(strategies[i][0][5])
    V5_OPT.append(strategies[i][1][5])
    V5_MMP.append(strategies[i][2][5])
plt.plot(gamma, np.asarray(V5_URS), marker='o')
plt.plot(gamma, np.asarray(V5_OPT), marker='x')
plt.plot(gamma, np.asarray(V5_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S5.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 7
V6_URS = []
V6_OPT = []
V6_MMP = []
for i in range(len(strategies)):
    V6_URS.append(strategies[i][0][6])
    V6_OPT.append(strategies[i][1][6])
    V6_MMP.append(strategies[i][2][6])
plt.plot(gamma, np.asarray(V6_URS), marker='o')
plt.plot(gamma, np.asarray(V6_OPT), marker='x')
plt.plot(gamma, np.asarray(V6_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S6.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 8
V7_URS = []
V7_OPT = []
V7_MMP = []
for i in range(len(strategies)):
    V7_URS.append(strategies[i][0][7])
    V7_OPT.append(strategies[i][1][7])
    V7_MMP.append(strategies[i][2][7])
plt.plot(gamma, np.asarray(V7_URS), marker='o')
plt.plot(gamma, np.asarray(V7_OPT), marker='x')
plt.plot(gamma, np.asarray(V7_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S7.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 9
V8_URS = []
V8_OPT = []
V8_MMP = []
for i in range(len(strategies)):
    V8_URS.append(strategies[i][0][8])
    V8_OPT.append(strategies[i][1][8])
    V8_MMP.append(strategies[i][2][8])
plt.plot(gamma, np.asarray(V8_URS), marker='o')
plt.plot(gamma, np.asarray(V8_OPT), marker='x')
plt.plot(gamma, np.asarray(V8_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S8.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show()
#Plot 10
V9_URS = []
V9_OPT = []
V9_MMP = []
for i in range(len(strategies)):
    V9_URS.append(strategies[i][0][9])
    V9_OPT.append(strategies[i][1][9])
    V9_MMP.append(strategies[i][2][9])
plt.plot(gamma, np.asarray(V9_URS), marker='o')
plt.plot(gamma, np.asarray(V9_OPT), marker='x')
plt.plot(gamma, np.asarray(V9_MMP), marker='+')
plt.ylabel("Defender's Utility $\longrightarrow$")
plt.xlabel("$\gamma \longrightarrow$")
plt.title("Defender's value in state S9.")
plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy', 'Min Max Pure Strategy'], loc='lower left')
plt.show() | 41.47138 | 218 | 0.697735 | 1,625 | 12,317 | 5.233231 | 0.150769 | 0.025517 | 0.059972 | 0.049389 | 0.691087 | 0.613123 | 0.562794 | 0.51952 | 0.382879 | 0.382879 | 0 | 0.420747 | 0.11951 | 12,317 | 297 | 219 | 41.47138 | 0.363393 | 0.014289 | 0 | 0.287805 | 0 | 0 | 0.138591 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.009756 | 0 | 0.009756 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
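The ten per-state blocks above differ only in the state index, so an equivalent loop form is possible; a sketch, assuming the same strategies array and gamma grid (it produces the same ten figures):

for s in range(10):
    v_urs = [row[0][s] for row in strategies]
    v_opt = [row[1][s] for row in strategies]
    v_mmp = [row[2][s] for row in strategies]
    plt.plot(gamma, np.asarray(v_urs), marker='o')
    plt.plot(gamma, np.asarray(v_opt), marker='x')
    plt.plot(gamma, np.asarray(v_mmp), marker='+')
    plt.ylabel(r"Defender's Utility $\longrightarrow$")
    plt.xlabel(r"$\gamma \longrightarrow$")
    plt.title("Defender's value in state S%d." % s)
    plt.legend(['Uniform Random Strategy', 'Optimal Mixed Strategy',
                'Min Max Pure Strategy'], loc='lower left')
    plt.show()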
7ceb9afd4c46af89706765716641a6c60117494e | 850 | py | Python | src/2-learn_ifelse.py | padmaparam/kids-py-learn | 0bc86c285d3398782ff529ff0a2105db77102245 | [
"MIT"
] | null | null | null | src/2-learn_ifelse.py | padmaparam/kids-py-learn | 0bc86c285d3398782ff529ff0a2105db77102245 | [
"MIT"
] | null | null | null | src/2-learn_ifelse.py | padmaparam/kids-py-learn | 0bc86c285d3398782ff529ff0a2105db77102245 | [
"MIT"
] | 1 | 2021-06-22T18:20:22.000Z | 2021-06-22T18:20:22.000Z | pass_marks = 70
if pass_marks == 70:
    print('pass')

its_raining = True  # you can change this to False
if its_raining:
    print("It's raining!")

its_raining = True  # you can change this to False
its_not_raining = not its_raining  # False if its_raining, True otherwise
if its_raining:
    print("It's raining!")
if its_not_raining:
    print("It's not raining.")

if its_raining:
    print("It's raining!")
else:
    print("It's not raining.")

if pass_marks < 70:
    print('Retake Exam')
elif pass_marks == 70:
    print('Just Pass')
elif pass_marks == 80:
    print('Pass C grade')
elif pass_marks == 90:
    print('Pass B grade')
elif 90 <= pass_marks <= 95:  # elif pass_marks >= 90 and pass_marks <= 95:
    print('Pass A grade')
else:
    print(f'Not sure what to do with pass marks: {pass_marks}')
| 22.368421 | 74 | 0.64 | 135 | 850 | 3.874074 | 0.266667 | 0.189293 | 0.076482 | 0.114723 | 0.434034 | 0.369025 | 0.296367 | 0.141491 | 0.141491 | 0 | 0 | 0.031056 | 0.242353 | 850 | 37 | 75 | 22.972973 | 0.781056 | 0.162353 | 0 | 0.428571 | 0 | 0 | 0.257426 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.464286 | 0 | 0 | 0 | 0.428571 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 2 |
7cf5bf1763fa07c6426c1a5a3b8b4abb9d8d881a | 456 | py | Python | 1-50/p15.py | YiWeiShen/Project-Euler-Hints | a79cacab075dd98d393516f083aaa7ffc6115a06 | [
"MIT"
] | 1 | 2019-02-25T13:00:31.000Z | 2019-02-25T13:00:31.000Z | 1-50/p15.py | YiWeiShen/Project-Euler-Hints | a79cacab075dd98d393516f083aaa7ffc6115a06 | [
"MIT"
] | null | null | null | 1-50/p15.py | YiWeiShen/Project-Euler-Hints | a79cacab075dd98d393516f083aaa7ffc6115a06 | [
"MIT"
] | null | null | null | dic_cal = {}
def cal(a, b):
    if (a, b) in dic_cal:
        return dic_cal[(a, b)]
    return cal(a - 1, b) + cal(a, b - 1)

if __name__ == '__main__':
    for j in range(0, 1):
        for k in range(0, 21):
            dic_cal[(j, k)] = 1
    for j in range(0, 21):
        for k in range(0, 1):
            dic_cal[(j, k)] = 1
    for j in range(0, 21):
        for k in range(0, 21):
            dic_cal[(j, k)] = cal(j, k)
    print(cal(20, 20))
| 21.714286 | 40 | 0.453947 | 86 | 456 | 2.244186 | 0.22093 | 0.186529 | 0.248705 | 0.207254 | 0.549223 | 0.487047 | 0.487047 | 0.487047 | 0.487047 | 0.487047 | 0 | 0.083624 | 0.370614 | 456 | 20 | 41 | 22.8 | 0.58885 | 0 | 0 | 0.375 | 0 | 0 | 0.017544 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0 | 0 | 0.1875 | 0.0625 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
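The memoised recursion counts monotone lattice paths, which have a closed form as a central binomial coefficient; a cross-check using only the standard library:

# C(40, 20) counts the lattice routes through a 20x20 grid.
from math import factorial

assert factorial(40) // (factorial(20) ** 2) == 137846528820  # == cal(20, 20)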
7cf938bb07291edc998eff9114640d945346849f | 119 | py | Python | installator/hrim/scripts/utils.py | peterpolidoro/HRIM | 148a6a5f88543ad1b62efecdccf7bbb2b744877b | [
"Apache-2.0"
] | 32 | 2018-02-05T15:32:19.000Z | 2019-02-14T03:16:38.000Z | installator/hrim/scripts/utils.py | peterpolidoro/HRIM | 148a6a5f88543ad1b62efecdccf7bbb2b744877b | [
"Apache-2.0"
] | 24 | 2018-02-06T15:41:56.000Z | 2019-02-18T15:42:35.000Z | installator/hrim/scripts/utils.py | peterpolidoro/HRIM | 148a6a5f88543ad1b62efecdccf7bbb2b744877b | [
"Apache-2.0"
] | 17 | 2019-02-21T17:27:31.000Z | 2022-01-21T02:28:16.000Z | # shorthand for tabulation
def get_tabs(num):
    ret = ""
    for _ in range(num):
        ret += "\t"
    return ret
| 17 | 26 | 0.554622 | 16 | 119 | 4 | 0.75 | 0.1875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.319328 | 119 | 6 | 27 | 19.833333 | 0.790123 | 0.201681 | 0 | 0 | 0 | 0 | 0.021505 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
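For reference, string repetition gives the same result in one line (same behavior as get_tabs above):

def get_tabs_oneliner(num):
    return "\t" * num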
6b0053f1050f11f7e42b9d53878cbe0c498bae6c | 5,085 | py | Python | deepensemble/utils/score_functions.py | pdoren/correntropy-and-ensembles-in-deep-learning | b8e39e0ea97395e9f4ef5e9b351551a89fedc885 | [
"MIT"
] | 1 | 2017-11-22T15:35:45.000Z | 2017-11-22T15:35:45.000Z | deepensemble/utils/score_functions.py | pdoren/correntropy-and-ensembles-in-deep-learning | b8e39e0ea97395e9f4ef5e9b351551a89fedc885 | [
"MIT"
] | null | null | null | deepensemble/utils/score_functions.py | pdoren/correntropy-and-ensembles-in-deep-learning | b8e39e0ea97395e9f4ef5e9b351551a89fedc885 | [
"MIT"
] | 1 | 2021-12-14T04:16:57.000Z | 2021-12-14T04:16:57.000Z | import theano.tensor as T
import numpy as np
from ..utils.utils_functions import ITLFunctions
__all__ = [
    'dummy_score',
    'get_accuracy',
    'score_accuracy',
    'score_ensemble_ambiguity',
    'score_rms',
    'score_silverman',
    'mutual_information_cs',
    'mutual_information_ed',
    'mutual_information_parzen'
]
# noinspection PyUnusedLocal
def dummy_score(_input, _output, _target, model):
""" Dummy score function, this function only return zeros for each elements in _target.
Parameters
----------
_input : theano.tensor.matrix
Input Sample.
_output : theano.tensor.matrix
Output model.
_target : theano.tensor.matrix
Target Sample.
model : Model
Model.
Returns
-------
theano.tensor.matrix
Returns only zeros for each elements in _target.
"""
return T.zeros(_target.shape)
#
# Classification Functions
#
def get_accuracy(Y, _target):
# noinspection PyTypeChecker,PyTypeChecker
return float(np.sum(Y == _target)) / float(_target.shape[0])
# noinspection PyUnusedLocal
def score_accuracy(_input, _output, _target, model):
""" Accuracy score in a classifier models.
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
theano.tensor.matrix
Returns accuracy in a classifier models.
"""
return _target.shape[-1] * T.mean(_target * _output)
# noinspection PyUnusedLocal
def score_ensemble_ambiguity(_input, _output, _target, model):
""" Score ambiguity for Ensemble.
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
float
Returns a score ambiguity.
"""
ensemble = model
err = [T.mean(T.sqr(model.output(_input, prob=False) - _output)) for model in ensemble.get_models()]
return sum(err) / ensemble.get_num_models()
# noinspection PyUnusedLocal
def score_silverman(_input, _output, _target, model):
""" Score Silverman.
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
float
Returns size kernel with Silverman Rule.
"""
return ITLFunctions.silverman(model.output(_input))
#
# Regression Functions
#
# noinspection PyUnusedLocal
def score_rms(_input, _output, _target, model):
""" Gets Root Mean Square like score in a regressor model.
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
theano.tensor.matrix
Returns Root Mean Square.
"""
return T.mean(T.power(_output - _target, 2.0))
# noinspection PyUnusedLocal
def mutual_information_cs(_input, _output, _target, model):
""" Quadratic Mutual Information Cauchy-Schwarz
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
theano.tensor.matrix
Returns Quadratic Mutual Information Cauchy-Schwarz.
"""
s = ITLFunctions.silverman(_target)
return ITLFunctions.mutual_information_cs([_output], _target, s)
# noinspection PyUnusedLocal
def mutual_information_ed(_input, _output, _target, model):
""" Quadratic Mutual Information Euclidean.
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
theano.tensor.matrix
Returns Quadratic Mutual Information Euclidean.
"""
s = ITLFunctions.silverman(_target)
return ITLFunctions.mutual_information_ed([_output], _target, s)
# noinspection PyUnusedLocal
def mutual_information_parzen(_input, _output, _target, model):
""" Mutual Information (Parzen Window)
Parameters
----------
_input : theano.tensor.matrix
Input sample.
_output : theano.tensor.matrix
Output sample.
_target : theano.tensor.matrix
Target sample.
model : Model
Model.
Returns
-------
theano.tensor.matrix
Returns Mutual Information (Parzen Window).
"""
s = ITLFunctions.silverman(_target)
return ITLFunctions.mutual_information_parzen(_output, _target, s)
| 20.840164 | 104 | 0.642085 | 522 | 5,085 | 6.042146 | 0.153257 | 0.117945 | 0.171211 | 0.055802 | 0.604312 | 0.56468 | 0.546925 | 0.516487 | 0.419784 | 0.419784 | 0 | 0.001055 | 0.254277 | 5,085 | 243 | 105 | 20.925926 | 0.830696 | 0.564995 | 0 | 0.081081 | 0 | 0 | 0.091511 | 0.054786 | 0 | 0 | 0 | 0 | 0 | 1 | 0.243243 | false | 0 | 0.081081 | 0.027027 | 0.567568 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
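All score functions share the signature (_input, _output, _target, model); a hedged sketch of compiling one into a callable with symbolic variables, illustrative only, since the framework's real wiring may differ:

# score_rms ignores _input and model, so placeholders suffice here.
import theano
import theano.tensor as T

_output = T.matrix('output')
_target = T.matrix('target')
rms_fn = theano.function([_output, _target],
                         score_rms(None, _output, _target, None))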
6b0f13bf4f6bc44ded324d1a7d8e36ee19cdd23f | 3,292 | py | Python | src/zope/container/btree.py | zopefoundation/zope.container | 4bb562436f89cf2e561d63292893d106bee73087 | [
"ZPL-2.1"
] | 3 | 2019-03-29T19:49:05.000Z | 2021-02-05T07:27:16.000Z | src/zope/container/btree.py | zopefoundation/zope.container | 4bb562436f89cf2e561d63292893d106bee73087 | [
"ZPL-2.1"
] | 32 | 2015-05-20T07:21:45.000Z | 2021-11-19T16:01:16.000Z | src/zope/container/btree.py | zopefoundation/zope.container | 4bb562436f89cf2e561d63292893d106bee73087 | [
"ZPL-2.1"
] | 8 | 2015-04-03T08:28:25.000Z | 2021-09-14T08:33:21.000Z | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""This module provides a sample btree container implementation.
"""
__docformat__ = 'restructuredtext'
from persistent import Persistent
from BTrees.OOBTree import OOBTree
from BTrees.Length import Length
from zope.container.interfaces import IBTreeContainer
from zope.container.contained import Contained, setitem, uncontained
from zope.interface import implementer
from zope.cachedescriptors.property import Lazy
@implementer(IBTreeContainer)
class BTreeContainer(Contained, Persistent):
"""OOBTree-based container"""
def __init__(self):
# We keep the previous attribute to store the data
# for backward compatibility
self._SampleContainer__data = self._newContainerData()
self.__len = Length()
def _newContainerData(self):
"""Construct an item-data container
Subclasses should override this if they want different data.
The value returned is a mapping object that also has get,
has_key, keys, items, and values methods.
The default implementation uses an OOBTree.
"""
return OOBTree()
def __contains__(self, key):
"""See interface IReadContainer
"""
return key in self._SampleContainer__data
@Lazy
def _BTreeContainer__len(self):
l_ = Length()
ol = len(self._SampleContainer__data)
if ol > 0:
l_.change(ol)
self._p_changed = True
return l_
def __len__(self):
return self.__len()
def _setitemf(self, key, value):
# make sure our lazy property gets set
l_ = self.__len
self._SampleContainer__data[key] = value
l_.change(1)
def __iter__(self):
return iter(self._SampleContainer__data)
def __getitem__(self, key):
'''See interface `IReadContainer`'''
return self._SampleContainer__data[key]
def get(self, key, default=None):
'''See interface `IReadContainer`'''
return self._SampleContainer__data.get(key, default)
def __setitem__(self, key, value):
setitem(self, self._setitemf, key, value)
def __delitem__(self, key):
# make sure our lazy property gets set
l_ = self.__len
item = self._SampleContainer__data[key]
del self._SampleContainer__data[key]
l_.change(-1)
uncontained(item, self, key)
has_key = __contains__
def items(self, key=None):
return self._SampleContainer__data.items(key)
def keys(self, key=None):
return self._SampleContainer__data.keys(key)
def values(self, key=None):
return self._SampleContainer__data.values(key)
| 31.653846 | 78 | 0.654313 | 375 | 3,292 | 5.482667 | 0.370667 | 0.110895 | 0.134241 | 0.070525 | 0.171206 | 0.171206 | 0.148833 | 0.036965 | 0.036965 | 0.036965 | 0 | 0.005078 | 0.222357 | 3,292 | 103 | 79 | 31.961165 | 0.798047 | 0.31531 | 0 | 0.038462 | 0 | 0 | 0.007956 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.269231 | false | 0 | 0.134615 | 0.096154 | 0.634615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 |
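A hedged usage sketch of the container above (keys must be orderable; values are typically persistent, contained objects, and real usage normally happens inside a ZODB transaction):

container = BTreeContainer()
container['alpha'] = Contained()  # setitem() fires the containment events
print(len(container), 'alpha' in container)  # 1 True
del container['alpha']            # uncontained() fires the removal event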
6b1a41570c05b8e22d2de4240132c87bd9212f23 | 238 | py | Python | furystoolbox/common.py | colinfrei/furystoolbox | 2a8613393a46ad6ae2ad2c2fa86fd255fea96796 | [
"MIT"
] | 1 | 2020-01-03T00:32:35.000Z | 2020-01-03T00:32:35.000Z | furystoolbox/common.py | colinfrei/furystoolbox | 2a8613393a46ad6ae2ad2c2fa86fd255fea96796 | [
"MIT"
] | 1 | 2020-02-08T08:54:31.000Z | 2020-02-08T09:31:30.000Z | furystoolbox/common.py | colinfrei/furystoolbox | 2a8613393a46ad6ae2ad2c2fa86fd255fea96796 | [
"MIT"
] | 1 | 2020-02-08T06:54:29.000Z | 2020-02-08T06:54:29.000Z | """Common."""
def share(data):
    """Upload ``data`` to the hastebin-style paste service and print the share URL."""
    import requests

    base = 'https://bin.halfdecent.io/'
    url = "{}documents".format(base)
    post = requests.post(url, data=data).json()
    print("{}{}".format(base, post['key']))
| 21.636364 | 47 | 0.571429 | 28 | 238 | 4.857143 | 0.642857 | 0.147059 | 0.205882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.184874 | 238 | 10 | 48 | 23.8 | 0.701031 | 0.054622 | 0 | 0 | 0 | 0 | 0.205607 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.166667 | 0 | 0.333333 | 0.166667 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
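A usage example (note that calling it performs a real HTTP POST):

share('hello, world')  # prints e.g. https://bin.halfdecent.io/<key>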
6b1faab7b0d620ac960ed3683c53054afe93ab0d | 1,301 | py | Python | src/swig_python/contrib/address.py | alessandro-saglimbeni/libwally-core | ae84c26519b6c513332c19bc183dc0b584f4bf33 | [
"MIT"
] | 1 | 2021-06-06T18:11:23.000Z | 2021-06-06T18:11:23.000Z | src/swig_python/contrib/address.py | alessandro-saglimbeni/libwally-core | ae84c26519b6c513332c19bc183dc0b584f4bf33 | [
"MIT"
] | 1 | 2020-08-26T03:41:55.000Z | 2020-08-26T03:41:55.000Z | src/swig_python/contrib/address.py | alessandro-saglimbeni/libwally-core | ae84c26519b6c513332c19bc183dc0b584f4bf33 | [
"MIT"
] | 2 | 2020-08-24T07:39:11.000Z | 2020-09-30T22:36:40.000Z | """Tests for addresses"""
import unittest
from wallycore import *
h2b = hex_to_bytes
class AddressTests(unittest.TestCase):
    def test_b58_address(self):
        for address, scriptpubkey, network in [
                ('mxvewdhKCenLkYgNa8irv1UM2omEWPMdEE', h2b('76a914bef5a2f9a56a94aab12459f72ad9cf8cf19c7bbe88ac'), WALLY_NETWORK_BITCOIN_TESTNET),  # p2pkh
                ('2N5XyEfAXtVde7mv6idZDXp5NFwajYEj9TD', h2b('a91486cc442a97817c245ce90ed0d31d6dbcde3841f987'), WALLY_NETWORK_BITCOIN_TESTNET),  # p2sh
                ('1JQheacLPdM5ySCkrZkV66G2ApAXe1mqLj', h2b('76a914bef5a2f9a56a94aab12459f72ad9cf8cf19c7bbe88ac'), WALLY_NETWORK_BITCOIN_MAINNET),  # p2pkh
                ('3DymAvEWH38HuzHZ3VwLus673bNZnYwNXu', h2b('a91486cc442a97817c245ce90ed0d31d6dbcde3841f987'), WALLY_NETWORK_BITCOIN_MAINNET),  # p2sh
                ('XYtnYoGoSeE9ouMEVi6mfeujhjT2VnJncA', h2b('a914ec51ffb65120594389733bf8625f542446d97f7987'), WALLY_NETWORK_LIQUID_REGTEST),
                ('H5nswXhfo8AMt159sgA5FWT35De34hVR4o', h2b('a914f80278b2011573a2ac59c83fadf929b0fc57ad0187'), WALLY_NETWORK_LIQUID),
        ]:
            self.assertEqual(address_to_scriptpubkey(address, network), scriptpubkey)
            self.assertEqual(scriptpubkey_to_address(scriptpubkey, network), address)

if __name__ == '__main__':
    unittest.main()
| 52.04 | 150 | 0.767871 | 92 | 1,301 | 10.5 | 0.478261 | 0.074534 | 0.078675 | 0.134576 | 0.289855 | 0 | 0 | 0 | 0 | 0 | 0 | 0.20162 | 0.146042 | 1,301 | 24 | 151 | 54.208333 | 0.667867 | 0.032283 | 0 | 0 | 0 | 0 | 0.3976 | 0.3912 | 0 | 0 | 0 | 0 | 0.117647 | 1 | 0.058824 | false | 0 | 0.117647 | 0 | 0.235294 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
6b2284791e4cbb47707cb2069bb18424a58890cd | 995 | py | Python | ml3d/datasets/__init__.py | kylevedder/SparsePointPillars | 00d73c9ae5153d3a108fc05c6dface93c5b5ef47 | [
"MIT"
] | 7 | 2021-08-11T02:21:37.000Z | 2022-01-15T19:32:04.000Z | ml3d/datasets/__init__.py | kylevedder/SparsePointPillars | 00d73c9ae5153d3a108fc05c6dface93c5b5ef47 | [
"MIT"
] | null | null | null | ml3d/datasets/__init__.py | kylevedder/SparsePointPillars | 00d73c9ae5153d3a108fc05c6dface93c5b5ef47 | [
"MIT"
] | null | null | null | """I/O, attributes, and processing for different datasets."""
from .semantickitti import SemanticKITTI
from .s3dis import S3DIS
from .parislille3d import ParisLille3D
from .toronto3d import Toronto3D
from .customdataset import Custom3D
from .semantic3d import Semantic3D
from .inference_dummy import InferenceDummySplit
from .samplers import SemSegRandomSampler, SemSegSpatiallyRegularSampler
from . import utils
from .kitti import KITTI
from .nuscenes import NuScenes
from .waymo import Waymo
from .lyft import Lyft
from .shapenet import ShapeNet
from .argoverse import Argoverse
from .scannet import Scannet
from .sunrgbd import SunRGBD
from .habitat_sampling import HabitatSampling
__all__ = [
    'SemanticKITTI', 'S3DIS', 'Toronto3D', 'ParisLille3D', 'Semantic3D',
    'Custom3D', 'utils', 'KITTI', 'Waymo', 'NuScenes', 'Lyft', 'ShapeNet',
    'SemSegRandomSampler', 'InferenceDummySplit',
    'SemSegSpatiallyRegularSampler', 'Argoverse', 'Scannet', 'SunRGBD',
    'HabitatSampling'
]
| 33.166667 | 74 | 0.78392 | 102 | 995 | 7.588235 | 0.352941 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016129 | 0.127638 | 995 | 29 | 75 | 34.310345 | 0.875576 | 0.055276 | 0 | 0 | 0 | 0 | 0.210921 | 0.031049 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.72 | 0 | 0.72 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 2 |
6b29d3842c62d0b492f11566cb54c9837be91f10 | 2,575 | py | Python | tools/nntool/tests/test_nngraph.py | knmcguire/gap_sdk | 7b0a09a353ab6f0550793d40bd46e98051f4a3d7 | [
"Apache-2.0"
] | null | null | null | tools/nntool/tests/test_nngraph.py | knmcguire/gap_sdk | 7b0a09a353ab6f0550793d40bd46e98051f4a3d7 | [
"Apache-2.0"
] | null | null | null | tools/nntool/tests/test_nngraph.py | knmcguire/gap_sdk | 7b0a09a353ab6f0550793d40bd46e98051f4a3d7 | [
"Apache-2.0"
] | null | null | null | from importer.tflite.new_tflite_graph_all import create_graph
from graph.manipulations import add_dimensions, calculate_liveness
from graph.types import Parameters
def verify_steps(steps, cnt):
    assert len(steps) == cnt
    assert all(isinstance(step['node'], Parameters) for step in steps)

def test_load1(mnist_graph):
    G = create_graph(mnist_graph, {})
    assert G

def test_load2(ir_graph):
    G = create_graph(ir_graph, {})
    assert G

def test_load3(ssd_graph):
    G = create_graph(ssd_graph, {})
    assert G

def test_load4(cifar10_graph):
    G = create_graph(cifar10_graph, {})
    assert G

def test_add_dimension1(mnist_graph):
    G = create_graph(mnist_graph, {})
    steps = add_dimensions(G)
    verify_steps(steps, 10)

def test_add_dimension2(ir_graph):
    G = create_graph(ir_graph, {})
    steps = add_dimensions(G)
    verify_steps(steps, 31)

def test_add_dimension3(ssd_graph):
    G = create_graph(ssd_graph, {})
    steps = add_dimensions(G)
    verify_steps(steps, 40)

def test_add_dimension4(cifar10_graph):
    G = create_graph(cifar10_graph, {})
    steps = add_dimensions(G)
    verify_steps(steps, 16)

def test_liveness1(mnist_graph):
    G = create_graph(mnist_graph, {})
    steps = add_dimensions(G)
    liveness = calculate_liveness(G, steps)
    assert len(liveness) == 9  # no record for 1 output

def test_liveness2(ir_graph):
    G = create_graph(ir_graph, {})
    steps = add_dimensions(G)
    liveness = calculate_liveness(G, steps)
    assert len(liveness) == 23  # no record for 8 outputs

def test_liveness3(ssd_graph):
    G = create_graph(ssd_graph, {})
    assert G
    steps = add_dimensions(G)
    liveness = calculate_liveness(G, steps)
    assert len(liveness) == 39  # no record for 1 output

def test_liveness4(cifar10_graph):
    G = create_graph(cifar10_graph, {})
    assert G
    steps = add_dimensions(G)
    liveness = calculate_liveness(G, steps)
    assert len(liveness) == 15  # no record for 1 output

def test_adjust1(mnist_graph):
    G = create_graph(mnist_graph, {'load_tensors': True})
    G.add_dimensions()
    G.adjust_order()

def test_adjust2(ir_graph):
    G = create_graph(ir_graph, {'load_tensors': True})
    G.add_dimensions()
    G.adjust_order()

def test_adjust4(cifar10_graph):
    G = create_graph(cifar10_graph, {'load_tensors': True})
    G.add_dimensions()
    G.adjust_order()
# TODO - Implement Reshape and Concat for adjust
# def test_adjust3():
# G = create_graph('examples/New_SSD_Model.tflite', {'load_tensors': True})
# G.add_dimensions()
# G.adjust_order()
| 28.296703 | 79 | 0.706019 | 366 | 2,575 | 4.685792 | 0.202186 | 0.109038 | 0.111953 | 0.148688 | 0.694461 | 0.669971 | 0.669971 | 0.514869 | 0.449563 | 0.348688 | 0 | 0.024263 | 0.183689 | 2,575 | 90 | 80 | 28.611111 | 0.791627 | 0.109126 | 0 | 0.529412 | 0 | 0 | 0.017513 | 0 | 0 | 0 | 0 | 0.011111 | 0.176471 | 1 | 0.235294 | false | 0 | 0.044118 | 0 | 0.279412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 |
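The tests above receive their graphs through pytest fixtures (mnist_graph, ir_graph, ssd_graph, cifar10_graph); a hedged sketch of the kind of conftest.py that would supply them (the file path is hypothetical):

# Hypothetical conftest.py; create_graph() takes a TFLite file path.
import pytest

@pytest.fixture
def mnist_graph():
    return 'tests/graph/mnist_model.tflite'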