hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
53023c9d299cd94c463a0b0311476646d5bba07e
| 37
|
py
|
Python
|
tests/components/light/__init__.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/light/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/components/light/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""The tests for Light platforms."""
| 18.5
| 36
| 0.675676
| 5
| 37
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 37
| 1
| 37
| 37
| 0.78125
| 0.810811
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5312cf409352c230679292ca642b31e0d4a71e10
| 97
|
py
|
Python
|
glass/__init__.py
|
timgates42/glass.py
|
846638dcc2d596ba48d68859affc9f5e30823e50
|
[
"Apache-2.0"
] | null | null | null |
glass/__init__.py
|
timgates42/glass.py
|
846638dcc2d596ba48d68859affc9f5e30823e50
|
[
"Apache-2.0"
] | null | null | null |
glass/__init__.py
|
timgates42/glass.py
|
846638dcc2d596ba48d68859affc9f5e30823e50
|
[
"Apache-2.0"
] | null | null | null |
import exceptions
from app import Application
from user import User
from timeline import Timeline
| 24.25
| 29
| 0.865979
| 14
| 97
| 6
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134021
| 97
| 4
| 29
| 24.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
53172c75e191312bc4a1a98bdc912d5b03b9f838
| 1,929
|
py
|
Python
|
P2/assignment/testing_structures/split_list.py
|
Pierrefha/introduction-to-ai-practials
|
55050d710398d82a5358bc04fea5a4cac6f7b74f
|
[
"MIT"
] | null | null | null |
P2/assignment/testing_structures/split_list.py
|
Pierrefha/introduction-to-ai-practials
|
55050d710398d82a5358bc04fea5a4cac6f7b74f
|
[
"MIT"
] | null | null | null |
P2/assignment/testing_structures/split_list.py
|
Pierrefha/introduction-to-ai-practials
|
55050d710398d82a5358bc04fea5a4cac6f7b74f
|
[
"MIT"
] | null | null | null |
import random
def swap_second_half(state_one, state_two, cut_position):
"""" Cuts both states at cut position and swaps
the resulting second parts with each other.
Returns a pair of cut and swapped states.
"""
first_state_left = first_state[0:cut_position]
first_state_right = second_state[cut_position:]
second_state_left = second_state[0:cut_position]
second_state_right = first_state[cut_position:]
resulting_first = first_state_left+first_state_right
resulting_second = second_state_left+second_state_right
return (resulting_first, resulting_second)
def swap_first_half(state_one, state_two, cut_position):
"""" Cuts both states at cut position and swaps
the resulting first parts with each other.
Returns a pair of cut and swapped states.
"""
first_state_right = first_state[cut_position:]
first_state_left = second_state[0:cut_position:]
second_state_right = second_state[cut_position:]
second_state_left = first_state[0:cut_position:]
resulting_first = first_state_left+first_state_right
resulting_second = second_state_left+second_state_right
return (resulting_first, resulting_second)
if __name__ == '__main__':
first_state = "11112222"
second_state = "33334444"
print(f"first state:{first_state} second state: {second_state}")
for i in range(5):
cut_position = round(random.random()*5)+1
if(random.random() > 0.5):
print("swap first half")
result = swap_first_half(first_state, second_state, cut_position)
print("new first state: %s new second state: %s"
% (result[0], result[1]))
else:
print("swap second half")
result = swap_second_half(first_state, second_state, cut_position)
print("new first state: %s new second state: %s"
% (result[0], result[1]))
| 38.58
| 78
| 0.690513
| 261
| 1,929
| 4.762452
| 0.195402
| 0.152856
| 0.077233
| 0.061142
| 0.772325
| 0.772325
| 0.743363
| 0.713596
| 0.713596
| 0.653258
| 0
| 0.019346
| 0.222913
| 1,929
| 49
| 79
| 39.367347
| 0.809873
| 0.135822
| 0
| 0.30303
| 0
| 0
| 0.117391
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.030303
| 0
| 0.151515
| 0.151515
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5319e7a0dafedb81221d9e8a0c0742cdc32c307f
| 132
|
py
|
Python
|
polyaxon/polyaxon/config_settings/repos.py
|
vfdev-5/polyaxon
|
3e1511a993dc1a03e0a0827de0357f4adcc0015f
|
[
"MIT"
] | null | null | null |
polyaxon/polyaxon/config_settings/repos.py
|
vfdev-5/polyaxon
|
3e1511a993dc1a03e0a0827de0357f4adcc0015f
|
[
"MIT"
] | null | null | null |
polyaxon/polyaxon/config_settings/repos.py
|
vfdev-5/polyaxon
|
3e1511a993dc1a03e0a0827de0357f4adcc0015f
|
[
"MIT"
] | null | null | null |
from polyaxon.config_manager import config
REPOS_ACCESS_TOKEN = config.get_string('POLYAXON_REPOS_ACCESS_TOKEN', is_optional=True)
| 33
| 87
| 0.863636
| 19
| 132
| 5.578947
| 0.684211
| 0.207547
| 0.301887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 132
| 3
| 88
| 44
| 0.861789
| 0
| 0
| 0
| 0
| 0
| 0.204545
| 0.204545
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5349fb651d8f1e74a4e8361cd1de245c87ebef03
| 206
|
py
|
Python
|
odapi/tests/interfaces/test_interfaces_geomatic.py
|
jlandercy/odapi
|
781aa95ef346f8d5f1d727a19ae078687cc4cc36
|
[
"BSD-3-Clause"
] | 1
|
2020-05-27T08:33:26.000Z
|
2020-05-27T08:33:26.000Z
|
odapi/tests/interfaces/test_interfaces_geomatic.py
|
jlandercy/odapi
|
781aa95ef346f8d5f1d727a19ae078687cc4cc36
|
[
"BSD-3-Clause"
] | null | null | null |
odapi/tests/interfaces/test_interfaces_geomatic.py
|
jlandercy/odapi
|
781aa95ef346f8d5f1d727a19ae078687cc4cc36
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
import unittest
import pandas as pd
from odapi.settings import settings
class GeomaticTest:
pass
def main():
unittest.main()
sys.exit(0)
if __name__ == "__main__":
main()
| 10.3
| 35
| 0.679612
| 27
| 206
| 4.888889
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.23301
| 206
| 19
| 36
| 10.842105
| 0.829114
| 0
| 0
| 0
| 0
| 0
| 0.038835
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| true
| 0.090909
| 0.363636
| 0
| 0.545455
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
5356e45ffe06007fb797b012b4512c86d7e82e5e
| 3,934
|
py
|
Python
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/pbr/configure.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | 1
|
2022-01-16T10:00:24.000Z
|
2022-01-16T10:00:24.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/pbr/configure.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/pbr/configure.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
"""Common configure functions for PBR"""
import logging
from unicon.core.errors import SubCommandFailure
log = logging.getLogger(__name__)
def configure_route_map_under_interface(device, interface, route_map):
""" Configure route-map on an interface
Args:
device (`obj`): Device object
interface (`str`): Interface to get address
route_map (`str`): Route-map to be configured on interface
Returns:
None
Raises:
SubCommandFailure
"""
configs = [
"interface {intf}".format(intf=interface),
"ip policy route-map {policy}".format(policy=route_map),
]
try:
device.configure(configs)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed to configure route-map under interface {interface} on device {dev}. Error:\n{error}"
.format(
interface=interface,
dev=device.name,
error=e,
))
def unconfigure_route_map_under_interface(device, interface, route_map):
""" unonfigure route-map on an interface
Args:
device (`obj`): Device object
interface (`str`): Interface to get address
route_map (`str`): Route-map to be configured on interface
Returns:
None
Raises:
SubCommandFailure
"""
configs = [
"interface {intf}".format(intf=interface),
"no ip policy route-map {policy}".format(policy=route_map),
]
try:
device.configure(configs)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed to unconfigure route-map under interface {interface} on device {dev}. Error:\n{error}"
.format(
interface=interface,
dev=device.name,
error=e,
))
def configure_pbr_route_map(device,
route_map_name,
acl_name,
next_hop_ip,
default_next_hop=None):
""" Configure route-map
Args:
device (`obj`): Device object
route_map_name (`str`): Route-map to be configured on interface
acl_name (`str`): Route-map to be attached on interface
next_hop_ip (`str`): Next-hop ip address
default_next_hop (`str`, optional): Default Next-hop ip address, default value is None
Returns:
None
Raises:
SubCommandFailure
"""
configs = [
"route-map {pbr}".format(pbr=route_map_name),
"match ip address {acl}".format(acl=acl_name),
]
if default_next_hop:
configs.append("set ip default next-hop {ip}".format(ip=next_hop_ip))
else:
configs.append("set ip next-hop {ip}".format(ip=next_hop_ip))
try:
device.configure(configs)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed to configure route map {pbr} on device {dev}. Error:\n{error}"
.format(
pbr=route_map_name,
dev=device.name,
error=e,
))
def unconfigure_pbr_route_map(device, route_map_name):
""" Unconfigure route-map
Args:
device (`obj`): Device object
route_map_name (`str`): Route-map to be configured on interface
Returns:
None
Raises:
SubCommandFailure
"""
configs = ["no route-map {pbr}".format(pbr=route_map_name)]
try:
device.configure(configs)
except SubCommandFailure as e:
raise SubCommandFailure(
"Failed to unconfigure route map {pbr} on device {dev}. Error:\n{error}"
.format(
pbr=route_map_name,
dev=device.name,
error=e,
))
| 27.704225
| 106
| 0.562278
| 420
| 3,934
| 5.130952
| 0.154762
| 0.12993
| 0.044548
| 0.030162
| 0.83109
| 0.786543
| 0.777726
| 0.739211
| 0.643619
| 0.643619
| 0
| 0
| 0.344687
| 3,934
| 141
| 107
| 27.900709
| 0.835919
| 0.271225
| 0
| 0.602941
| 0
| 0.029412
| 0.202432
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.029412
| 0
| 0.088235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
536e626174e7353b3ab7f1669e507daa6239d787
| 69
|
py
|
Python
|
src/__init__.py
|
FangTang999/grid2demand
|
3b253643d18e773700e24a3284e365d2268c3e41
|
[
"Apache-2.0"
] | 42
|
2021-01-03T17:11:28.000Z
|
2022-03-28T20:27:29.000Z
|
src/__init__.py
|
FangTang999/grid2demand
|
3b253643d18e773700e24a3284e365d2268c3e41
|
[
"Apache-2.0"
] | 7
|
2021-01-29T06:15:14.000Z
|
2022-03-11T11:02:42.000Z
|
src/__init__.py
|
FangTang999/grid2demand
|
3b253643d18e773700e24a3284e365d2268c3e41
|
[
"Apache-2.0"
] | 11
|
2021-01-18T02:14:59.000Z
|
2021-12-07T13:07:25.000Z
|
from .grid2demand_0129 import *
print('grid2demand, version 0.0.1')
| 17.25
| 35
| 0.753623
| 10
| 69
| 5.1
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147541
| 0.115942
| 69
| 3
| 36
| 23
| 0.688525
| 0
| 0
| 0
| 0
| 0
| 0.376812
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
7274475763d943a6b30a47578c302239efd9b27e
| 42
|
py
|
Python
|
exceptions/signature_exception.py
|
gbanegas/KissECC
|
2630998178955a6e68adec34e30802447057c58f
|
[
"Apache-2.0"
] | 1
|
2017-02-08T16:01:48.000Z
|
2017-02-08T16:01:48.000Z
|
exceptions/signature_exception.py
|
gbanegas/ecc
|
2630998178955a6e68adec34e30802447057c58f
|
[
"Apache-2.0"
] | null | null | null |
exceptions/signature_exception.py
|
gbanegas/ecc
|
2630998178955a6e68adec34e30802447057c58f
|
[
"Apache-2.0"
] | null | null | null |
class SignatureError(Exception):
pass
| 14
| 32
| 0.761905
| 4
| 42
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 2
| 33
| 21
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7274c4614d0c19de6b60334a5e8ca939852c17bd
| 112
|
py
|
Python
|
JPS/python/caresjpsnlq/ols_docker/test.py
|
mdhillmancmcl/TheWorldAvatar-CMCL-Fork
|
011aee78c016b76762eaf511c78fabe3f98189f4
|
[
"MIT"
] | 21
|
2021-03-08T01:58:25.000Z
|
2022-03-09T15:46:16.000Z
|
JPS/python/caresjpsnlq/ols_docker/test.py
|
mdhillmancmcl/TheWorldAvatar-CMCL-Fork
|
011aee78c016b76762eaf511c78fabe3f98189f4
|
[
"MIT"
] | 63
|
2021-05-04T15:05:30.000Z
|
2022-03-23T14:32:29.000Z
|
JPS/python/caresjpsnlq/ols_docker/test.py
|
mdhillmancmcl/TheWorldAvatar-CMCL-Fork
|
011aee78c016b76762eaf511c78fabe3f98189f4
|
[
"MIT"
] | 15
|
2021-03-08T07:52:03.000Z
|
2022-03-29T04:46:20.000Z
|
import urllib.request
with urllib.request.urlopen('http://www.ebi.ac.uk/efo/efo.owl') as f:
print(f.read(300))
| 28
| 69
| 0.732143
| 21
| 112
| 3.904762
| 0.809524
| 0.317073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028846
| 0.071429
| 112
| 3
| 70
| 37.333333
| 0.759615
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7295813d46ac823fa80fd86dbb38f0550d70f873
| 65
|
py
|
Python
|
matchms/exporting/__init__.py
|
NLeSC/Spec2Vec
|
80678fdc7325813a1e3bd3ad7e2a60cb7482026a
|
[
"Apache-2.0"
] | null | null | null |
matchms/exporting/__init__.py
|
NLeSC/Spec2Vec
|
80678fdc7325813a1e3bd3ad7e2a60cb7482026a
|
[
"Apache-2.0"
] | 23
|
2020-03-16T13:47:00.000Z
|
2020-03-19T13:34:27.000Z
|
matchms/exporting/__init__.py
|
NLeSC/spec2vec
|
80678fdc7325813a1e3bd3ad7e2a60cb7482026a
|
[
"Apache-2.0"
] | null | null | null |
from .save_as_mgf import save_as_mgf
__all__ = ["save_as_mgf"]
| 13
| 36
| 0.769231
| 12
| 65
| 3.333333
| 0.5
| 0.45
| 0.675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138462
| 65
| 4
| 37
| 16.25
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0.169231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
72cabf7808366a50e1cc1d59929997292684a4fc
| 644
|
py
|
Python
|
Contributors/GabrielConlon/checkPermutation/tests_check_permutation.py
|
iatenine/Coders-Workshop
|
d93fbdd2e2cd2722f45d2a16d6ace31b62e7fc3f
|
[
"MIT"
] | 33
|
2019-12-02T23:29:47.000Z
|
2022-03-24T02:40:36.000Z
|
Contributors/GabrielConlon/checkPermutation/tests_check_permutation.py
|
iatenine/Coders-Workshop
|
d93fbdd2e2cd2722f45d2a16d6ace31b62e7fc3f
|
[
"MIT"
] | 39
|
2020-01-15T19:28:12.000Z
|
2021-11-26T05:13:29.000Z
|
Contributors/GabrielConlon/checkPermutation/tests_check_permutation.py
|
iatenine/Coders-Workshop
|
d93fbdd2e2cd2722f45d2a16d6ace31b62e7fc3f
|
[
"MIT"
] | 49
|
2019-12-02T23:29:53.000Z
|
2022-03-03T01:11:37.000Z
|
# practicing with some unittesting
import check_permutation
import unittest
class TestMethods(unittest.TestCase):
def test_baseline(self):
self.assertTrue(check_permutation.check_permutation("god", "dog"))
def test_lengthDiff(self):
self.assertFalse(check_permutation.check_permutation("hello", "goodbye"))
def test_withWhitespace(self):
self.assertFalse(check_permutation.check_permutation("hello world", "goodbye world"))
def test_complexWhitespaceTrue(self):
self.assertTrue(check_permutation.check_permutation("hello world", "lll ooehrwd"))
if __name__ == 'main':
unittest.main()
| 28
| 93
| 0.743789
| 70
| 644
| 6.6
| 0.428571
| 0.311688
| 0.181818
| 0.277056
| 0.491342
| 0.491342
| 0.458874
| 0.242424
| 0
| 0
| 0
| 0
| 0.149068
| 644
| 22
| 94
| 29.272727
| 0.843066
| 0.049689
| 0
| 0
| 0
| 0
| 0.111475
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0.307692
| false
| 0
| 0.153846
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
f429c163ca44e2690c00d84cfca7e174852dbbcd
| 119
|
py
|
Python
|
TexDBook/__init__.py
|
kkysen/TexDBook
|
61d9db5f00f04b00fd45ef50d0c8df417548d324
|
[
"MIT"
] | null | null | null |
TexDBook/__init__.py
|
kkysen/TexDBook
|
61d9db5f00f04b00fd45ef50d0c8df417548d324
|
[
"MIT"
] | 9
|
2018-05-18T16:19:27.000Z
|
2022-02-26T03:48:31.000Z
|
TexDBook/__init__.py
|
kkysen/TexDBook
|
61d9db5f00f04b00fd45ef50d0c8df417548d324
|
[
"MIT"
] | 1
|
2018-06-14T04:06:14.000Z
|
2018-06-14T04:06:14.000Z
|
from src.python.app import app_name, create_app
if __name__ == '__main__':
print(app_name)
create_app().run()
| 19.833333
| 47
| 0.705882
| 18
| 119
| 4
| 0.611111
| 0.194444
| 0.361111
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168067
| 119
| 5
| 48
| 23.8
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0.067227
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f4403d208c3462837a5c54441cd4467b99623f51
| 39
|
py
|
Python
|
CodeUp/6003.py
|
chae-heechan/Algorithm_Study
|
183a77e2cfe352cd82fb5e988b493082529a73dd
|
[
"MIT"
] | null | null | null |
CodeUp/6003.py
|
chae-heechan/Algorithm_Study
|
183a77e2cfe352cd82fb5e988b493082529a73dd
|
[
"MIT"
] | null | null | null |
CodeUp/6003.py
|
chae-heechan/Algorithm_Study
|
183a77e2cfe352cd82fb5e988b493082529a73dd
|
[
"MIT"
] | null | null | null |
# 출력하기 03
print("Hello")
print("World")
| 13
| 14
| 0.666667
| 6
| 39
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 0.102564
| 39
| 3
| 15
| 13
| 0.685714
| 0.179487
| 0
| 0
| 0
| 0
| 0.322581
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
f443846f7fb6cc18ee9bf9f30b853daa9e22b069
| 9,919
|
py
|
Python
|
Market/views.py
|
Fowerus/drf-crm
|
be5420f1942886b685214c33537cf4b3759704a1
|
[
"Apache-2.0"
] | 3
|
2021-09-20T09:21:46.000Z
|
2021-09-21T08:51:14.000Z
|
Market/views.py
|
Fowerus/drf-crm
|
be5420f1942886b685214c33537cf4b3759704a1
|
[
"Apache-2.0"
] | null | null | null |
Market/views.py
|
Fowerus/drf-crm
|
be5420f1942886b685214c33537cf4b3759704a1
|
[
"Apache-2.0"
] | null | null | null |
from django.db import transaction
from rest_framework import status
from rest_framework import generics
from rest_framework.response import Response
from .serializers import *
from Handbook.models import *
from core.utils.customPerm import *
from core.utils.customViewMethods import *
class ProductCategoryListAPIView(generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = ProductCategory.objects.all()
serializer_class = ProductCategorySerializer
perm_view_name = 'product'
class TransactionListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = Transaction.objects.all()
serializer_class = TransactionSerializer
filterset_fields = ['service__id']
class ProductListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = Product.objects.all()
serializer_class = ProductSerializer
filterset_fields = ['service__id']
class ProductCreateAPIView(generics.CreateAPIView):
permission_classes = [
CustomPermissionVerificationRole, CustomPermissionCheckRelated]
queryset = Product.objects.all()
serializer_class = ProductSerializer.ProductCSerializer
class ProductRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = Product.objects.all()
serializer_class = ProductSerializer
class ProductUpdateDestroyAPIView(CustomGetObject, generics.UpdateAPIView, generics.DestroyAPIView):
permission_classes = [CustomPermissionVerificationRole, CustomPermissionCheckRelated]
lookup_field = 'id'
queryset = Product.objects.all()
serializer_class = ProductSerializer.ProductUSerializer
class CashboxListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = Cashbox.objects.all()
serializer_class = CashboxSerializer
filterset_fields = ['service__id']
class CashboxCreateAPIView(generics.CreateAPIView):
permission_classes = [
CustomPermissionVerificationRole, CustomPermissionCheckRelated]
queryset = Cashbox.objects.all()
serializer_class = CashboxSerializer.CashboxCSerializer
class CashboxRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = Cashbox.objects.all()
serializer_class = CashboxSerializer
class CashboxUpdateDestroyAPIView(CustomGetObject, generics.UpdateAPIView, generics.DestroyAPIView):
permission_classes = [CustomPermissionVerificationRole, CustomPermissionCheckRelated]
lookup_field = 'id'
queryset = Cashbox.objects.all()
serializer_class = CashboxSerializer.CashboxUSerializer
class PurchaseRequestListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = PurchaseRequest.objects.all()
serializer_class = PurchaseRequestSerializer
filterset_fields = ['service__id']
class PurchaseRequestCreateAPIView(generics.CreateAPIView):
permission_classes = [
CustomPermissionVerificationRole, CustomPermissionCheckRelated]
queryset = PurchaseRequest.objects.all()
serializer_class = PurchaseRequestSerializer.PurchaseRequestCSerializer
class PurchaseRequestRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = PurchaseRequest.objects.all()
serializer_class = PurchaseRequestSerializer
class PurchaseRequestDestroyAPIView(CustomGetObject, generics.DestroyAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = PurchaseRequest.objects.all()
serializer_class = PurchaseRequestSerializer
class PurchaseAcceptListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = PurchaseAccept.objects.all()
serializer_class = PurchaseAcceptSerializer
filterset_fields = ['service__id']
class PurchaseAcceptRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = PurchaseAccept.objects.all()
serializer_class = PurchaseAcceptSerializer()
class PurchaseAcceptUpdateAPIView(CustomGetObject, generics.UpdateAPIView):
permission_classes = [CustomPermissionVerificationRole,CustomPermissionCheckRelated]
lookup_field = 'id'
queryset = PurchaseAccept.objects.all()
serializer_class = PurchaseAcceptSerializer.PurchaseAcceptUSerializer
class SaleProductListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = SaleProduct.objects.all()
serializer_class = SaleProductSerializer
filterset_fields = ['service__id']
class SaleProductCreateAPIView(generics.CreateAPIView):
permission_classes = [
CustomPermissionVerificationRole, CustomPermissionCheckRelated]
queryset = SaleProduct.objects.all()
serializer_class = SaleProductSerializer.SaleProductCSerializer
class SaleProductRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = SaleProduct.objects.all()
serializer_class = SaleProductSerializer
class SaleProductUpdateDestroyAPIView(CustomGetObject, generics.UpdateAPIView, generics.DestroyAPIView):
permission_classes = [CustomPermissionVerificationRole, CustomPermissionCheckRelated]
lookup_field = 'id'
queryset = SaleProduct.objects.all()
serializer_class = SaleProductSerializer.SaleProductUSerializer
class SaleOrderListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = SaleOrder.objects.all()
serializer_class = SaleOrderSerializer
filterset_fields = ['service__id']
class SaleOrderCreateAPIView(generics.CreateAPIView):
permission_classes = [
CustomPermissionVerificationRole, CustomPermissionCheckRelated]
queryset = SaleOrder.objects.all()
serializer_class = SaleOrderSerializer.SaleOrderCSerializer
class SaleOrderRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = SaleOrder.objects.all()
serializer_class = SaleOrderSerializer
class SaleOrderUpdateDestroyAPIView(CustomGetObject, generics.UpdateAPIView, generics.DestroyAPIView):
permission_classes = [CustomPermissionVerificationRole, CustomPermissionCheckRelated]
lookup_field = 'id'
queryset = SaleOrder.objects.all()
serializer_class = SaleOrderSerializer.SaleOrderUSerializer
class WorkDoneListAPIView(CustomFilterQueryset, generics.ListAPIView):
permission_classes = [CustomPermissionVerificationRole]
queryset = WorkDone.objects.all()
serializer_class = WorkDoneSerializer
filterset_fields = ['service__id']
class WorkDoneCreateAPIView(generics.CreateAPIView):
permission_classes = [
CustomPermissionVerificationRole, CustomPermissionCheckRelated]
queryset = WorkDone.objects.all()
serializer_class = WorkDoneSerializer.WorkDoneCSerializer
class WorkDoneRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
permission_classes = [CustomPermissionVerificationRole]
lookup_field = 'id'
queryset = WorkDone.objects.all()
serializer_class = WorkDoneSerializer
class WorkDoneUpdateDestroyAPIView(CustomGetObject, generics.UpdateAPIView, generics.DestroyAPIView):
    """PUT/PATCH/DELETE a single WorkDone record looked up by ``id``.

    Deletion is wrapped in a database transaction and is recorded in the
    order history before the row is removed.
    """

    permission_classes = [CustomPermissionVerificationRole, CustomPermissionCheckRelated]
    lookup_field = 'id'
    queryset = WorkDone.objects.all()
    serializer_class = WorkDoneSerializer.WorkDoneUSerializer

    @transaction.atomic
    def delete(self, request, **kwargs):
        # The history write and the destroy share one transaction: if either
        # fails, both are rolled back together.
        instance = self.get_object()
        # model='1' — presumably a history-entry discriminator for WorkDone; TODO confirm.
        create_orderHistory(order=instance.order, model='1', organization=instance.order.organization, method='delete',
                            body={"id": instance.id, "name": instance.name})
        self.perform_destroy(instance)
        return Response(status=status.HTTP_204_NO_CONTENT)
class ProductOrderListAPIView(CustomFilterQueryset,generics.ListAPIView):
    """GET a list of ProductOrder rows; supports filtering by ``service__id``."""

    permission_classes = [CustomPermissionVerificationRole]
    queryset = ProductOrder.objects.all()
    serializer_class = ProductOrderSerializer
    filterset_fields = ['service__id']
class ProductOrderCreateAPIView(generics.CreateAPIView):
    """POST a new ProductOrder."""

    permission_classes = [
        CustomPermissionVerificationRole, CustomPermissionCheckRelated]
    queryset = ProductOrder.objects.all()
    # C-suffixed serializer: presumably a create-specific field set — TODO confirm.
    serializer_class = ProductOrderSerializer.ProductOrderCSerializer
class ProductOrderRetrieveAPIView(CustomGetObject, generics.RetrieveAPIView):
    """GET a single ProductOrder looked up by ``id``."""

    permission_classes = [CustomPermissionVerificationRole]
    lookup_field = 'id'
    queryset = ProductOrder.objects.all()
    serializer_class = ProductOrderSerializer
class ProductOrderUpdateDestroyAPIView(CustomGetObject, generics.UpdateAPIView, generics.DestroyAPIView):
    """PUT/PATCH/DELETE a single ProductOrder looked up by ``id``.

    Deletion is wrapped in a database transaction and is recorded in the
    order history before the row is removed.
    """

    permission_classes = [CustomPermissionVerificationRole, CustomPermissionCheckRelated]
    lookup_field = 'id'
    queryset = ProductOrder.objects.all()
    serializer_class = ProductOrderSerializer.ProductOrderUSerializer

    @transaction.atomic
    def delete(self, request, **kwargs):
        # The history write and the destroy share one transaction.
        instance = self.get_object()
        # model='0' — presumably the ProductOrder history discriminator; note the
        # WorkDone variant also logs a ``body`` payload while this one does not —
        # NOTE(review): confirm the asymmetry is intended.
        create_orderHistory(order=instance.order, model='0',
                            organization=instance.order.organization, method='delete')
        self.perform_destroy(instance)
        return Response(status=status.HTTP_204_NO_CONTENT)
| 38.445736
| 119
| 0.79756
| 732
| 9,919
| 10.635246
| 0.181694
| 0.072062
| 0.207707
| 0.105973
| 0.784586
| 0.748234
| 0.725883
| 0.413102
| 0.403211
| 0.403211
| 0
| 0.00093
| 0.132977
| 9,919
| 257
| 120
| 38.595331
| 0.904302
| 0
| 0
| 0.647059
| 0
| 0
| 0.015929
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010695
| false
| 0
| 0.042781
| 0
| 0.909091
| 0
| 0
| 0
| 1
| null | 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
f4834861d4b7b643d6a06809f095bf5f0eb673c8
| 86
|
py
|
Python
|
c02/p049_test_doc.py
|
HiAwesome/dive-into-python3-practice
|
e57504cb0683ebca9c80b20ff0cb3878bdcc3f87
|
[
"Apache-2.0"
] | null | null | null |
c02/p049_test_doc.py
|
HiAwesome/dive-into-python3-practice
|
e57504cb0683ebca9c80b20ff0cb3878bdcc3f87
|
[
"Apache-2.0"
] | null | null | null |
c02/p049_test_doc.py
|
HiAwesome/dive-into-python3-practice
|
e57504cb0683ebca9c80b20ff0cb3878bdcc3f87
|
[
"Apache-2.0"
] | null | null | null |
"""Check the editor/REPL hints produced for code that carries docstrings.

The original placed this text *after* the import, where it is a bare string
expression rather than the module docstring; it is now the real docstring.
"""
# Import the one name actually used instead of a wildcard import, so the
# module's namespace stays explicit.
from p044_humansize import approximate_size

print(approximate_size(10000000))
| 14.333333
| 33
| 0.755814
| 11
| 86
| 5.727273
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144737
| 0.116279
| 86
| 5
| 34
| 17.2
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
be3f593019919495e96b0b8d2e2aba0e019c4843
| 208
|
py
|
Python
|
processing/views/main_view.py
|
haykhman/online-newspaper
|
ebe38be63f3f037160532b33eb76bf9a22dbf171
|
[
"MIT"
] | 1
|
2018-12-10T15:50:21.000Z
|
2018-12-10T15:50:21.000Z
|
processing/views/main_view.py
|
haykhman/online-newspaper
|
ebe38be63f3f037160532b33eb76bf9a22dbf171
|
[
"MIT"
] | null | null | null |
processing/views/main_view.py
|
haykhman/online-newspaper
|
ebe38be63f3f037160532b33eb76bf9a22dbf171
|
[
"MIT"
] | null | null | null |
from django.shortcuts import redirect
from django.views.generic import View
class EnteryLanguageDetect(View):
    """Entry-point view: redirect the visitor to the site root prefixed
    with the language code Django detected for this request."""

    def get(self, request, *args, **kwargs):
        language_root = '/' + request.LANGUAGE_CODE
        return redirect(language_root)
| 34.666667
| 52
| 0.745192
| 25
| 208
| 6.16
| 0.76
| 0.12987
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149038
| 208
| 6
| 52
| 34.666667
| 0.870057
| 0
| 0
| 0
| 0
| 0
| 0.004785
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
be3f6291be95be2878b5d8758a5802c131aae68a
| 93
|
py
|
Python
|
copulae/mixtures/gmc/estimators/exceptions.py
|
CrisDS81/copulae
|
2a312c2b849f95cfb2b40b381d34bc790d9d80c5
|
[
"MIT"
] | 100
|
2019-01-30T19:52:04.000Z
|
2022-03-18T10:00:17.000Z
|
copulae/mixtures/gmc/estimators/exceptions.py
|
CrisDS81/copulae
|
2a312c2b849f95cfb2b40b381d34bc790d9d80c5
|
[
"MIT"
] | 30
|
2019-07-14T00:30:03.000Z
|
2021-08-24T08:59:14.000Z
|
copulae/mixtures/gmc/estimators/exceptions.py
|
CrisDS81/copulae
|
2a312c2b849f95cfb2b40b381d34bc790d9d80c5
|
[
"MIT"
] | 25
|
2019-03-10T21:12:55.000Z
|
2021-11-09T11:54:16.000Z
|
class FitException(Exception):
    """Raised to signal that a fit operation failed."""
    pass
class InvalidStoppingCriteria(Exception):
    """Raised when an unrecognized/invalid stopping criterion is supplied."""
    pass
| 13.285714
| 41
| 0.763441
| 8
| 93
| 8.875
| 0.625
| 0.366197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172043
| 93
| 6
| 42
| 15.5
| 0.922078
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
be47807adb57e21af4a10f4c74555368d4f8a07f
| 49
|
py
|
Python
|
webimgui/exceptions.py
|
elnardu/webimgui
|
4b5f21ed5e5ad3c0d3f567050bdaa455d0da7327
|
[
"MIT"
] | 4
|
2019-11-04T07:26:48.000Z
|
2020-03-09T06:50:48.000Z
|
webimgui/exceptions.py
|
elnardu/webimgui
|
4b5f21ed5e5ad3c0d3f567050bdaa455d0da7327
|
[
"MIT"
] | 5
|
2021-03-10T08:09:17.000Z
|
2022-02-12T23:56:34.000Z
|
webimgui/exceptions.py
|
elnardu/webimgui
|
4b5f21ed5e5ad3c0d3f567050bdaa455d0da7327
|
[
"MIT"
] | null | null | null |
class WebimguiException(Exception):
    """Base exception for the webimgui package.

    Derives from ``Exception`` rather than ``BaseException``: the Python
    docs reserve direct ``BaseException`` subclasses for exits/interrupts,
    and deriving from ``Exception`` lets generic ``except Exception``
    handlers see this error. Backward compatible — ``Exception`` is itself
    a ``BaseException`` subclass, so existing handlers still match.
    """
    pass
| 16.333333
| 39
| 0.795918
| 4
| 49
| 9.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 49
| 2
| 40
| 24.5
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
be4aa2a46abe58d2cc2d08c11e91d67d39d1a0cf
| 115
|
py
|
Python
|
src/aws_maintenance_window_reporter/__init__.py
|
binxio/aws-maintenance-window-reporter
|
aeb7d3bb0068d3fd49d00aedc8e83b9e7052d76e
|
[
"Apache-2.0"
] | null | null | null |
src/aws_maintenance_window_reporter/__init__.py
|
binxio/aws-maintenance-window-reporter
|
aeb7d3bb0068d3fd49d00aedc8e83b9e7052d76e
|
[
"Apache-2.0"
] | null | null | null |
src/aws_maintenance_window_reporter/__init__.py
|
binxio/aws-maintenance-window-reporter
|
aeb7d3bb0068d3fd49d00aedc8e83b9e7052d76e
|
[
"Apache-2.0"
] | null | null | null |
"""
reports upcoming AWS maintenance windows
"""
from aws_maintenance_window_reporter.report import handle, report
| 23
| 65
| 0.826087
| 14
| 115
| 6.571429
| 0.785714
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104348
| 115
| 4
| 66
| 28.75
| 0.893204
| 0.347826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be4b49aa2ec09c8806e455c101c5773df2ba1db0
| 1,118
|
py
|
Python
|
sequestrum/logging.py
|
goteemLight/sequestrum
|
32fea21e45101f11f9f45f730314b6d98f70b508
|
[
"MIT"
] | null | null | null |
sequestrum/logging.py
|
goteemLight/sequestrum
|
32fea21e45101f11f9f45f730314b6d98f70b508
|
[
"MIT"
] | null | null | null |
sequestrum/logging.py
|
goteemLight/sequestrum
|
32fea21e45101f11f9f45f730314b6d98f70b508
|
[
"MIT"
] | null | null | null |
# Logging module
import sys
import time
def delay_print(string):
    """Print *string* one character at a time, pausing 0.07 s between
    characters for a typewriter effect, then emit a trailing newline."""
    for ch in string:
        sys.stdout.write(ch)
        sys.stdout.flush()  # force each character out immediately
        time.sleep(0.07)
    sys.stdout.write("\n")
def format_output(error_type, error_message, package_name=None):
    """Build a log line.

    Returns ``"[TYPE:package] message"`` when *package_name* is truthy,
    otherwise ``"[TYPE] message"``.
    """
    if not package_name:
        return "[{}] {}".format(error_type, error_message)
    return "[{}:{}] {}".format(error_type, package_name, error_message)
def print_fatal(error_message, package_name=None):
    """Print a red FATAL line, then terminate the process via sys.exit().

    NOTE(review): when *package_name* is omitted the literal ``None`` is
    printed after the message — confirm callers always pass a package name.
    """
    text = "\033[1;31mFATAL\033[0m: {} {}".format(error_message, package_name)
    print(text)
    sys.exit()
def print_error(error_message, package_name=None):
    """Print a red ERROR line (does not exit).

    NOTE(review): a missing *package_name* prints as the literal ``None``.
    """
    text = "\033[1;31mERROR\033[0m: {} {}".format(error_message, package_name)
    print(text)
def print_warn(error_message, package_name=None):
    """Print a yellow WARN line.

    NOTE(review): a missing *package_name* prints as the literal ``None``.
    """
    text = "\033[1;33mWARN\033[0m: {} {}".format(error_message, package_name)
    print(text)
def print_info(error_message, package_name=None):
    """Print a green INFO line.

    NOTE(review): a missing *package_name* prints as the literal ``None``.
    """
    text = "\033[1;32mINFO\033[0m: {} {}".format(error_message, package_name)
    print(text)
def print_verbose(error_message, package_name=None):
    """Print a green VERBOSE line.

    NOTE(review): a missing *package_name* prints as the literal ``None``.
    """
    text = "\033[1;32mVERBOSE\033[0m: {} {}".format(error_message, package_name)
    print(text)
| 27.95
| 80
| 0.690519
| 153
| 1,118
| 4.810458
| 0.287582
| 0.211957
| 0.283967
| 0.34375
| 0.544837
| 0.508152
| 0.508152
| 0.415761
| 0.171196
| 0
| 0
| 0.055439
| 0.144902
| 1,118
| 39
| 81
| 28.666667
| 0.714435
| 0.012522
| 0
| 0
| 0
| 0
| 0.14882
| 0.104356
| 0
| 0
| 0
| 0
| 0
| 1
| 0.291667
| false
| 0
| 0.083333
| 0
| 0.375
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
be6ee15e7b6047e424dc5eb86b480fdec356aa3a
| 2,561
|
py
|
Python
|
tests/wallet/test_simple_transfers.py
|
sunpuyo/merkle-bridge
|
0dad3b832bf3f9f39c5e59298b69fb61d0a1144c
|
[
"MIT"
] | 5
|
2018-11-29T05:57:10.000Z
|
2019-12-03T23:38:37.000Z
|
tests/wallet/test_simple_transfers.py
|
sunpuyo/merkle-bridge
|
0dad3b832bf3f9f39c5e59298b69fb61d0a1144c
|
[
"MIT"
] | null | null | null |
tests/wallet/test_simple_transfers.py
|
sunpuyo/merkle-bridge
|
0dad3b832bf3f9f39c5e59298b69fb61d0a1144c
|
[
"MIT"
] | 3
|
2019-05-07T10:12:56.000Z
|
2021-04-03T08:02:24.000Z
|
def _test_transfer(wallet, asset):
    """Basic token/aer transfer on its native chain.

    Sends 2 units from the default account to 'receiver' on 'mainnet' and
    checks both balances moved by exactly that amount.
    """
    receiver = wallet.get_wallet_address('receiver')
    qty = 2
    recv_before, _ = wallet.get_balance(asset, 'mainnet',
                                        account_name='receiver')
    print('receiver balance before', recv_before)
    sent_before, _ = wallet.get_balance(asset, 'mainnet')
    print('sender balance before', sent_before)
    wallet.transfer(qty, receiver, asset, 'mainnet', privkey_pwd='1234')
    recv_after, _ = wallet.get_balance(asset, 'mainnet',
                                       account_name='receiver')
    print('receiver balance after', recv_after)
    sent_after, _ = wallet.get_balance(asset, 'mainnet')
    print('sender balance after', sent_after)
    assert recv_after == recv_before + qty
    assert sent_after == sent_before - qty
def test_token_transfer(wallet):
    """Run the basic native-chain transfer scenario with 'token1'."""
    asset = 'token1'
    return _test_transfer(wallet, asset)
def test_transfer_pegged_token(wallet):
    """Pegged token transfer on sidechain.

    Moves 'token1' from 'mainnet' to 'sidechain2' first, then transfers 2
    units to 'receiver' on the sidechain and verifies both balances.
    """
    receiver = wallet.get_wallet_address('receiver')
    asset = 'token1'
    qty = 2
    # give funds to sender on the sidechain
    wallet.transfer_to_sidechain('mainnet', 'sidechain2', asset, qty,
                                 privkey_pwd='1234')
    recv_before, _ = wallet.get_balance(asset, 'sidechain2',
                                        asset_origin_chain='mainnet',
                                        account_name='receiver')
    print('receiver balance before', recv_before)
    sent_before, _ = wallet.get_balance(asset, 'sidechain2',
                                        asset_origin_chain='mainnet')
    print('sender balance before', sent_before)
    wallet.transfer(qty, receiver, asset, 'sidechain2',
                    asset_origin_chain='mainnet', privkey_pwd='1234')
    recv_after, _ = wallet.get_balance(asset, 'sidechain2',
                                       asset_origin_chain='mainnet',
                                       account_name='receiver')
    print('receiver balance after', recv_after)
    sent_after, _ = wallet.get_balance(asset, 'sidechain2',
                                       asset_origin_chain='mainnet')
    print('sender balance after', sent_after)
    assert recv_after == recv_before + qty
    assert sent_after == sent_before - qty
| 41.306452
| 76
| 0.594299
| 261
| 2,561
| 5.509579
| 0.157088
| 0.133519
| 0.089013
| 0.116829
| 0.798331
| 0.788595
| 0.718359
| 0.718359
| 0.718359
| 0.718359
| 0
| 0.012493
| 0.312378
| 2,561
| 61
| 77
| 41.983607
| 0.804089
| 0.047247
| 0
| 0.533333
| 0
| 0
| 0.156984
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 1
| 0.066667
| false
| 0
| 0
| 0.022222
| 0.088889
| 0.177778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
beb2b1234e7ead7775b4b5250455e4d42b5e364b
| 157
|
py
|
Python
|
jtyoui/game/__init__.py
|
vanton/Jtyoui
|
c44d66b038ac5f4e2d75b68b3493d02f7b7b385e
|
[
"MIT"
] | 1
|
2019-12-24T00:57:47.000Z
|
2019-12-24T00:57:47.000Z
|
jtyoui/game/__init__.py
|
liangxioa/Jtyoui
|
5a584cbf12d644b6c4fb13167d8841a383afbbac
|
[
"MIT"
] | null | null | null |
jtyoui/game/__init__.py
|
liangxioa/Jtyoui
|
5a584cbf12d644b6c4fb13167d8841a383afbbac
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.7
# -*- coding: utf-8 -*-
# @Time : 2018/1/2 16:30
# @Email : jtyoui@qq.com
# @Software: PyCharm
from .tank import TankGame # 坦克小游戏
| 19.625
| 35
| 0.611465
| 24
| 157
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102362
| 0.191083
| 157
| 7
| 36
| 22.428571
| 0.653543
| 0.738854
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe43c34b9de0b8f4f86586438abc905c29295d1f
| 271
|
py
|
Python
|
python/koheron/__init__.py
|
Koheron/koheron-sdk
|
82b732635f1adf5dd0b04b9290b589c1fc091f29
|
[
"MIT"
] | 77
|
2016-09-20T18:44:14.000Z
|
2022-03-30T16:04:09.000Z
|
python/koheron/__init__.py
|
rsarwar87/koheron-sdk
|
02c35bf3c1c29f1029fad18b881dbd193efac5a7
|
[
"MIT"
] | 101
|
2016-09-05T15:44:25.000Z
|
2022-03-29T09:22:09.000Z
|
python/koheron/__init__.py
|
rsarwar87/koheron-sdk
|
02c35bf3c1c29f1029fad18b881dbd193efac5a7
|
[
"MIT"
] | 34
|
2016-12-12T07:21:57.000Z
|
2022-01-12T21:00:52.000Z
|
from .version import __version__
from .koheron import KoheronClient
from .koheron import command
from .koheron import ConnectionError
from .koheron import connect
from .koheron import run_instrument
from .koheron import upload_instrument
from .alpha250 import Alpha250
| 24.636364
| 38
| 0.845018
| 34
| 271
| 6.558824
| 0.352941
| 0.295964
| 0.457399
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025316
| 0.125461
| 271
| 10
| 39
| 27.1
| 0.915612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
22832419110abef2b272ecedcf59eb5d38c4119b
| 667
|
py
|
Python
|
sdk/python/pulumi_aws/securityhub/__init__.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/securityhub/__init__.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/securityhub/__init__.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from .. import _utilities
import typing
# Export this package's modules as members:
from .account import *
from .action_target import *
from .finding_aggregator import *
from .insight import *
from .invite_accepter import *
from .member import *
from .organization_admin_account import *
from .organization_configuration import *
from .product_subscription import *
from .standards_control import *
from .standards_subscription import *
from ._inputs import *
from . import outputs
| 31.761905
| 87
| 0.770615
| 90
| 667
| 5.588889
| 0.611111
| 0.238569
| 0.067594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001764
| 0.149925
| 667
| 20
| 88
| 33.35
| 0.885362
| 0.328336
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
228bc8c5ac71d3b285315b50b44ba76226f67769
| 29
|
py
|
Python
|
bal/__main__.py
|
jbg/bal
|
ea2e87d5428bb287a0b327377bc548c0bc3f2a38
|
[
"BSD-2-Clause"
] | null | null | null |
bal/__main__.py
|
jbg/bal
|
ea2e87d5428bb287a0b327377bc548c0bc3f2a38
|
[
"BSD-2-Clause"
] | null | null | null |
bal/__main__.py
|
jbg/bal
|
ea2e87d5428bb287a0b327377bc548c0bc3f2a38
|
[
"BSD-2-Clause"
] | null | null | null |
"""Package entry point: run with ``python -m bal``."""
from .bal import main

# Guard the call so importing bal.__main__ programmatically (e.g. by tooling)
# does not trigger a run; ``python -m bal`` still executes main().
if __name__ == "__main__":
    main()
| 9.666667
| 21
| 0.724138
| 5
| 29
| 4.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 29
| 2
| 22
| 14.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
22a396b1f1e460bcb97d47b0181eced5ab5f137e
| 55
|
py
|
Python
|
__transform__/__init__.py
|
bakkerthehacker/both
|
37bfdc41c97476cb74dced06570f5988356e4984
|
[
"MIT"
] | 6
|
2019-06-04T04:00:45.000Z
|
2021-01-23T22:36:37.000Z
|
__transform__/__init__.py
|
bakkerthehacker/both
|
37bfdc41c97476cb74dced06570f5988356e4984
|
[
"MIT"
] | null | null | null |
__transform__/__init__.py
|
bakkerthehacker/both
|
37bfdc41c97476cb74dced06570f5988356e4984
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import both.transforms # noqa
| 18.333333
| 30
| 0.618182
| 7
| 55
| 4.857143
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.181818
| 55
| 2
| 31
| 27.5
| 0.733333
| 0.472727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
22a7267d01098535985293eb7e3851a6c1914fba
| 125
|
py
|
Python
|
umdpfmt/parser/__init__.py
|
ScottWales/umdpfmt
|
0087932fc1d4efb5687631a364ae7bac8436c6a9
|
[
"Apache-2.0"
] | null | null | null |
umdpfmt/parser/__init__.py
|
ScottWales/umdpfmt
|
0087932fc1d4efb5687631a364ae7bac8436c6a9
|
[
"Apache-2.0"
] | null | null | null |
umdpfmt/parser/__init__.py
|
ScottWales/umdpfmt
|
0087932fc1d4efb5687631a364ae7bac8436c6a9
|
[
"Apache-2.0"
] | null | null | null |
from .FortranLexer import FortranLexer
from .FortranParser import FortranParser
from .FortranListener import FortranListener
| 31.25
| 44
| 0.88
| 12
| 125
| 9.166667
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096
| 125
| 3
| 45
| 41.666667
| 0.973451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
22e3d2bfd7aed8edcf0d759371e41fa3564a601f
| 5,736
|
py
|
Python
|
src/saltext/vmware/modules/license_mgr.py
|
jain-prerna/salt-ext-modules-vmware-1
|
0d697b1065dc78fb7c2d80b4cdd637a32a6274ef
|
[
"Apache-2.0"
] | 10
|
2021-11-02T20:24:44.000Z
|
2022-03-11T05:54:27.000Z
|
src/saltext/vmware/modules/license_mgr.py
|
jain-prerna/salt-ext-modules-vmware-1
|
0d697b1065dc78fb7c2d80b4cdd637a32a6274ef
|
[
"Apache-2.0"
] | 83
|
2021-10-01T15:13:02.000Z
|
2022-03-31T16:22:40.000Z
|
src/saltext/vmware/modules/license_mgr.py
|
jain-prerna/salt-ext-modules-vmware-1
|
0d697b1065dc78fb7c2d80b4cdd637a32a6274ef
|
[
"Apache-2.0"
] | 15
|
2021-09-30T23:17:27.000Z
|
2022-03-23T06:54:22.000Z
|
# Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: Apache-2.0
import logging
# noreorder
import salt.exceptions
import saltext.vmware.utils.common as utils_common
import saltext.vmware.utils.license_mgr as utils_license_mgr
from saltext.vmware.utils.connect import get_service_instance
log = logging.getLogger(__name__)
try:
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
__virtualname__ = "vmware_license_mgr"
__proxyenabled__ = ["vmware_license_mgr"]
__func_alias__ = {"list_": "list"}
def __virtual__():
    """Only expose this execution module when pyVmomi imported successfully."""
    if HAS_PYVMOMI:
        return __virtualname__
    # Salt convention: (False, reason) marks the module as unavailable.
    return False, "Unable to import pyVmomi module."
def add(license_key, **kwargs):
    """
    Add a license specified by license key to a Datacenter, Cluster, ESXI Server or vCenter

    If no datacenter, cluster or ESXI Server is specified, it is assumed the operation is to be applied to a vCenter

    license_key
        License Key which specifies license to add to license manager

    service_instance
        Use this vCenter service connection instance instead of creating a new one [default None]

    datacenter_name
        Datacenter name to use for the operation [default None]

    cluster_name
        Name of the cluster to add license [default None]

    esxi_hostname
        Hostname of the ESXI Server to add license [default None]

    CLI Example:

    .. code-block:: bash

        salt '*' vmware_license_mgr.add license_key=AAAAA-11111-AAAAA-11111-AAAAA datacenter_name=dc1
    """
    ret = {}
    service_instance = kwargs.get("service_instance", None)
    datacenter_name = kwargs.get("datacenter_name", None)
    cluster_name = kwargs.get("cluster_name", None)
    esxi_hostname = kwargs.get("esxi_hostname", None)

    if service_instance is None:
        service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)

    # License management is only supported against a vCenter connection.
    if not utils_license_mgr.is_vcenter(service_instance):
        ret["message"] = "Failed, not connected to a vCenter"
        ret["result"] = False
        return ret

    try:
        # test=True dry-run: report the key without touching the License Manager.
        if __opts__.get("test", None):
            ret["licenses"] = license_key
            ret["message"] = "Test dry-run, not really connected to a vCenter testing"
            return ret

        result = utils_license_mgr.add_license(
            service_instance, license_key, datacenter_name, cluster_name, esxi_hostname
        )
        if result:
            ret["licenses"] = license_key
    except (
        salt.exceptions.VMwareApiError,
        salt.exceptions.VMwareObjectRetrievalError,
        salt.exceptions.VMwareRuntimeError,
        salt.exceptions.CommandExecutionError,
    ) as exc:
        log.exception(exc)
        ret["message"] = f"Failed to add a license key due to Exception '{exc}'"
        ret["result"] = False
        return ret

    if not result:
        # Fix: dropped a stray f-prefix — the string has no placeholders.
        ret["message"] = "Failed specified license key was not added to License Manager"
        ret["result"] = False
        return ret

    # Bug fix: the original fell off the end here, so a *successful* add
    # returned None instead of the populated ret dict.
    return ret
def list_(service_instance=None):
    """
    Returns a list of licenses for the specified Service Instance

    service_instance
        Use this vCenter service connection instance instead of creating a new one [default None]

    .. code-block:: bash

        salt '*' vmware_license_mgr.list
    """
    # Lazily create a connection only when the caller did not supply one.
    si = service_instance
    if si is None:
        si = get_service_instance(opts=__opts__, pillar=__pillar__)
    return utils_license_mgr.list_licenses(si)
def remove(license_key, **kwargs):
    """
    Remove a license specified by license_key from a Datacenter, Cluster, ESXI Server or vCenter

    If no datacenter, cluster or ESXI Server is specified, it is assumed the operation is to be applied to a vCenter

    license_key
        License Key which specifies license to remove from the license manager

    service_instance
        Use this vCenter service connection instance instead of creating a new one [default None]

    datacenter_name
        Datacenter name to use for the operation [default None]

    cluster_name
        Name of the cluster to add license [default None]

    esxi_hostname
        Hostname of the ESXI Server to add license [default None]

    CLI Example:

    .. code-block:: bash

        salt '*' vmware_license_mgr.remove license_key=AAAAA-11111-AAAAA-11111-AAAAA
    """
    ret = {}
    service_instance = kwargs.get("service_instance", None)
    datacenter_name = kwargs.get("datacenter_name", None)
    cluster_name = kwargs.get("cluster_name", None)
    esxi_hostname = kwargs.get("esxi_hostname", None)

    if service_instance is None:
        service_instance = get_service_instance(opts=__opts__, pillar=__pillar__)

    # License management is only supported against a vCenter connection.
    if not utils_license_mgr.is_vcenter(service_instance):
        ret["message"] = "Failed, not connected to a vCenter"
        ret["result"] = False
        return ret

    try:
        # test=True dry-run: report the key without touching the License Manager.
        if __opts__.get("test", None):
            ret["licenses"] = license_key
            ret["message"] = "Test dry-run, not really connected to a vCenter testing"
            return ret

        result = utils_license_mgr.remove_license(
            service_instance, license_key, datacenter_name, cluster_name, esxi_hostname
        )
    except (
        salt.exceptions.VMwareApiError,
        salt.exceptions.VMwareObjectRetrievalError,
        salt.exceptions.VMwareRuntimeError,
        salt.exceptions.CommandExecutionError,
    ) as exc:
        log.exception(exc)
        ret["message"] = f"Failed to remove license key due to Exception '{exc}'"
        ret["result"] = False
        return ret

    if not result:
        # Fix: dropped a stray f-prefix — the string has no placeholders.
        ret["message"] = "Failed specified license key was not found in License Manager"
        ret["result"] = False
        return ret

    # Bug fix: the original fell off the end here, so a *successful* remove
    # returned None instead of the populated ret dict.
    return ret
| 30.83871
| 116
| 0.686715
| 721
| 5,736
| 5.248266
| 0.165049
| 0.095137
| 0.027748
| 0.031712
| 0.79334
| 0.79334
| 0.778013
| 0.731237
| 0.731237
| 0.731237
| 0
| 0.006177
| 0.237971
| 5,736
| 185
| 117
| 31.005405
| 0.859529
| 0.314331
| 0
| 0.673913
| 0
| 0
| 0.19111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.086957
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe1242b4c7a10132dd12f0915378f0d6ffae58e8
| 105
|
py
|
Python
|
tests/validation/__init__.py
|
EarthObservationSimulator/orbits
|
b476762532f8644b9fe0723760b5202ab77a4547
|
[
"Apache-2.0"
] | 4
|
2021-12-23T16:49:06.000Z
|
2022-02-09T21:36:31.000Z
|
tests/validation/__init__.py
|
EarthObservationSimulator/orbits
|
b476762532f8644b9fe0723760b5202ab77a4547
|
[
"Apache-2.0"
] | 15
|
2022-01-14T17:28:14.000Z
|
2022-02-11T02:39:25.000Z
|
tests/validation/__init__.py
|
EarthObservationSimulator/orbits
|
b476762532f8644b9fe0723760b5202ab77a4547
|
[
"Apache-2.0"
] | 1
|
2022-02-03T15:44:16.000Z
|
2022-02-03T15:44:16.000Z
|
"""Unit tests for orbitpy.preprocess module.
"""
import unittest
import json
import numpy
import sys, os
| 15
| 44
| 0.771429
| 15
| 105
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 105
| 7
| 45
| 15
| 0.9
| 0.390476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3bdf6098241ec8a94cd001f8b32ddad1638ef6b
| 69
|
py
|
Python
|
include/__init__.py
|
jeff-hykin/python-include
|
169b54aa081ded3dd499368aa4d7f8d35dacf303
|
[
"MIT"
] | null | null | null |
include/__init__.py
|
jeff-hykin/python-include
|
169b54aa081ded3dd499368aa4d7f8d35dacf303
|
[
"MIT"
] | null | null | null |
include/__init__.py
|
jeff-hykin/python-include
|
169b54aa081ded3dd499368aa4d7f8d35dacf303
|
[
"MIT"
] | 1
|
2021-08-06T21:53:35.000Z
|
2021-08-06T21:53:35.000Z
|
from __future__ import absolute_import
from include.include import *
| 23
| 38
| 0.855072
| 9
| 69
| 6
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 69
| 2
| 39
| 34.5
| 0.885246
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3bf406950f01fb4c19f8e9bbf65a5f185a28eda
| 177
|
py
|
Python
|
util/__init__.py
|
ishine/CVC
|
3fb04749367a2f9ef1451af37863b1122f92f59c
|
[
"MIT"
] | 41
|
2020-10-22T11:01:12.000Z
|
2022-01-28T18:06:20.000Z
|
util/__init__.py
|
ishine/CVC
|
3fb04749367a2f9ef1451af37863b1122f92f59c
|
[
"MIT"
] | 9
|
2020-11-04T18:21:19.000Z
|
2021-11-02T11:21:19.000Z
|
util/__init__.py
|
ishine/CVC
|
3fb04749367a2f9ef1451af37863b1122f92f59c
|
[
"MIT"
] | 10
|
2020-11-02T08:41:54.000Z
|
2022-03-13T03:59:06.000Z
|
"""This package includes a miscellaneous collection of useful helper functions."""
from util import *
from data import preprocess
from data.wav_folder import read_wav, write_wav
| 44.25
| 82
| 0.819209
| 26
| 177
| 5.461538
| 0.730769
| 0.112676
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124294
| 177
| 4
| 83
| 44.25
| 0.916129
| 0.429379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3f005a9254d97def327d00956a91e1ce747be52
| 205
|
py
|
Python
|
imageatm/scripts/__init__.py
|
vishalbelsare/imageatm
|
2da9d1902d375338e4363ebf95bcb8ac2bb47ac7
|
[
"Apache-2.0"
] | 215
|
2019-03-18T10:51:53.000Z
|
2022-01-28T20:04:32.000Z
|
imageatm/scripts/__init__.py
|
JadeBlue96/imageatm
|
d4c099164176e6ed7bf7095a82cb29fb301df2b5
|
[
"Apache-2.0"
] | 21
|
2019-03-19T09:13:20.000Z
|
2020-05-02T10:18:02.000Z
|
imageatm/scripts/__init__.py
|
vishalbelsare/imageatm
|
2da9d1902d375338e4363ebf95bcb8ac2bb47ac7
|
[
"Apache-2.0"
] | 54
|
2019-03-18T16:39:37.000Z
|
2022-03-31T10:16:48.000Z
|
from .run_dataprep import run_dataprep
from .run_training import run_training
from .run_training_cloud import run_training_cloud
from .run_evaluation import run_evaluation
from .run_cloud import run_cloud
| 34.166667
| 50
| 0.878049
| 32
| 205
| 5.25
| 0.21875
| 0.208333
| 0.178571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 205
| 5
| 51
| 41
| 0.908108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4300b5d1da022dc0970eac0feac38a9904667b58
| 1,658
|
py
|
Python
|
tests/panoramic/cli/test_errors.py
|
kubamahnert/panoramic-cli
|
036f45a05d39f5762088ce23dbe367b938192f79
|
[
"MIT"
] | 5
|
2020-11-13T17:26:59.000Z
|
2021-03-19T15:11:26.000Z
|
tests/panoramic/cli/test_errors.py
|
kubamahnert/panoramic-cli
|
036f45a05d39f5762088ce23dbe367b938192f79
|
[
"MIT"
] | 5
|
2020-10-28T10:22:35.000Z
|
2021-01-27T17:33:58.000Z
|
tests/panoramic/cli/test_errors.py
|
kubamahnert/panoramic-cli
|
036f45a05d39f5762088ce23dbe367b938192f79
|
[
"MIT"
] | 3
|
2021-01-26T07:58:03.000Z
|
2021-03-11T13:28:34.000Z
|
import pytest
from requests.exceptions import RequestException
from requests.models import Response
from panoramic.cli.errors import RefreshException
def test_no_response_request_exception():
    """Message stays unchanged when the RequestException carries no response."""
    expected = '^Metadata could not be refreshed for table some_table in data connection some_source$'
    cause = RequestException('Failed to connect')
    with pytest.raises(RefreshException, match=expected):
        raise RefreshException('some_source', 'some_table').extract_request_id(cause)
def test_no_headers_request_exception():
    """Message stays unchanged when the attached response has no headers."""
    expected = '^Metadata could not be refreshed for table some_table in data connection some_source$'
    cause = RequestException('Failed to connect', response={})
    with pytest.raises(RefreshException, match=expected):
        raise RefreshException('some_source', 'some_table').extract_request_id(cause)
def test_no_request_id_request_exception():
    """Message stays unchanged when the response lacks a request-id header."""
    expected = '^Metadata could not be refreshed for table some_table in data connection some_source$'
    cause = RequestException('Failed to connect', response=Response())
    with pytest.raises(RefreshException, match=expected):
        raise RefreshException('some_source', 'some_table').extract_request_id(cause)
def test_valid_request_id_request_exception():
with pytest.raises(
RefreshException,
match=r'^Metadata could not be refreshed for table some_table in data connection some_source \(RequestId\: some_request_id\)$',
):
response = Response()
response.headers['x-diesel-request-id'] = 'some_request_id'
e = RequestException('Failed to connect', response=response)
raise RefreshException('some_source', 'some_table').extract_request_id(e)
| 40.439024
| 135
| 0.736429
| 201
| 1,658
| 5.845771
| 0.223881
| 0.068936
| 0.042553
| 0.088511
| 0.79234
| 0.79234
| 0.79234
| 0.79234
| 0.79234
| 0.731915
| 0
| 0
| 0.17491
| 1,658
| 40
| 136
| 41.45
| 0.858918
| 0
| 0
| 0.483871
| 0
| 0
| 0.33655
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.129032
| false
| 0
| 0.129032
| 0
| 0.258065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4307acc45db9110c51a305783fbafbad72fc7bd9
| 230
|
py
|
Python
|
tellurium/teconverters/__init__.py
|
ShaikAsifullah/distributed-tellurium
|
007e9b3842b614edd34908c001119c6da1d41897
|
[
"Apache-2.0"
] | 1
|
2019-06-19T04:40:33.000Z
|
2019-06-19T04:40:33.000Z
|
tellurium/teconverters/__init__.py
|
ShaikAsifullah/distributed-tellurium
|
007e9b3842b614edd34908c001119c6da1d41897
|
[
"Apache-2.0"
] | null | null | null |
tellurium/teconverters/__init__.py
|
ShaikAsifullah/distributed-tellurium
|
007e9b3842b614edd34908c001119c6da1d41897
|
[
"Apache-2.0"
] | null | null | null |
# converts Antimony to/from SBML
from .convert_antimony import antimonyConverter
from .convert_omex import inlineOmexImporter
from .convert_phrasedml import phrasedmlImporter
from .inline_omex import inlineOmex, saveInlineOMEX
| 25.555556
| 51
| 0.86087
| 26
| 230
| 7.461538
| 0.576923
| 0.170103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 230
| 8
| 52
| 28.75
| 0.946341
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
432ee0c8bf29e8e1c752e96d0cf8f72cd1f7582d
| 285
|
py
|
Python
|
app/models.py
|
anubhavp28/Shortify
|
44dd6fde9b67d93efe80d990c06181edde2e841d
|
[
"MIT"
] | null | null | null |
app/models.py
|
anubhavp28/Shortify
|
44dd6fde9b67d93efe80d990c06181edde2e841d
|
[
"MIT"
] | null | null | null |
app/models.py
|
anubhavp28/Shortify
|
44dd6fde9b67d93efe80d990c06181edde2e841d
|
[
"MIT"
] | null | null | null |
from app import sqldb
class Urls(sqldb.Model):
id = sqldb.Column(sqldb.Integer, primary_key=True)
url = sqldb.Column(sqldb.String(500), nullable=False)
created_at = sqldb.Column(sqldb.TIMESTAMP, nullable=False)
shorturl = sqldb.Column(sqldb.String(50), nullable=False)
| 40.714286
| 62
| 0.740351
| 40
| 285
| 5.225
| 0.575
| 0.210526
| 0.30622
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020243
| 0.133333
| 285
| 7
| 63
| 40.714286
| 0.825911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
432f0047a3df2254ba9fd19159a3dbfa9ccb0af7
| 116
|
py
|
Python
|
mezger_farms/farm_core/admin.py
|
mezgerj/mezger_farms
|
6abeee6e64a293fa87e8cbc54c1f8946be83faa2
|
[
"MIT"
] | null | null | null |
mezger_farms/farm_core/admin.py
|
mezgerj/mezger_farms
|
6abeee6e64a293fa87e8cbc54c1f8946be83faa2
|
[
"MIT"
] | null | null | null |
mezger_farms/farm_core/admin.py
|
mezgerj/mezger_farms
|
6abeee6e64a293fa87e8cbc54c1f8946be83faa2
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
# Register your models here.
from .models import Farm
admin.site.register(Farm)
| 14.5
| 32
| 0.784483
| 17
| 116
| 5.352941
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146552
| 116
| 7
| 33
| 16.571429
| 0.919192
| 0.224138
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4a3493188c43925c13dc1b25036e01becaa813bb
| 121
|
py
|
Python
|
pygcgen/pygcgen_exceptions.py
|
topic2k/pygcgen
|
16aa7d8d11560eb5f6bb48ccfe15d7d8cf4c0f85
|
[
"MIT"
] | 2
|
2018-05-02T02:49:06.000Z
|
2018-12-15T08:16:15.000Z
|
pygcgen/pygcgen_exceptions.py
|
topic2k/pygcgen
|
16aa7d8d11560eb5f6bb48ccfe15d7d8cf4c0f85
|
[
"MIT"
] | 46
|
2016-08-11T19:17:04.000Z
|
2022-02-01T19:16:10.000Z
|
pygcgen/pygcgen_exceptions.py
|
topic2k/pygcgen
|
16aa7d8d11560eb5f6bb48ccfe15d7d8cf4c0f85
|
[
"MIT"
] | 2
|
2016-11-16T19:26:04.000Z
|
2017-04-20T08:32:58.000Z
|
# -*- coding: utf-8 -*-
class ChangelogGeneratorError(Exception):
pass
class GithubApiError(Exception):
pass
| 12.1
| 41
| 0.68595
| 11
| 121
| 7.545455
| 0.727273
| 0.313253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010204
| 0.190083
| 121
| 9
| 42
| 13.444444
| 0.836735
| 0.173554
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
4a37298c7373622ec7ba8eb14beb9aa1f621b92c
| 15,781
|
py
|
Python
|
molscore/scaffold_memory/ScaffoldFilter.py
|
MorganCThomas/MolScore
|
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
|
[
"MIT"
] | 28
|
2020-12-11T22:10:16.000Z
|
2022-02-25T05:00:51.000Z
|
molscore/scaffold_memory/ScaffoldFilter.py
|
MorganCThomas/MolScore
|
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
|
[
"MIT"
] | 3
|
2021-08-31T22:50:41.000Z
|
2021-11-04T15:41:01.000Z
|
molscore/scaffold_memory/ScaffoldFilter.py
|
MorganCThomas/MolScore
|
b12b7b5539bb3211982fc7a1b5938c0f383a05c0
|
[
"MIT"
] | 9
|
2021-03-03T12:10:10.000Z
|
2022-02-15T06:53:11.000Z
|
# coding=utf-8
"""
Adapted from
https://github.com/tblaschke/reinvent-memory
"""
import abc
import json
import logging
import numpy as np
import pandas as pd
from rdkit import Chem
from rdkit import DataStructs
from rdkit.Chem import AllChem
from rdkit.Chem.AtomPairs import Pairs
from rdkit.Chem.Scaffolds import MurckoScaffold
from molscore.scaffold_memory.ScaffoldMemory import ScaffoldMemory
class ScaffoldFilter(ScaffoldMemory):
def __init__(self, nbmax=25, minscore=0.6, generic=False, outputmode="binary"):
super(ScaffoldFilter, self).__init__()
self.nbmax = nbmax # number of smiles for one scaffold to score until the penalizer starts
self.minscore = minscore # only add smiles with a minimum score into the memory
self.generic = generic # store generic scaffolds or normal murcko scaffolds?
self._scaffoldfunc = self.getGenericScaffold if generic else self.getScaffold
self._outputmode = outputmode
@abc.abstractmethod
def score(self, smiles, scores_dict: dict) -> np.array:
raise NotImplemented
def validScores(self, smiles, scores) -> bool:
if not len(smiles) == len(scores):
logging.error("SMILES and score vector are not the same length. Do nothing")
logging.debug(smiles)
logging.debug(scores)
return False
else:
return True
def savetojson(self, file):
savedict = {'nbmax': self.nbmax, 'minscore': self.minscore, 'generic': self.generic,
"_scaffolds": self._scaffolds}
jsonstr = json.dumps(savedict, sort_keys=True, indent=4, separators=(',', ': '))
with open(file, 'w') as f:
f.write(jsonstr)
def savetocsv(self, file):
df = {"Cluster": [], "Scaffold": [], "SMILES": []}
for i, scaffold in enumerate(self._scaffolds):
for smi, score in self._scaffolds[scaffold].items():
df["Cluster"].append(i)
df["Scaffold"].append(scaffold)
df["SMILES"].append(smi)
for item in score.keys():
if item in df:
df[item].append(score[item])
else:
df[item] = [score[item]]
df = pd.DataFrame(df)
df.to_csv(file, index=False)
def _sigmoid(self, x, k=0.15):
# sigmoid function
# use k to adjust the slope
x = x*2 -1
s = 1 / (1 + np.exp(-x / k))
return s
def calculate_output(self, nb_in_bucket: int):
if nb_in_bucket == 0:
return 1
if nb_in_bucket > self.nbmax:
return 0
if nb_in_bucket <= self.nbmax:
frac = nb_in_bucket/self.nbmax
if self._outputmode == "sigmoid":
return 1 - self._sigmoid(frac)
elif self._outputmode == "linear":
return 1 - frac
else: #self._outputmode == "binary"
return 1
class ScaffoldMatcher(ScaffoldFilter):
def __init__(self, nbmax=25, minscore=0.6, generic=False, outputmode="binary"):
super().__init__(nbmax=nbmax, minscore=minscore, generic=generic, outputmode=outputmode)
def score(self, smiles, scores_dict: dict) -> np.array:
scores = scores_dict.pop("total_score")
if not self.validScores(smiles, scores): return scores
for i, smile in enumerate(smiles):
score = scores[i]
try:
scaffold = self._scaffoldfunc(smile)
except Exception:
scaffold = ''
scores[i] = 0
if self.has(scaffold, smile):
scores[i] = 0
elif score >= self.minscore:
save_score = {"total_score": float(score)}
for k in scores_dict:
save_score[k] = float(scores_dict[k][i])
self._update_memory([smile], [scaffold], [save_score])
scores[i] = scores[i] * self.calculate_output(len(self[scaffold]))
return scores
def savetojson(self, file):
savedict = {'nbmax': self.nbmax, 'minscore': self.minscore, 'generic': self.generic,
"_scaffolds": self._scaffolds}
jsonstr = json.dumps(savedict, sort_keys=True, indent=4, separators=(',', ': '))
with open(file, 'w') as f:
f.write(jsonstr)
class IdenticalMurckoScaffold(ScaffoldMatcher):
"""Penalizes compounds based on exact Murcko Scaffolds previously generated. 'minsimilarity' is ignored."""
def __init__(self, nbmax=25, minscore=0.6, outputmode="binary", **kwargs):
"""
:param nbmax: Maximum number of molecules per memory bin (cluster)
:param minscore: Minimum molecule score required to consider for memory binning
:param outputmode: 'binary' (1 or 0), 'linear' (1 - fraction of bin) or 'sigmoid' (1 - sigmoid(fraction of bin))
:param kwargs:
"""
super().__init__(nbmax=nbmax, minscore=minscore, generic=False, outputmode=outputmode)
class IdenticalTopologicalScaffold(ScaffoldMatcher):
"""Penalizes compounds based on exact Topological Scaffolds previously generated. 'minsimilarity' is ignored."""
def __init__(self, nbmax=25, minscore=0.6, outputmode="binary", **kwargs):
"""
:param nbmax: Maximum number of molecules per memory bin (cluster)
:param minscore: Minimum molecule score required to consider for memory binning
:param outputmode: 'binary' (1 or 0), 'linear' (1 - fraction of bin) or 'sigmoid' (1 - sigmoid(fraction of bin))
:param kwargs:
"""
super().__init__(nbmax=nbmax, minscore=minscore, generic=True, outputmode=outputmode)
class CompoundSimilarity(ScaffoldFilter):
"""Penalizes compounds based on the ECFP or FCFP Tanimoto similarity to previously generated compounds."""
def __init__(self, nbmax=25, minscore=0.6, minsimilarity=0.6, radius=2, useFeatures=False,
bits=2048, outputmode="binary", **kwargs):
"""
:param nbmax: Maximum number of molecules per memory bin (cluster)
:param minscore: Minimum molecule score required to consider for memory binning
:param minsimilarity: Minimum similarity to centroid molecule in bin
:param radius: Morgan fingerprint radius (e.g., 2 = ECFP4)
:param useFeatures: Include feature information in fingerprint
:param bits: Length of fingerprint (i.e., number of folded bits)
:param outputmode: 'binary' (1 or 0), 'linear' (1 - fraction of bin) or 'sigmoid' (1 - sigmoid(fraction of bin))
:param kwargs:
"""
super().__init__(nbmax=nbmax, minscore=minscore, generic=False, outputmode=outputmode)
self.minsimilarity = minsimilarity
self.radius = radius
self.useFeatures = useFeatures
self.bits = bits
def score(self, smiles, scores_dict: dict) -> np.array:
scores = scores_dict.pop("total_score")
if not self.validScores(smiles, scores): return scores
for i, smile in enumerate(smiles):
score = scores[i]
if score >= self.minscore:
cluster, fingerprint, isnewcluster = self.findCluster(smile)
if self.has(cluster, smile):
scores[i] = 0
continue
save_score = {"total_score": float(score)}
for k in scores_dict:
save_score[k] = float(scores_dict[k][i])
if isnewcluster:
self._update_memory([smile], [cluster], [save_score], [fingerprint])
else:
self._update_memory([smile], [cluster], [save_score])
scores[i] = scores[i] * self.calculate_output(len(self[cluster]))
return scores
def findCluster(self, smiles):
mol = Chem.MolFromSmiles(smiles)
if not mol:
return "", "", False
if self.bits > 0:
fp = AllChem.GetMorganFingerprintAsBitVect(mol, self.radius, nBits=self.bits, useFeatures=self.useFeatures)
else:
fp = AllChem.GetMorganFingerprint(mol, self.radius, useFeatures=self.useFeatures)
if smiles in self.getFingerprints():
return smiles, fp, False
fps = list(self.getFingerprints().values())
sims = DataStructs.BulkTanimotoSimilarity(fp, fps)
if len(sims) == 0:
return smiles, fp, True
closest = np.argmax(sims)
if sims[closest] >= self.minsimilarity:
return list(self.getFingerprints().keys())[closest], fp, False
else:
return smiles, fp, True
class ScaffoldSimilarityAtomPair(CompoundSimilarity):
"""Penalizes compounds based on atom pair Tanimoto similarity to previously generated Murcko Scaffolds."""
def __init__(self, nbmax=25, minscore=0.6, minsimilarity=0.6, outputmode="binary", **kwargs):
"""
:param nbmax: Maximum number of molecules per memory bin (cluster)
:param minscore: Minimum molecule score required to consider for memory binning
:param minsimilarity: Minimum similarity to centroid molecule in bin
:param outputmode: 'binary' (1 or 0), 'linear' (1 - fraction of bin) or 'sigmoid' (1 - sigmoid(fraction of bin))
:param kwargs:
"""
super().__init__(nbmax=nbmax, minscore=minscore, minsimilarity=minsimilarity, outputmode=outputmode)
def score(self, smiles, scores_dict: dict) -> np.array:
scores = scores_dict.pop("total_score")
if not self.validScores(smiles, scores): return scores
for i, smile in enumerate(smiles):
score = scores[i]
if score >= self.minscore:
cluster, fingerprint, isnewcluster = self.findCluster(smile)
if self.has(cluster, smile):
scores[i] = 0
continue
save_score = {"total_score": float(score)}
for k in scores_dict:
save_score[k] = float(scores_dict[k][i])
if isnewcluster:
self._update_memory([smile], [cluster], [save_score], [fingerprint])
else:
self._update_memory([smile], [cluster], [save_score])
scores[i] = scores[i] * self.calculate_output(len(self[cluster]))
return scores
def findCluster(self, smiles):
mol = Chem.MolFromSmiles(smiles)
if mol:
try:
scaffold = MurckoScaffold.GetScaffoldForMol(mol)
except:
return "", "", False
if scaffold:
cluster = Chem.MolToSmiles(scaffold, isomericSmiles=False)
else:
return "", "", False
else:
return "", "", False
fp = Pairs.GetAtomPairFingerprint(scaffold) # Change to Tanimoto?
if cluster in self.getFingerprints():
return cluster, fp, False
fps = list(self.getFingerprints().values())
sims = DataStructs.BulkTanimotoSimilarity(fp, fps)
if len(sims) == 0:
return cluster, fp, True
closest = np.argmax(sims)
if sims[closest] >= self.minsimilarity:
return list(self.getFingerprints().keys())[closest], fp, False
else:
return cluster, fp, True
class ScaffoldSimilarityECFP(CompoundSimilarity):
"""Penalizes compounds based on atom pair Tanimoto similarity to previously generated Murcko Scaffolds."""
def __init__(self, nbmax=25, minscore=0.8, minsimilarity=0.8, radius=2, useFeatures=False,
bits=1024, outputmode="binary", **kwargs):
"""
:param nbmax: Maximum number of molecules per memory bin (cluster)
:param minscore: Minimum molecule score required to consider for memory binning
:param minsimilarity: Minimum similarity to centroid molecule in bin
:param radius: Morgan fingerprint radius (e.g., 2 = ECFP4)
:param useFeatures: Include feature information in fingerprint
:param bits: Length of fingerprint (i.e., number of folded bits)
:param outputmode: 'binary' (1 or 0), 'linear' (1 - fraction of bin) or 'sigmoid' (1 - sigmoid(fraction of bin))
:param kwargs:
"""
super().__init__(nbmax=nbmax, minscore=minscore, minsimilarity=minsimilarity, outputmode=outputmode)
self.radius = radius
self.useFeatures = useFeatures
self.bits = bits
def score(self, smiles, scores_dict: dict) -> np.array:
scores = scores_dict.pop("total_score")
if not self.validScores(smiles, scores): return scores
for i, smile in enumerate(smiles):
score = scores[i]
if score >= self.minscore:
cluster, fingerprint, isnewcluster = self.findCluster(smile)
if self.has(cluster, smile):
scores[i] = 0
continue
save_score = {"total_score": float(score)}
for k in scores_dict:
save_score[k] = float(scores_dict[k][i])
if isnewcluster:
self._update_memory([smile], [cluster], [save_score], [fingerprint])
else:
self._update_memory([smile], [cluster], [save_score])
scores[i] = scores[i] * self.calculate_output(len(self[cluster]))
return scores
def findCluster(self, smiles):
mol = Chem.MolFromSmiles(smiles)
if mol:
try:
scaffold = MurckoScaffold.GetScaffoldForMol(mol)
except:
return "", "", False
if scaffold:
cluster = Chem.MolToSmiles(scaffold, isomericSmiles=False)
else:
return "", "", False
else:
return "", "", False
if self.bits > 0:
fp = AllChem.GetMorganFingerprintAsBitVect(scaffold, self.radius, nBits=self.bits,
useFeatures=self.useFeatures)
else:
fp = AllChem.GetMorganFingerprint(scaffold, self.radius, useFeatures=self.useFeatures)
if smiles in self.getFingerprints():
return smiles, fp, False
fps = list(self.getFingerprints().values())
sims = DataStructs.BulkTanimotoSimilarity(fp, fps)
if len(sims) == 0:
return cluster, fp, True
closest = np.argmax(sims)
if sims[closest] >= self.minsimilarity:
return list(self.getFingerprints().keys())[closest], fp, False
else:
return cluster, fp, True
class NoScaffoldFilter(ScaffoldFilter):
"""Don't penalize compounds. Only save them with more than 'minscore'. All other arguments are ignored."""
def __init__(self, minscore=0.6, minsimilarity=0.6, nbmax=25, outputmode="binary"):
super().__init__(minscore=minscore)
def score(self, smiles, scores_dict: dict) -> np.array:
"""
we only log the compounds
"""
scores = scores_dict.pop("total_score")
if not self.validScores(smiles, scores): return scores
for i, smile in enumerate(smiles):
score = scores[i]
try:
scaffold = self._scaffoldfunc(smile)
except Exception:
scaffold = ''
if score >= self.minscore:
save_score = {"total_score": float(score)}
for k in scores_dict:
save_score[k] = float(scores_dict[k][i])
self._update_memory([smile], [scaffold], [save_score])
return scores
| 41.528947
| 120
| 0.598505
| 1,722
| 15,781
| 5.390244
| 0.135308
| 0.022624
| 0.014006
| 0.0181
| 0.767722
| 0.755656
| 0.738742
| 0.734755
| 0.734755
| 0.714609
| 0
| 0.009019
| 0.297383
| 15,781
| 379
| 121
| 41.638522
| 0.828102
| 0.185793
| 0
| 0.689139
| 0
| 0
| 0.027257
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086142
| false
| 0
| 0.041199
| 0
| 0.277154
| 0.074906
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a8d07628f93bbb159f3c2317b02aa4d791d4f72
| 179
|
py
|
Python
|
pyatdllib/core/errors.py
|
lisagorewitdecker/immaculater
|
fe46d282ae1d6325d67ebcf8f2b3d3b95580d5e7
|
[
"Apache-2.0"
] | null | null | null |
pyatdllib/core/errors.py
|
lisagorewitdecker/immaculater
|
fe46d282ae1d6325d67ebcf8f2b3d3b95580d5e7
|
[
"Apache-2.0"
] | null | null | null |
pyatdllib/core/errors.py
|
lisagorewitdecker/immaculater
|
fe46d282ae1d6325d67ebcf8f2b3d3b95580d5e7
|
[
"Apache-2.0"
] | null | null | null |
"""Defines DataError, an exception."""
class Error(Exception):
pass
class DataError(Error):
"""A cousin of ValueError -- the data passed break fundamental constraints."""
| 17.9
| 80
| 0.715084
| 21
| 179
| 6.095238
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156425
| 179
| 9
| 81
| 19.888889
| 0.847682
| 0.586592
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
4aae2caee126f31a5602eaa3a758c5da9920c512
| 169
|
py
|
Python
|
reinforcement_learning/rl_network_compression_ray_custom/src/tensorflow_resnet/compressor/__init__.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 2,610
|
2020-10-01T14:14:53.000Z
|
2022-03-31T18:02:31.000Z
|
reinforcement_learning/rl_network_compression_ray_custom/src/tensorflow_resnet/compressor/__init__.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 1,959
|
2020-09-30T20:22:42.000Z
|
2022-03-31T23:58:37.000Z
|
reinforcement_learning/rl_network_compression_ray_custom/src/tensorflow_resnet/compressor/__init__.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 2,052
|
2020-09-30T22:11:46.000Z
|
2022-03-31T23:02:51.000Z
|
from .core import ModeKeys, Module
from .resnet import ResNet18Model
from .train import tensorflow_train
__all__ = ["Module", "ModeKeys", "ResNet", "tensorflow_train"]
| 28.166667
| 62
| 0.775148
| 20
| 169
| 6.25
| 0.5
| 0.24
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013423
| 0.118343
| 169
| 5
| 63
| 33.8
| 0.825503
| 0
| 0
| 0
| 0
| 0
| 0.213018
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4359d9d4ed08cc70a4f53985597335e066f9151e
| 154
|
py
|
Python
|
faker/providers/company/tl_PH/__init__.py
|
jacksmith15/faker
|
bc5dda1983e4d055aa2698ccf0806a462cb8370e
|
[
"MIT"
] | 1
|
2022-02-16T23:14:19.000Z
|
2022-02-16T23:14:19.000Z
|
faker/providers/company/tl_PH/__init__.py
|
jacksmith15/faker
|
bc5dda1983e4d055aa2698ccf0806a462cb8370e
|
[
"MIT"
] | 33
|
2020-12-09T16:49:15.000Z
|
2022-01-04T22:03:10.000Z
|
faker/providers/company/tl_PH/__init__.py
|
jacksmith15/faker
|
bc5dda1983e4d055aa2698ccf0806a462cb8370e
|
[
"MIT"
] | 3
|
2022-02-07T18:18:54.000Z
|
2022-03-11T22:09:01.000Z
|
from ..fil_PH import Provider as FilPhProvider
class Provider(FilPhProvider):
"""No difference from Company Provider for fil_PH locale"""
pass
| 19.25
| 63
| 0.746753
| 20
| 154
| 5.65
| 0.7
| 0.088496
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 154
| 7
| 64
| 22
| 0.896825
| 0.344156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
435e25a969957bec4dae3e481d9f54e85d7d96f1
| 135
|
py
|
Python
|
src/api/pdi/application/dashboard/GetConnectionWidget/GetConnectionWidgetRequest.py
|
ahmetcagriakca/pythondataintegrator
|
079b968d6c893008f02c88dbe34909a228ac1c7b
|
[
"MIT"
] | 1
|
2020-12-18T21:37:28.000Z
|
2020-12-18T21:37:28.000Z
|
src/api/pdi/application/dashboard/GetConnectionWidget/GetConnectionWidgetRequest.py
|
ahmetcagriakca/pythondataintegrator
|
079b968d6c893008f02c88dbe34909a228ac1c7b
|
[
"MIT"
] | null | null | null |
src/api/pdi/application/dashboard/GetConnectionWidget/GetConnectionWidgetRequest.py
|
ahmetcagriakca/pythondataintegrator
|
079b968d6c893008f02c88dbe34909a228ac1c7b
|
[
"MIT"
] | 1
|
2020-12-18T21:37:31.000Z
|
2020-12-18T21:37:31.000Z
|
from pdip.cqrs.decorators import requestclass
@requestclass
class GetConnectionWidgetRequest:
# TODO:Request attributes
pass
| 16.875
| 45
| 0.8
| 13
| 135
| 8.307692
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155556
| 135
| 7
| 46
| 19.285714
| 0.947368
| 0.17037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0.25
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
435fa4c0f1c359aa744dd7512a1f71f16d3decb1
| 39
|
py
|
Python
|
web_app/vis_comm/routes/python_scripts/delete_json.py
|
RapidsAtHKUST/FYP16-CommunityDetectionVis
|
4b7c76f6f8f94d09ae4ab98262b894dfd6af3bc0
|
[
"MIT"
] | null | null | null |
web_app/vis_comm/routes/python_scripts/delete_json.py
|
RapidsAtHKUST/FYP16-CommunityDetectionVis
|
4b7c76f6f8f94d09ae4ab98262b894dfd6af3bc0
|
[
"MIT"
] | null | null | null |
web_app/vis_comm/routes/python_scripts/delete_json.py
|
RapidsAtHKUST/FYP16-CommunityDetectionVis
|
4b7c76f6f8f94d09ae4ab98262b894dfd6af3bc0
|
[
"MIT"
] | 1
|
2021-12-02T10:34:23.000Z
|
2021-12-02T10:34:23.000Z
|
import os
os.system("rm result.json")
| 9.75
| 27
| 0.717949
| 7
| 39
| 4
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 39
| 3
| 28
| 13
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0.358974
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
43855e7fb0c7a9c51c0eb41182f7a76699acd8d8
| 211
|
py
|
Python
|
dusty/systems/docker/common.py
|
gamechanger/dusty
|
dd9778e3a4f0c623209e53e98aa9dc1fe76fc309
|
[
"MIT"
] | 421
|
2015-06-02T16:29:59.000Z
|
2021-06-03T18:44:42.000Z
|
dusty/systems/docker/common.py
|
gamechanger/dusty
|
dd9778e3a4f0c623209e53e98aa9dc1fe76fc309
|
[
"MIT"
] | 404
|
2015-06-02T20:23:42.000Z
|
2019-08-21T16:59:41.000Z
|
dusty/systems/docker/common.py
|
gamechanger/dusty
|
dd9778e3a4f0c623209e53e98aa9dc1fe76fc309
|
[
"MIT"
] | 16
|
2015-06-16T17:21:02.000Z
|
2020-03-27T02:27:09.000Z
|
def spec_for_service(app_or_lib_name, expanded_specs):
if app_or_lib_name in expanded_specs['apps']:
return expanded_specs['apps'][app_or_lib_name]
return expanded_specs['libs'][app_or_lib_name]
| 42.2
| 54
| 0.772512
| 35
| 211
| 4.142857
| 0.428571
| 0.137931
| 0.22069
| 0.331034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127962
| 211
| 4
| 55
| 52.75
| 0.788043
| 0
| 0
| 0
| 0
| 0
| 0.056872
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
43b0c2957366beec2f5956d8a628c002ce8d6bb4
| 40
|
py
|
Python
|
tests/components/tonym/__init__.py
|
tonymyatt/core
|
ba9ef1b3b97fd6519c2440b101e968f982f1ab3d
|
[
"Apache-2.0"
] | null | null | null |
tests/components/tonym/__init__.py
|
tonymyatt/core
|
ba9ef1b3b97fd6519c2440b101e968f982f1ab3d
|
[
"Apache-2.0"
] | null | null | null |
tests/components/tonym/__init__.py
|
tonymyatt/core
|
ba9ef1b3b97fd6519c2440b101e968f982f1ab3d
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for the Tony M integration."""
| 20
| 39
| 0.675
| 6
| 40
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 40
| 1
| 40
| 40
| 0.794118
| 0.825
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
43cabc0a0fcd2422fa71a2ce65a705f9cb9bd210
| 46
|
py
|
Python
|
power_perceiver/exceptions.py
|
openclimatefix/power_perceiver
|
bafcdfaf6abf42fbab09da641479f74709ddd395
|
[
"MIT"
] | null | null | null |
power_perceiver/exceptions.py
|
openclimatefix/power_perceiver
|
bafcdfaf6abf42fbab09da641479f74709ddd395
|
[
"MIT"
] | 33
|
2022-02-16T07:51:41.000Z
|
2022-03-31T11:24:11.000Z
|
power_perceiver/exceptions.py
|
openclimatefix/power_perceiver
|
bafcdfaf6abf42fbab09da641479f74709ddd395
|
[
"MIT"
] | null | null | null |
class NoPVSystemsInSlice(Exception):
pass
| 15.333333
| 36
| 0.782609
| 4
| 46
| 9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 37
| 23
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
43dd90551ffd60e9e1b29bcda483e16f53e64594
| 188
|
py
|
Python
|
graphql_compiler/api/sql/postgres.py
|
kensho-technologies/graphql-compiler
|
4318443b7b2512a059f3616112bfc40bbf8eec06
|
[
"Apache-2.0"
] | 521
|
2017-07-18T23:56:25.000Z
|
2022-03-25T16:39:06.000Z
|
graphql_compiler/api/sql/postgres.py
|
kensho-technologies/graphql-compiler
|
4318443b7b2512a059f3616112bfc40bbf8eec06
|
[
"Apache-2.0"
] | 740
|
2017-07-19T01:52:42.000Z
|
2021-09-30T11:15:00.000Z
|
graphql_compiler/api/sql/postgres.py
|
kensho-technologies/graphql-compiler
|
4318443b7b2512a059f3616112bfc40bbf8eec06
|
[
"Apache-2.0"
] | 56
|
2017-07-18T23:56:14.000Z
|
2021-10-30T08:08:56.000Z
|
# Copyright 2019-present Kensho Technologies, LLC.
# pylint: disable=unused-import
from ...schema.schema_info import create_postgresql_schema_info # noqa
# pylint: enable=unused-import
| 26.857143
| 71
| 0.797872
| 24
| 188
| 6.083333
| 0.708333
| 0.164384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023952
| 0.111702
| 188
| 6
| 72
| 31.333333
| 0.850299
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
78d7c3bd3f56413c693c2cc4a6500bc74a373d12
| 97
|
py
|
Python
|
openpose/__init__.py
|
YIN95/OpenPose-PyTorch
|
c40f3d2767346bc96e707eb237c72a27541b89c5
|
[
"MIT"
] | 1
|
2019-08-15T11:47:58.000Z
|
2019-08-15T11:47:58.000Z
|
openpose/__init__.py
|
YIN95/OpenPose-PyTorch
|
c40f3d2767346bc96e707eb237c72a27541b89c5
|
[
"MIT"
] | null | null | null |
openpose/__init__.py
|
YIN95/OpenPose-PyTorch
|
c40f3d2767346bc96e707eb237c72a27541b89c5
|
[
"MIT"
] | null | null | null |
from . import networks
from . import estimations
from . import dataloader
__version__ = '0.0.1'
| 16.166667
| 25
| 0.752577
| 13
| 97
| 5.307692
| 0.615385
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.164948
| 97
| 5
| 26
| 19.4
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0.052083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
78f65ab648fd406fc5c8051db9cbb9cba5e5f257
| 40
|
py
|
Python
|
autoit/autoit_demo.py
|
norstc/little
|
6ba936031eac825db70f4fb5c6601b4908423796
|
[
"Apache-2.0"
] | null | null | null |
autoit/autoit_demo.py
|
norstc/little
|
6ba936031eac825db70f4fb5c6601b4908423796
|
[
"Apache-2.0"
] | null | null | null |
autoit/autoit_demo.py
|
norstc/little
|
6ba936031eac825db70f4fb5c6601b4908423796
|
[
"Apache-2.0"
] | null | null | null |
import autoit
autoit.run("notepad.exe")
| 13.333333
| 25
| 0.775
| 6
| 40
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 3
| 25
| 13.333333
| 0.837838
| 0
| 0
| 0
| 0
| 0
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
78f90a24b5a10894f408e22307c4cf9fe635eaad
| 128
|
py
|
Python
|
sentry/boot.py
|
DeveloperMetal/frappe-sentry
|
6947f2d04fd76e4afc86bc093edf9d14eb6f8f63
|
[
"MIT"
] | null | null | null |
sentry/boot.py
|
DeveloperMetal/frappe-sentry
|
6947f2d04fd76e4afc86bc093edf9d14eb6f8f63
|
[
"MIT"
] | null | null | null |
sentry/boot.py
|
DeveloperMetal/frappe-sentry
|
6947f2d04fd76e4afc86bc093edf9d14eb6f8f63
|
[
"MIT"
] | 1
|
2020-06-07T08:35:13.000Z
|
2020-06-07T08:35:13.000Z
|
import frappe
def boot_session(bootinfo):
bootinfo.sentry_dsn = frappe.db.get_single_value("Sentry Settings", "sentry_dsn")
| 32
| 85
| 0.789063
| 18
| 128
| 5.333333
| 0.722222
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101563
| 128
| 4
| 85
| 32
| 0.834783
| 0
| 0
| 0
| 0
| 0
| 0.193798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6013d0d6694b36f3bd396a3a372a406b2617192b
| 158
|
py
|
Python
|
solutions/iterate_icao24_callsign.py
|
lbasora/back2python
|
76e825049579ebf52e3b3e88003a1c2f53d2062b
|
[
"MIT"
] | 1
|
2018-09-02T19:33:08.000Z
|
2018-09-02T19:33:08.000Z
|
solutions/iterate_icao24_callsign.py
|
lbasora/back2python
|
76e825049579ebf52e3b3e88003a1c2f53d2062b
|
[
"MIT"
] | 1
|
2021-09-11T09:24:25.000Z
|
2021-09-12T19:53:16.000Z
|
solutions/iterate_icao24_callsign.py
|
lbasora/back2python
|
76e825049579ebf52e3b3e88003a1c2f53d2062b
|
[
"MIT"
] | 6
|
2019-08-28T09:23:01.000Z
|
2020-09-14T07:34:06.000Z
|
def iterate_icao24_callsign(data):
for _, chunk in data.groupby(["icao24", "callsign"]):
yield chunk
sum(1 for _ in iterate_icao24_callsign(df))
| 26.333333
| 57
| 0.702532
| 22
| 158
| 4.772727
| 0.590909
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053846
| 0.177215
| 158
| 5
| 58
| 31.6
| 0.753846
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
6024283246f64bdd4ce36de0519c4fe272572f28
| 103
|
py
|
Python
|
app/__init__.py
|
morganelle/flask-test
|
16a75090f737ac7dc95a81d35a7426a483b31b1b
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
morganelle/flask-test
|
16a75090f737ac7dc95a81d35a7426a483b31b1b
|
[
"MIT"
] | null | null | null |
app/__init__.py
|
morganelle/flask-test
|
16a75090f737ac7dc95a81d35a7426a483b31b1b
|
[
"MIT"
] | null | null | null |
"""Instantiates Flask app obj."""
from flask import Flask
app = Flask(__name__)
from app import views
| 17.166667
| 33
| 0.747573
| 15
| 103
| 4.866667
| 0.533333
| 0.219178
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15534
| 103
| 5
| 34
| 20.6
| 0.83908
| 0.262136
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
6036fe0923d8e97af4a35366f54889bca0ee69e4
| 88
|
py
|
Python
|
musica/admin.py
|
LucasSantus/music
|
f75396c380837933e8f16167a11eb064185d1e7c
|
[
"MIT"
] | null | null | null |
musica/admin.py
|
LucasSantus/music
|
f75396c380837933e8f16167a11eb064185d1e7c
|
[
"MIT"
] | null | null | null |
musica/admin.py
|
LucasSantus/music
|
f75396c380837933e8f16167a11eb064185d1e7c
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Musica
admin.site.register(Musica)
| 22
| 32
| 0.829545
| 13
| 88
| 5.615385
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102273
| 88
| 4
| 33
| 22
| 0.924051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
608e8ae371acc1cf333af5cb3d71174a8e311a03
| 26,972
|
py
|
Python
|
tests/plotting/testplotutils.py
|
lelatbones/holoviews
|
c94deaaf6250821ff30b764893e598ce24be98c0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/plotting/testplotutils.py
|
lelatbones/holoviews
|
c94deaaf6250821ff30b764893e598ce24be98c0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/plotting/testplotutils.py
|
lelatbones/holoviews
|
c94deaaf6250821ff30b764893e598ce24be98c0
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import, unicode_literals
from unittest import SkipTest
from nose.plugins.attrib import attr
import numpy as np
from holoviews import NdOverlay, Overlay, Dimension
from holoviews.core.spaces import DynamicMap, HoloMap
from holoviews.core.options import Store, Cycle
from holoviews.element.comparison import ComparisonTestCase
from holoviews.element import (Image, Scatter, Curve, Points,
Area, VectorField, HLine, Path)
from holoviews.operation import operation
from holoviews.plotting.util import (
compute_overlayable_zorders, get_min_distance, process_cmap,
initialize_dynamic, split_dmap_overlay, _get_min_distance_numpy,
bokeh_palette_to_palette, mplcmap_to_palette, color_intervals)
from holoviews.streams import PointerX
try:
from holoviews.plotting.bokeh import util
bokeh_renderer = Store.renderers['bokeh']
except:
bokeh_renderer = None
class TestOverlayableZorders(ComparisonTestCase):
def test_compute_overlayable_zorders_holomap(self):
hmap = HoloMap({0: Points([])})
sources = compute_overlayable_zorders(hmap)
self.assertEqual(sources[0], [hmap, hmap.last])
def test_compute_overlayable_zorders_with_overlaid_holomap(self):
points = Points([])
hmap = HoloMap({0: points})
curve = Curve([])
combined = hmap*curve
sources = compute_overlayable_zorders(combined)
self.assertEqual(sources[0], [points, combined.last, combined])
def test_dynamic_compute_overlayable_zorders_two_mixed_layers(self):
area = Area(range(10))
dmap = DynamicMap(lambda: Curve(range(10)), kdims=[])
combined = area*dmap
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertEqual(sources[0], [area])
self.assertEqual(sources[1], [dmap])
def test_dynamic_compute_overlayable_zorders_two_mixed_layers_reverse(self):
area = Area(range(10))
dmap = DynamicMap(lambda: Curve(range(10)), kdims=[])
combined = dmap*area
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertEqual(sources[0], [dmap])
self.assertEqual(sources[1], [area])
def test_dynamic_compute_overlayable_zorders_two_dynamic_layers(self):
area = DynamicMap(lambda: Area(range(10)), kdims=[])
dmap = DynamicMap(lambda: Curve(range(10)), kdims=[])
combined = area*dmap
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertEqual(sources[0], [area])
self.assertEqual(sources[1], [dmap])
def test_dynamic_compute_overlayable_zorders_two_deep_dynamic_layers(self):
area = DynamicMap(lambda: Area(range(10)), kdims=[])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
area_redim = area.redim(x='x2')
curve_redim = curve.redim(x='x2')
combined = area_redim*curve_redim
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertIn(area_redim, sources[0])
self.assertIn(area, sources[0])
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertIn(curve_redim, sources[1])
self.assertIn(curve, sources[1])
self.assertNotIn(area_redim, sources[1])
self.assertNotIn(area, sources[1])
def test_dynamic_compute_overlayable_zorders_three_deep_dynamic_layers(self):
area = DynamicMap(lambda: Area(range(10)), kdims=[])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve2 = DynamicMap(lambda: Curve(range(10)), kdims=[])
area_redim = area.redim(x='x2')
curve_redim = curve.redim(x='x2')
curve2_redim = curve2.redim(x='x3')
combined = area_redim*curve_redim
combined1 = (combined*curve2_redim)
combined1[()]
sources = compute_overlayable_zorders(combined1)
self.assertIn(area_redim, sources[0])
self.assertIn(area, sources[0])
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertNotIn(curve2_redim, sources[0])
self.assertNotIn(curve2, sources[0])
self.assertIn(curve_redim, sources[1])
self.assertIn(curve, sources[1])
self.assertNotIn(area_redim, sources[1])
self.assertNotIn(area, sources[1])
self.assertNotIn(curve2_redim, sources[1])
self.assertNotIn(curve2, sources[1])
self.assertIn(curve2_redim, sources[2])
self.assertIn(curve2, sources[2])
self.assertNotIn(area_redim, sources[2])
self.assertNotIn(area, sources[2])
self.assertNotIn(curve_redim, sources[2])
self.assertNotIn(curve, sources[2])
def test_dynamic_compute_overlayable_zorders_three_deep_dynamic_layers_cloned(self):
area = DynamicMap(lambda: Area(range(10)), kdims=[])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve2 = DynamicMap(lambda: Curve(range(10)), kdims=[])
area_redim = area.redim(x='x2')
curve_redim = curve.redim(x='x2')
curve2_redim = curve2.redim(x='x3')
combined = area_redim*curve_redim
combined1 = (combined*curve2_redim).redim(y='y2')
combined1[()]
sources = compute_overlayable_zorders(combined1)
self.assertIn(area_redim, sources[0])
self.assertIn(area, sources[0])
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertNotIn(curve2_redim, sources[0])
self.assertNotIn(curve2, sources[0])
self.assertIn(curve_redim, sources[1])
self.assertIn(curve, sources[1])
self.assertNotIn(area_redim, sources[1])
self.assertNotIn(area, sources[1])
self.assertNotIn(curve2_redim, sources[1])
self.assertNotIn(curve2, sources[1])
self.assertIn(curve2_redim, sources[2])
self.assertIn(curve2, sources[2])
self.assertNotIn(area_redim, sources[2])
self.assertNotIn(area, sources[2])
self.assertNotIn(curve_redim, sources[2])
self.assertNotIn(curve, sources[2])
def test_dynamic_compute_overlayable_zorders_mixed_dynamic_and_non_dynamic_overlays_reverse(self):
area1 = Area(range(10))
area2 = Area(range(10))
overlay = area1 * area2
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve_redim = curve.redim(x='x2')
combined = curve_redim*overlay
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertIn(curve_redim, sources[0])
self.assertIn(curve, sources[0])
self.assertNotIn(overlay, sources[0])
self.assertIn(area1, sources[1])
self.assertIn(overlay, sources[1])
self.assertNotIn(curve_redim, sources[1])
self.assertNotIn(curve, sources[1])
self.assertIn(area2, sources[2])
self.assertIn(overlay, sources[2])
self.assertNotIn(curve_redim, sources[2])
self.assertNotIn(curve, sources[2])
def test_dynamic_compute_overlayable_zorders_mixed_dynamic_and_non_dynamic_ndoverlays(self):
ndoverlay = NdOverlay({i: Area(range(10+i)) for i in range(2)})
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve_redim = curve.redim(x='x2')
combined = ndoverlay*curve_redim
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertIn(ndoverlay[0], sources[0])
self.assertIn(ndoverlay, sources[0])
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertIn(ndoverlay[1], sources[1])
self.assertIn(ndoverlay, sources[1])
self.assertNotIn(curve_redim, sources[1])
self.assertNotIn(curve, sources[1])
self.assertIn(curve_redim, sources[2])
self.assertIn(curve, sources[2])
self.assertNotIn(ndoverlay, sources[2])
def test_dynamic_compute_overlayable_zorders_mixed_dynamic_and_dynamic_ndoverlay_with_streams(self):
ndoverlay = DynamicMap(lambda x: NdOverlay({i: Area(range(10+i)) for i in range(2)}),
kdims=[], streams=[PointerX()])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve_redim = curve.redim(x='x2')
combined = ndoverlay*curve_redim
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertIn(ndoverlay, sources[0])
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertIn(ndoverlay, sources[1])
self.assertNotIn(curve_redim, sources[1])
self.assertNotIn(curve, sources[1])
self.assertIn(curve_redim, sources[2])
self.assertIn(curve, sources[2])
self.assertNotIn(ndoverlay, sources[2])
def test_dynamic_compute_overlayable_zorders_mixed_dynamic_and_dynamic_ndoverlay_with_streams_cloned(self):
ndoverlay = DynamicMap(lambda x: NdOverlay({i: Area(range(10+i)) for i in range(2)}),
kdims=[], streams=[PointerX()])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve_redim = curve.redim(x='x2')
combined = ndoverlay*curve_redim
combined[()]
sources = compute_overlayable_zorders(combined.clone())
self.assertIn(ndoverlay, sources[0])
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertIn(ndoverlay, sources[1])
self.assertNotIn(curve_redim, sources[1])
self.assertNotIn(curve, sources[1])
self.assertIn(curve_redim, sources[2])
self.assertIn(curve, sources[2])
self.assertNotIn(ndoverlay, sources[2])
def test_dynamic_compute_overlayable_zorders_mixed_dynamic_and_non_dynamic_ndoverlays_reverse(self):
ndoverlay = NdOverlay({i: Area(range(10+i)) for i in range(2)})
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve_redim = curve.redim(x='x2')
combined = curve_redim*ndoverlay
combined[()]
sources = compute_overlayable_zorders(combined)
self.assertIn(curve_redim, sources[0])
self.assertIn(curve, sources[0])
self.assertNotIn(ndoverlay, sources[0])
self.assertIn(ndoverlay[0], sources[1])
self.assertIn(ndoverlay, sources[1])
self.assertNotIn(curve_redim, sources[1])
self.assertNotIn(curve, sources[1])
self.assertIn(ndoverlay[1], sources[2])
self.assertIn(ndoverlay, sources[2])
self.assertNotIn(curve_redim, sources[2])
self.assertNotIn(curve, sources[2])
def test_dynamic_compute_overlayable_zorders_three_deep_dynamic_layers_reduced(self):
area = DynamicMap(lambda: Area(range(10)), kdims=[])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve2 = DynamicMap(lambda: Curve(range(10)), kdims=[])
area_redim = area.redim(x='x2')
curve_redim = curve.redim(x='x2')
curve2_redim = curve2.redim(x='x3')
combined = (area_redim*curve_redim).map(lambda x: x.get(0), Overlay)
combined1 = combined*curve2_redim
combined1[()]
sources = compute_overlayable_zorders(combined1)
self.assertIn(curve_redim, sources[0])
self.assertIn(curve, sources[0])
self.assertIn(area_redim, sources[0])
self.assertIn(area, sources[0])
self.assertNotIn(curve2_redim, sources[0])
self.assertNotIn(curve2, sources[0])
self.assertIn(curve2_redim, sources[1])
self.assertIn(curve2, sources[1])
self.assertNotIn(area_redim, sources[1])
self.assertNotIn(area, sources[1])
self.assertNotIn(curve_redim, sources[1])
self.assertNotIn(curve, sources[1])
def test_dynamic_compute_overlayable_zorders_three_deep_dynamic_layers_reduced_layers_by_one(self):
area = DynamicMap(lambda: Area(range(10)), kdims=[])
area2 = DynamicMap(lambda: Area(range(10)), kdims=[])
curve = DynamicMap(lambda: Curve(range(10)), kdims=[])
curve2 = DynamicMap(lambda: Curve(range(10)), kdims=[])
area_redim = area.redim(x='x2')
curve_redim = curve.redim(x='x2')
curve2_redim = curve2.redim(x='x3')
combined = (area_redim*curve_redim*area2).map(lambda x: x.clone(x.items()[:2]), Overlay)
combined1 = combined*curve2_redim
combined1[()]
sources = compute_overlayable_zorders(combined1)
self.assertNotIn(curve_redim, sources[0])
self.assertNotIn(curve, sources[0])
self.assertNotIn(curve2_redim, sources[0])
self.assertNotIn(curve2, sources[0])
self.assertNotIn(area, sources[0])
self.assertNotIn(area_redim, sources[0])
self.assertNotIn(area2, sources[0])
self.assertNotIn(area_redim, sources[1])
self.assertNotIn(area, sources[1])
self.assertNotIn(curve2_redim, sources[1])
self.assertNotIn(curve2, sources[1])
self.assertNotIn(area2, sources[0])
self.assertIn(curve2_redim, sources[2])
self.assertIn(curve2, sources[2])
self.assertNotIn(area_redim, sources[2])
self.assertNotIn(area, sources[2])
self.assertNotIn(area2, sources[0])
self.assertNotIn(curve_redim, sources[2])
self.assertNotIn(curve, sources[2])
class TestInitializeDynamic(ComparisonTestCase):
def test_dynamicmap_default_initializes(self):
dims = [Dimension('N', default=5, range=(0, 10))]
dmap = DynamicMap(lambda N: Curve([1, N, 5]), kdims=dims)
initialize_dynamic(dmap)
self.assertEqual(dmap.keys(), [5])
def test_dynamicmap_numeric_values_initializes(self):
dims = [Dimension('N', values=[10, 5, 0])]
dmap = DynamicMap(lambda N: Curve([1, N, 5]), kdims=dims)
initialize_dynamic(dmap)
self.assertEqual(dmap.keys(), [0])
class TestSplitDynamicMapOverlay(ComparisonTestCase):
"""
Tests the split_dmap_overlay utility
"""
def setUp(self):
self.dmap_element = DynamicMap(lambda: Image([]))
self.dmap_overlay = DynamicMap(lambda: Overlay([Curve([]), Points([])]))
self.dmap_ndoverlay = DynamicMap(lambda: NdOverlay({0: Curve([]), 1: Curve([])}))
self.element = Scatter([])
self.el1, self.el2 = Path([]), HLine(0)
self.overlay = Overlay([self.el1, self.el2])
self.ndoverlay = NdOverlay({0: VectorField([]), 1: VectorField([])})
def test_dmap_ndoverlay(self):
test = self.dmap_ndoverlay
initialize_dynamic(test)
layers = [self.dmap_ndoverlay, self.dmap_ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay(self):
test = self.dmap_overlay
initialize_dynamic(test)
layers = [self.dmap_overlay, self.dmap_overlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_element_mul_dmap_overlay(self):
test = self.dmap_element * self.dmap_overlay
initialize_dynamic(test)
layers = [self.dmap_element, self.dmap_overlay, self.dmap_overlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_element_mul_dmap_ndoverlay(self):
test = self.dmap_element * self.dmap_ndoverlay
initialize_dynamic(test)
layers = [self.dmap_element, self.dmap_ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_element_mul_element(self):
test = self.dmap_element * self.element
initialize_dynamic(test)
layers = [self.dmap_element, self.element]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_element_mul_overlay(self):
test = self.dmap_element * self.overlay
initialize_dynamic(test)
layers = [self.dmap_element, self.el1, self.el2]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_element_mul_ndoverlay(self):
test = self.dmap_element * self.ndoverlay
initialize_dynamic(test)
layers = [self.dmap_element, self.ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay_mul_dmap_ndoverlay(self):
test = self.dmap_overlay * self.dmap_ndoverlay
initialize_dynamic(test)
layers = [self.dmap_overlay, self.dmap_overlay, self.dmap_ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay_mul_element(self):
test = self.dmap_overlay * self.element
initialize_dynamic(test)
layers = [self.dmap_overlay, self.dmap_overlay, self.element]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay_mul_overlay(self):
test = self.dmap_overlay * self.overlay
initialize_dynamic(test)
layers = [self.dmap_overlay, self.dmap_overlay, self.el1, self.el2]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_all_combinations(self):
test = (self.dmap_overlay * self.element * self.dmap_ndoverlay *
self.overlay * self.dmap_element * self.ndoverlay)
initialize_dynamic(test)
layers = [self.dmap_overlay, self.dmap_overlay, self.element,
self.dmap_ndoverlay, self.el1, self.el2, self.dmap_element,
self.ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay_operation_mul_dmap_ndoverlay(self):
mapped = operation(self.dmap_overlay)
test = mapped * self.dmap_ndoverlay
initialize_dynamic(test)
layers = [mapped, mapped, self.dmap_ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay_linked_operation_mul_dmap_ndoverlay(self):
mapped = operation(self.dmap_overlay, link_inputs=True)
test = mapped * self.dmap_ndoverlay
initialize_dynamic(test)
layers = [mapped, mapped, self.dmap_ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
def test_dmap_overlay_linked_operation_mul_dmap_element_ndoverlay(self):
mapped = self.dmap_overlay.map(lambda x: x.get(0), Overlay)
test = mapped * self.element * self.dmap_ndoverlay
initialize_dynamic(test)
layers = [mapped, self.element, self.dmap_ndoverlay]
self.assertEqual(split_dmap_overlay(test), layers)
class TestPlotColorUtils(ComparisonTestCase):
def test_process_cmap_list_cycle(self):
colors = process_cmap(['#ffffff', '#959595', '#000000'], 4)
self.assertEqual(colors, ['#ffffff', '#959595', '#000000', '#ffffff'])
def test_process_cmap_cycle(self):
colors = process_cmap(Cycle(values=['#ffffff', '#959595', '#000000']), 4)
self.assertEqual(colors, ['#ffffff', '#959595', '#000000', '#ffffff'])
def test_process_cmap_invalid_str(self):
with self.assertRaises(ValueError):
process_cmap('NonexistentColorMap', 3)
def test_process_cmap_invalid_type(self):
with self.assertRaises(TypeError):
process_cmap({'A', 'B', 'C'}, 3)
class TestMPLColormapUtils(ComparisonTestCase):
def setUp(self):
try:
import matplotlib.cm # noqa
except:
raise SkipTest("Matplotlib needed to test matplotlib colormap instances")
def test_mpl_colormap_name_palette(self):
colors = process_cmap('Greys', 3)
self.assertEqual(colors, ['#ffffff', '#959595', '#000000'])
def test_mpl_colormap_instance(self):
from matplotlib.cm import get_cmap
cmap = get_cmap('Greys')
colors = process_cmap(cmap, 3)
self.assertEqual(colors, ['#ffffff', '#959595', '#000000'])
def test_mpl_colormap_categorical(self):
colors = mplcmap_to_palette('Category20', 3)
self.assertEqual(colors, ['#1f77b4', '#c5b0d5', '#9edae5'])
def test_mpl_colormap_categorical_reverse(self):
colors = mplcmap_to_palette('Category20_r', 3)
self.assertEqual(colors, ['#1f77b4', '#8c564b', '#9edae5'][::-1])
def test_mpl_colormap_sequential(self):
colors = mplcmap_to_palette('YlGn', 3)
self.assertEqual(colors, ['#ffffe5', '#77c578', '#004529'])
def test_mpl_colormap_sequential_reverse(self):
colors = mplcmap_to_palette('YlGn_r', 3)
self.assertEqual(colors, ['#ffffe5', '#78c679', '#004529'][::-1])
def test_mpl_colormap_diverging(self):
colors = mplcmap_to_palette('RdBu', 3)
self.assertEqual(colors, ['#67001f', '#f6f6f6', '#053061'])
def test_mpl_colormap_diverging_reverse(self):
colors = mplcmap_to_palette('RdBu_r', 3)
self.assertEqual(colors, ['#67001f', '#f7f6f6', '#053061'][::-1])
def test_mpl_colormap_perceptually_uniform(self):
colors = mplcmap_to_palette('viridis', 4)
self.assertEqual(colors, ['#440154', '#30678d', '#35b778', '#fde724'])
def test_mpl_colormap_perceptually_uniform_reverse(self):
colors = mplcmap_to_palette('viridis_r', 4)
self.assertEqual(colors, ['#440154', '#30678d', '#35b778', '#fde724'][::-1])
class TestBokehPaletteUtils(ComparisonTestCase):
def setUp(self):
try:
import bokeh.palettes # noqa
except:
raise SkipTest('Bokeh required to test bokeh palette utilities')
def test_bokeh_palette_categorical_palettes_not_interpolated(self):
# Ensure categorical palettes are not expanded
categorical = ('accent', 'category20', 'dark2', 'colorblind', 'pastel1',
'pastel2', 'set1', 'set2', 'set3', 'paired')
for cat in categorical:
self.assertTrue(len(set(bokeh_palette_to_palette(cat))) <= 20)
def test_bokeh_palette_categorical(self):
colors = bokeh_palette_to_palette('Category20', 3)
self.assertEqual(colors, ['#1f77b4', '#c5b0d5', '#9edae5'])
def test_bokeh_palette_categorical_reverse(self):
colors = bokeh_palette_to_palette('Category20_r', 3)
self.assertEqual(colors, ['#1f77b4', '#8c564b', '#9edae5'][::-1])
def test_bokeh_palette_sequential(self):
colors = bokeh_palette_to_palette('YlGn', 3)
self.assertEqual(colors, ['#ffffe5', '#78c679', '#004529'])
def test_bokeh_palette_sequential_reverse(self):
colors = bokeh_palette_to_palette('YlGn_r', 3)
self.assertEqual(colors, ['#ffffe5', '#78c679', '#004529'][::-1])
def test_bokeh_palette_diverging(self):
colors = bokeh_palette_to_palette('RdBu', 3)
self.assertEqual(colors, ['#67001f', '#f7f7f7', '#053061'])
def test_bokeh_palette_diverging_reverse(self):
colors = bokeh_palette_to_palette('RdBu_r', 3)
self.assertEqual(colors, ['#67001f', '#f7f7f7', '#053061'][::-1])
def test_bokeh_palette_uniform_interpolated(self):
colors = bokeh_palette_to_palette('Viridis', 4)
self.assertEqual(colors, ['#440154', '#30678D', '#35B778', '#FDE724'])
def test_bokeh_palette_perceptually_uniform(self):
colors = bokeh_palette_to_palette('viridis', 4)
self.assertEqual(colors, ['#440154', '#30678D', '#35B778', '#FDE724'])
def test_bokeh_palette_perceptually_uniform_reverse(self):
colors = bokeh_palette_to_palette('viridis_r', 4)
self.assertEqual(colors, ['#440154', '#30678D', '#35B778', '#FDE724'][::-1])
def test_color_intervals(self):
levels = [0, 38, 73, 95, 110, 130, 156]
colors = ['#5ebaff', '#00faf4', '#ffffcc', '#ffe775', '#ffc140', '#ff8f20']
cmap = color_intervals(colors, levels, N=10)
self.assertEqual(cmap, ['#5ebaff', '#5ebaff', '#00faf4',
'#00faf4', '#ffffcc', '#ffe775',
'#ffc140', '#ff8f20', '#ff8f20'])
def test_color_intervals_clipped(self):
levels = [0, 38, 73, 95, 110, 130, 156, 999]
colors = ['#5ebaff', '#00faf4', '#ffffcc', '#ffe775', '#ffc140', '#ff8f20', '#ff6060']
cmap = color_intervals(colors, levels, clip=(10, 90), N=100)
self.assertEqual(cmap, ['#5ebaff', '#5ebaff', '#5ebaff', '#00faf4', '#00faf4',
'#00faf4', '#00faf4', '#ffffcc'])
class TestPlotUtils(ComparisonTestCase):
def test_get_min_distance_float32_type(self):
xs, ys = (np.arange(0, 2., .2, dtype='float32'),
np.arange(0, 2., .2, dtype='float32'))
X, Y = np.meshgrid(xs, ys)
dist = get_min_distance(Points((X.flatten(), Y.flatten())))
self.assertEqual(round(dist, 5), 0.2)
def test_get_min_distance_int32_type(self):
xs, ys = (np.arange(0, 10, dtype='int32'),
np.arange(0, 10, dtype='int32'))
X, Y = np.meshgrid(xs, ys)
dist = get_min_distance(Points((X.flatten(), Y.flatten())))
self.assertEqual(dist, 1.0)
def test_get_min_distance_float32_type_no_scipy(self):
xs, ys = (np.arange(0, 2., .2, dtype='float32'),
np.arange(0, 2., .2, dtype='float32'))
X, Y = np.meshgrid(xs, ys)
dist = _get_min_distance_numpy(Points((X.flatten(), Y.flatten())))
self.assertEqual(dist, np.float32(0.2))
def test_get_min_distance_int32_type_no_scipy(self):
xs, ys = (np.arange(0, 10, dtype='int32'),
np.arange(0, 10, dtype='int32'))
X, Y = np.meshgrid(xs, ys)
dist = _get_min_distance_numpy(Points((X.flatten(), Y.flatten())))
self.assertEqual(dist, 1.0)
@attr(optional=1) # Flexx is optional
class TestBokehUtils(ComparisonTestCase):
def setUp(self):
if not bokeh_renderer:
raise SkipTest("Bokeh required to test bokeh plot utils.")
def test_py2js_funcformatter_single_arg(self):
def test(x): return '%s$' % x
jsfunc = util.py2js_tickformatter(test)
js_func = ('var x = tick;\nvar formatter;\nformatter = function () {\n'
' return "" + x + "$";\n};\n\nreturn formatter();\n')
self.assertEqual(jsfunc, js_func)
def test_py2js_funcformatter_two_args(self):
def test(x, pos): return '%s$' % x
jsfunc = util.py2js_tickformatter(test)
js_func = ('var x = tick;\nvar formatter;\nformatter = function () {\n'
' return "" + x + "$";\n};\n\nreturn formatter();\n')
self.assertEqual(jsfunc, js_func)
def test_py2js_funcformatter_arg_and_kwarg(self):
def test(x, pos=None): return '%s$' % x
jsfunc = util.py2js_tickformatter(test)
js_func = ('var x = tick;\nvar formatter;\nformatter = function () {\n'
' pos = (pos === undefined) ? null: pos;\n return "" '
'+ x + "$";\n};\n\nreturn formatter();\n')
self.assertEqual(jsfunc, js_func)
| 41.752322
| 111
| 0.653863
| 3,168
| 26,972
| 5.356692
| 0.088699
| 0.067177
| 0.033235
| 0.037949
| 0.824278
| 0.775722
| 0.747731
| 0.70112
| 0.681084
| 0.643665
| 0
| 0.040376
| 0.215816
| 26,972
| 645
| 112
| 41.817054
| 0.76195
| 0.004078
| 0
| 0.551724
| 0
| 0
| 0.058935
| 0.002347
| 0
| 0
| 0
| 0
| 0.350575
| 1
| 0.136015
| false
| 0
| 0.030651
| 0.005747
| 0.181992
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
60ea3008a91fe44e0195e30d8aa059569d3ff7af
| 209
|
py
|
Python
|
wavenet/__init__.py
|
Luna86/tensorflow-wavenet-local-condition
|
126a8e2d8973054d273db72f02c8e4074e6fefe7
|
[
"MIT"
] | null | null | null |
wavenet/__init__.py
|
Luna86/tensorflow-wavenet-local-condition
|
126a8e2d8973054d273db72f02c8e4074e6fefe7
|
[
"MIT"
] | null | null | null |
wavenet/__init__.py
|
Luna86/tensorflow-wavenet-local-condition
|
126a8e2d8973054d273db72f02c8e4074e6fefe7
|
[
"MIT"
] | null | null | null |
from .model_back import WaveNetModel
from .skeleton_reader import SkeletonReader
from .ops import (mu_law_encode, mu_law_decode, time_to_batch,
batch_to_time, causal_conv, optimizer_factory)
| 41.8
| 64
| 0.784689
| 29
| 209
| 5.241379
| 0.689655
| 0.065789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167464
| 209
| 4
| 65
| 52.25
| 0.873563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
880cbaa38bbbc8b8242a32654ff7fa54bedc433e
| 137
|
py
|
Python
|
linux_proc_extras/datadog_checks/linux_proc_extras/__init__.py
|
chotiwat/integrations-core
|
57a4437790cafcb769e120aa64d84aaecbf6a414
|
[
"BSD-3-Clause"
] | 2
|
2019-04-03T17:21:38.000Z
|
2020-02-04T16:28:05.000Z
|
linux_proc_extras/datadog_checks/linux_proc_extras/__init__.py
|
chotiwat/integrations-core
|
57a4437790cafcb769e120aa64d84aaecbf6a414
|
[
"BSD-3-Clause"
] | 10
|
2018-02-27T19:06:07.000Z
|
2021-08-30T03:23:26.000Z
|
linux_proc_extras/datadog_checks/linux_proc_extras/__init__.py
|
chotiwat/integrations-core
|
57a4437790cafcb769e120aa64d84aaecbf6a414
|
[
"BSD-3-Clause"
] | 6
|
2018-01-09T21:37:20.000Z
|
2020-05-26T09:28:09.000Z
|
from . import linux_proc_extras
MoreUnixCheck = linux_proc_extras.MoreUnixCheck
__version__ = "1.0.0"
__all__ = ['linux_proc_extras']
| 17.125
| 47
| 0.788321
| 18
| 137
| 5.222222
| 0.555556
| 0.287234
| 0.478723
| 0.595745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0.116788
| 137
| 7
| 48
| 19.571429
| 0.752066
| 0
| 0
| 0
| 0
| 0
| 0.160584
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7151027a9414b8df8788bbaff22a54257280b6a4
| 51,491
|
py
|
Python
|
integration-test/663-combo-outdoor-landuse-pois.py
|
rinnyB/vector-datasource
|
024909ed8245a4ad4a25c908413ba3602de6c335
|
[
"MIT"
] | null | null | null |
integration-test/663-combo-outdoor-landuse-pois.py
|
rinnyB/vector-datasource
|
024909ed8245a4ad4a25c908413ba3602de6c335
|
[
"MIT"
] | 2
|
2021-03-31T20:22:37.000Z
|
2021-12-13T20:50:11.000Z
|
integration-test/663-combo-outdoor-landuse-pois.py
|
rinnyB/vector-datasource
|
024909ed8245a4ad4a25c908413ba3602de6c335
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
from shapely.wkt import loads as wkt_loads
import dsl
from . import FixtureTest
class ComboOutdoorLandusePois(FixtureTest):
def test_water_park_way(self):
# Waterworld in Concord
self.generate_fixtures(dsl.way(3710152499, wkt_loads('POINT (-122.050548768958 37.97324451957718)'), {u'source': u'openstreetmap.org', u'entrance': u'main'}),dsl.way(31198945, wkt_loads('POLYGON ((-122.052427415712 37.9741442063009, -122.052401723894 37.97490870120568, -122.052341626602 37.9750440944344, -122.050822395793 37.97522671911049, -122.050084160293 37.97474639895169, -122.049140120761 37.9735758576706, -122.050548768958 37.97324451957718, -122.052229966012 37.97285185473669, -122.052427415712 37.9741442063009))'), {u'website': u'http://www.waterworldcalifornia.com/', u'source': u'openstreetmap.org', u'way_area': u'81866', u'name': u'Waterworld', u'leisure': u'water_park'})) # noqa
self.assert_has_feature(
13, 1318, 3160, 'landuse',
{'kind': 'water_park', 'sort_rank': 121})
self.assert_has_feature(
13, 1318, 3160, 'pois',
{'kind': 'water_park', 'min_zoom': 13})
def test_water_park_node(self):
# Antioch WaterPark
self.generate_fixtures(dsl.way(2753215890, wkt_loads('POINT (-121.772067886777 37.96553969055471)'), {u'source': u'openstreetmap.org', u'name': u'Antioch WaterPark', u'leisure': u'water_park'})) # noqa
self.assert_has_feature(
16, 10600, 25287, 'pois',
{'kind': 'water_park', 'min_zoom': 15})
def test_beach_resort_way(self):
# A very large leisure=beach_resort polygon (also tagged landuse=commercial
# and tourism=hotel) should be picked up as a beach_resort POI at z14.
# Sandos Finisterra Los Cabos, extra large resort
self.generate_fixtures(dsl.way(381817396, wkt_loads('POLYGON ((-109.911528186215 22.8774976682163, -109.911490726468 22.87811219854719, -109.909321744214 22.87794062692459, -109.909130762385 22.87774182538109, -109.909169030616 22.87528426003229, -109.910665534048 22.87476001560358, -109.910748897706 22.87575387670478, -109.911159787117 22.87626752325368, -109.911528186215 22.8774976682163))'), {u'name': u'Sandos Finisterra Los Cabos', u'way_area': u'80573.9', u'leisure': u'beach_resort', u'source': u'openstreetmap.org', u'landuse': u'commercial', u'tourism': u'hotel'})) # noqa
self.assert_has_feature(
14, 3189, 7122, 'pois',
{'kind': 'beach_resort', 'min_zoom': 14})
def test_beach_resort_node(self):
    """A leisure=beach_resort node shows up as a beach_resort POI at z16."""
    # Hilton Hawaiian Village
    # really this should be merged with its large AOI!
    props = {
        u'addr:housenumber': u'2005',
        u'name': u'Hilton Hawaiian Village',
        u'wheelchair:description': u'Clearly labeled accessible pathways and features. ADA compliant rooms and swimming pool!',  # noqa
        u'wheelchair': u'yes',
        u'leisure': u'beach_resort',
        u'addr:city': u'Honolulu',
        u'source': u'openstreetmap.org',
        u'addr:street': u'Kalia Road',
    }
    self.generate_fixtures(
        dsl.way(2407351205, wkt_loads('POINT (-157.837960852959 21.2830034117716)'), props))  # noqa
    self.assert_has_feature(
        16, 4034, 28801, 'pois', {'kind': 'beach_resort', 'min_zoom': 16})
def test_another_beach_resort_node(self):
    """A second beach_resort node check at a different location."""
    # Silver Gull Beach Club
    fixture = dsl.way(
        3575812477,
        wkt_loads('POINT (-73.90201124147698 40.55765950893559)'),
        {u'source': u'openstreetmap.org', u'name': u'Silver Gull Beach Club', u'leisure': u'beach_resort'})  # noqa
    self.generate_fixtures(fixture)
    self.assert_has_feature(
        16, 19314, 24677, 'pois', {'kind': 'beach_resort', 'min_zoom': 16})
def test_beach_resort_hotel(self):
# When a polygon carries both leisure=beach_resort and tourism=hotel, the
# beach_resort kind must win for the POI (matched by id below).
# Terrasol Beach Resort
# Needs to appear **before** tourism=hotel
self.generate_fixtures(dsl.way(381817391, wkt_loads('POLYGON ((-109.908072007991 22.87663500364009, -109.906840597399 22.8770294645879, -109.906859192526 22.87714260497978, -109.90672893681 22.87716337910149, -109.906736392827 22.87734968393129, -109.90642449776 22.87725549698129, -109.905270432114 22.8751045737028, -109.905714199865 22.87495402104009, -109.906376437892 22.87489666365959, -109.906832602393 22.87480487524709, -109.908072007991 22.87663500364009))'), {u'name': u'Terrasol Beach Resort', u'way_area': u'50750.2', u'leisure': u'beach_resort', u'source': u'openstreetmap.org', u'landuse': u'commercial', u'tourism': u'hotel'})) # noqa
self.assert_has_feature(
16, 12760, 28488, 'pois',
{'id': 381817391, 'kind': 'beach_resort'})
def test_beach(self):
# A polygon tagged both natural=beach and leisure=beach_resort must come
# out as kind 'beach' in the landuse layer (natural=beach takes priority).
# Needs to appear **after** natural=beach.
# Unnamed beach in Maskenthine Lake area
self.generate_fixtures(dsl.way(257716817, wkt_loads('POLYGON ((-97.23657827452129 41.97736257162749, -97.23656488962359 41.97741239066617, -97.2365274298762 41.97746227644729, -97.2364897904658 41.9774662833356, -97.23645502566428 41.97747830399917, -97.23643894582069 41.97750815530388, -97.23641738625389 41.97756799143391, -97.23648449040559 41.97765166842779, -97.23648179545981 41.9776815864325, -97.23643616104339 41.97769961736599, -97.2362617980467 41.97772345825911, -97.23621364834749 41.97770709686279, -97.23638091465338 41.97733559186379, -97.23643077115169 41.97710826686608, -97.236428615195 41.97698959541528, -97.2364006775896 41.97689870508889, -97.23645763077859 41.97685609811668, -97.23650587030939 41.9768760659646, -97.2365327299364 41.97698371860168, -97.2365515047258 41.97710138833558, -97.2365462046657 41.97720109356419, -97.2365462046657 41.97729478828849, -97.2365595895634 41.97732670990938, -97.23657827452129 41.97736257162749))'), {u'natural': u'beach', u'way_area': u'2181.95', u'surface': u'sand', u'access': u'yes', u'source': u'openstreetmap.org', u'leisure': u'beach_resort'})) # noqa
self.assert_has_feature(
16, 15066, 24333, 'landuse',
{'kind': 'beach', 'id': 257716817})
def test_summer_camp(self):
    """leisure=summer_camp nodes produce summer_camp POIs at min_zoom 15."""
    cases = (
        # Camp Ahmek
        (3655879348, 'POINT (-78.7068316389515 45.5614200134323)',
         {u'source': u'openstreetmap.org', u'name': u'Camp Ahmek', u'leisure': u'summer_camp'},  # noqa
         (15, 9219, 11714)),
        # Camp Goodtimes
        (4050178586, 'POINT (-122.58818319015 49.3035588116317)',
         {u'website': u'http://www.campgoodtimes.org/', u'source': u'openstreetmap.org', u'name': u'Camp Goodtimes', u'leisure': u'summer_camp'},  # noqa
         (15, 5225, 11211)),
    )
    for osm_id, point, props, (z, x, y) in cases:
        self.generate_fixtures(dsl.way(osm_id, wkt_loads(point), props))
        self.assert_has_feature(
            z, x, y, 'pois', {'kind': 'summer_camp', 'min_zoom': 15})
def test_battlefield_nodes(self):
    """historic=battlefield nodes become battlefield POIs with min_zoom 17."""
    cases = (
        # Battle of Blackburn's Ford (1861)
        (3838356961, 'POINT (-77.4495472316781 38.80331937506308)',
         {u'source': u'openstreetmap.org', u'historic': u'battlefield', u'name': u"Battle of Blackburn's Ford (1861)"},  # noqa
         (16, 18668, 25092)),
        # 2nd Battle of Kernstown
        (3992988013, 'POINT (-78.19618961575398 39.1403245573439)',
         {u'source': u'openstreetmap.org', u'historic': u'battlefield', u'name': u'2nd Battle of Kernstown (1864)'},  # noqa
         (16, 18532, 25013)),
    )
    for osm_id, point, props, (z, x, y) in cases:
        self.generate_fixtures(dsl.way(osm_id, wkt_loads(point), props))
        self.assert_has_feature(
            z, x, y, 'pois', {'kind': 'battlefield', 'min_zoom': 17})
def test_battlefield_ways(self):
# Large historic=battlefield polygons should be labeled at low zoom: the
# POI appears at min_zoom 10 and the polygon shows up in the landuse layer
# with sort_rank 25. Checked for two national battlefields below.
# Antietam National Battlefield
self.generate_fixtures(dsl.way(231393152, wkt_loads('POLYGON ((-77.7571536513265 39.47738031729748, -77.75650991859391 39.48092461682239, -77.7516582974074 39.48305109598149, -77.75131711726249 39.4856610921356, -77.75102848856169 39.4873195968996, -77.7505575018583 39.49033096937768, -77.75144593567428 39.4904618564733, -77.7518202636532 39.48819479891898, -77.7536517488544 39.48835785815387, -77.75316252635071 39.49132301531999, -77.75049309265241 39.4910769809272, -77.75000000739298 39.49515248489939, -77.74092468740359 39.4906909082977, -77.7408964803037 39.49020860913168, -77.74088983277059 39.48988159771769, -77.740900612554 39.48953378677329, -77.74089522266229 39.48882380890269, -77.7409421147201 39.48857131819859, -77.7410400310861 39.4880133673396, -77.74127404221758 39.48686625400699, -77.7419357412559 39.48421941849621, -77.7434618890921 39.48442061952611, -77.7430941188148 39.4831989833595, -77.74299701093248 39.48240657235379, -77.74176730714008 39.48245878065399, -77.7415921356597 39.4812814150282, -77.74838043476718 39.4807557853195, -77.74666698819429 39.47589159800938, -77.7414874819291 39.48096926869969, -77.73462309551699 39.48082525935928, -77.73654566988809 39.4784154680689, -77.7350930042422 39.47785070991189, -77.73331523829491 39.47831520562958, -77.73264886801709 39.4767177822101, -77.7289560735472 39.47765191672308, -77.72745624634879 39.47658527443539, -77.72659575013809 39.47639320358648, -77.7253962297392 39.4763206741677, -77.72543782173689 39.47599865927669, -77.72542174189329 39.4757667854432, -77.725215129378 39.4753185678039, -77.72494150254241 39.47481757854999, -77.72470695242178 39.4744418169064, -77.72445201054408 39.47413740618869, -77.7241557461634 39.47387661043469, -77.72371045127709 39.47354938353649, -77.72337385254011 39.47328650529269, -77.7231860148142 39.47302036692938, -77.7229566749222 39.4726570774055, -77.72278500687139 39.47241069959791, -77.72271934002408 39.47223567582368, -77.72271134501808 39.4720639107805, 
-77.7227621896632 39.47187030187457, -77.72294059507858 39.4715794020937, -77.72328527865308 39.47114558123369, -77.72367146439369 39.4705357587826, -77.72395452353979 39.4702065744853, -77.7243849962239 39.46992919025771, -77.72503807143549 39.4695377992316, -77.72572474363868 39.4691391939182, -77.7263148469488 39.46880896280408, -77.7266702204752 39.4685407962334, -77.72710213046381 39.4681464154349, -77.72727514598751 39.46785959163208, -77.72730730567469 39.46760910527428, -77.72719061451929 39.46735230727771, -77.72695318978968 39.46701270555519, -77.7266889952646 39.46665548741528, -77.72634700663599 39.46627246930618, -77.7259151864789 39.4657671845417, -77.72563491211029 39.4654338166445, -77.7253894923746 39.46520398812729, -77.725311698271 39.4650548835235, -77.7252956184274 39.46495661301228, -77.72440709477991 39.46556017346001, -77.723465121373 39.46485778755228, -77.72605604231541 39.46295725996779, -77.72643962294178 39.46214547669301, -77.72682580868241 39.46130476300839, -77.72740782715502 39.46037075578698, -77.72766402667401 39.459796210627, -77.72787189683069 39.45941100470967, -77.72817301211398 39.45905416369259, -77.7286824467116 39.45860715618349, -77.72935034412539 39.4578999837476, -77.73021533191239 39.45699298088689, -77.72732742793708 39.45452007746609, -77.72928719256089 39.44937665918739, -77.72869178919061 39.4491933921929, -77.7283900450866 39.44909190840738, -77.72816609508628 39.448949983467, -77.7279903846167 39.44877601056468, -77.7278697408741 39.44860828029749, -77.7278347964095 39.44846226168389, -77.72625034791139 39.44710548786819, -77.7258554485125 39.44631509371761, -77.72966682059999 39.44551699046867, -77.72956746692951 39.44505609121459, -77.7293838512855 39.44460351348219, -77.72909279713339 39.4440742768392, -77.72878296819189 39.44352228199608, -77.7283981299242 39.44301266970221, -77.72812450308869 39.44262015608489, -77.72795687745661 39.44232920341818, -77.7279541825108 39.44188486348789, -77.72800251187309 
39.44167570015068, -77.7280989909346 39.44153889380729, -77.7282398467711 39.4414478050533, -77.7284236420783 39.44137940169139, -77.72865154466591 39.4413556061857, -77.72901105044259 39.4413773898268, -77.72931000976908 39.44137530858748, -77.729744524872 39.44129448707969, -77.73072359870018 39.44104175449769, -77.73130426969989 39.44091438192388, -77.7318755982206 39.4408180892134, -77.73228190622349 39.4407030650375, -77.7327915204842 39.44051457205398, -77.7335077472603 39.440245255835, -77.73676449949129 39.44427427590178, -77.7418714218815 39.44609865984219, -77.7459892093124 39.4469636976172, -77.74597312946878 39.44737789914999, -77.74652783915678 39.4487713629572, -77.74686569553509 39.45130155778968, -77.74049601135 39.4506686015089, -77.73938021393562 39.4531936639095, -77.7396623747664 39.45413810236229, -77.73963767109609 39.4546318188335, -77.7392728652592 39.45496225594318, -77.7388202041875 39.45511228386128, -77.73845539835058 39.4551115208907, -77.74188498644229 39.45809626712708, -77.74244221141309 39.45919315504679, -77.74264514083571 39.45952537377579, -77.7428283073222 39.4598095963902, -77.7414368169471 39.46034557965589, -77.74132470719958 39.46061350038547, -77.74078374173548 39.46165666832228, -77.74360570936901 39.4624312837427, -77.74537817525611 39.46297002094499, -77.7453281390948 39.46345798785998, -77.74523758891418 39.46564998236328, -77.74500214047821 39.46890036250069, -77.74501929830021 39.4701866722038, -77.74552028873411 39.47113171223582, -77.74616078753169 39.4717635809758, -77.74695049649789 39.47232589217428, -77.74672304306799 39.4724179806883, -77.7464248023937 39.47247761625728, -77.7464248023937 39.4729744617726, -77.745864702814 39.47304741073871, -77.74637970696638 39.4742399629324, -77.74917140137489 39.47403457192537, -77.7492979739984 39.47298111871558, -77.74788393590968 39.47221258430239, -77.75063053489089 39.4717156640018, -77.75286212971969 39.47184818105199, -77.7532483154603 39.47032425440981, 
-77.75532495090259 39.47041315567039, -77.75406371624369 39.47714837873037, -77.7571536513265 39.47738031729748))'), {u'gnis:state_id': u'24', u'name': u'Antietam National Battlefield', u'way_area': u'1.29683e+07', u'gnis:county_id': u'043', u'leisure': u'park', u'ele': u'146', u'source': u'openstreetmap.org', u'historic': u'battlefield', u'date': u'1862', u'gnis:created': u'09/13/1996', u'boundary': u'national_park', u'gnis:feature_id': u'589057'})) # noqa
self.assert_has_feature(
10, 290, 389, 'pois',
{'kind': 'battlefield', 'min_zoom': 10})
self.assert_has_feature(
10, 290, 389, 'landuse',
{'kind': 'battlefield', 'sort_rank': 25})
# Same expectations for a battlefield tagged boundary=protected_area,
# this time checked at zoom 11.
# White Oak Road Battlefield
self.generate_fixtures(dsl.way(316054549, wkt_loads('POLYGON ((-77.565081806157 37.1419568067139, -77.5521795732257 37.14196217734999, -77.55216843411618 37.14258330674419, -77.552167895127 37.14262412323089, -77.55216681714869 37.14271177119101, -77.5521619662462 37.1431056128957, -77.552159900121 37.1432814088492, -77.5469617088979 37.14352816696068, -77.5469115829051 37.1419643972128, -77.5441524075099 37.14195780923269, -77.54403912995258 37.1435066848588, -77.54282047543809 37.1456199216102, -77.5445457797728 37.14662331604248, -77.54502287502019 37.14680991600809, -77.5459458939746 37.1512800863047, -77.54594831942589 37.151287461069, -77.5476693118472 37.1515702791572, -77.54815979199229 37.1516836924934, -77.5481787464448 37.15168806004911, -77.54739793079992 37.15164008852248, -77.5468219310397 37.15160629363778, -77.54649539343391 37.15158459903228, -77.54637474969131 37.1515765799026, -77.54611881966679 37.15155846525818, -77.5460382407858 37.151552808906, -77.5455472216515 37.15151786839139, -77.5448795039009 37.15146288000778, -77.54431347544029 37.1514129035516, -77.54424960522358 37.15140681759117, -77.54424421533189 37.15140617319538, -77.54418591466998 37.15140008723438, -77.54412222411628 37.1513927840807, -77.54405871322579 37.15138497972928, -77.54399520233521 37.15137660258058, -77.5439318711076 37.15136758103478, -77.5438689890377 37.15135820149, -77.5437428655719 37.15133750920769, -77.5436470153311 37.15131982410379, -77.54355062610109 37.1513014229995, -77.5434544165341 37.1512823774937, -77.54336027309239 37.15126318878378, -77.54326253638941 37.1512425680752, -77.54316686581168 37.15122187576129, -77.5429664516718 37.15117655312999, -77.5428618877727 37.1511520660099, -77.54275759336819 37.15112700608339, -77.54265347862679 37.15110137335, -77.54254873506468 37.15107488140968, -77.5415227691787 37.15083079712308, -77.54149186713289 37.15082349391429, -77.54127366635041 37.15077315903488, -77.54073647381048 37.15064929069658, 
-77.5402742007653 37.15054561345749, -77.5397375472145 37.1504473061099, -77.5395155735079 37.15040656544549, -77.53940283493969 37.15039081333739, -77.53929171333908 37.15037577723107, -77.53918050190688 37.15036145712708, -77.5390691108116 37.15034770982479, -77.53895762988491 37.15033460692489, -77.53879081273661 37.1503162771813, -77.5387350273575 37.1503104059344, -77.53865992819969 37.1503019570661, -77.53713539733099 37.15013197709088, -77.5365319091232 37.1500633119463, -77.53653244811228 37.15005915909928, -77.53653828716169 37.15000968292141, -77.53682817350391 37.1475495089644, -77.53730652639268 37.14754771888462, -77.53817870070201 37.14532992073969, -77.5389701164673 37.14346586884828, -77.53406414720619 37.14105367512509, -77.53520464829089 37.14049619436879, -77.53523492151598 37.14048137112499, -77.53533023276759 37.14043489629827, -77.53568057572841 37.14031810042128, -77.52966743287961 37.1365233894929, -77.5296649175968 37.13652181399298, -77.52973112343319 37.13645872235999, -77.5357895413724 37.1402817941248, -77.5363779378835 37.14008565389029, -77.53650271387639 37.1400716898893, -77.53665273252889 37.14008300431089, -77.5368562009407 37.14003738856299, -77.53709802741518 37.13992617768409, -77.53721220328779 37.13985471046509, -77.54137993704849 37.1398953135928, -77.54359850630568 37.1399167967204, -77.54521673145848 37.13889476597458, -77.5434090516123 37.13747870924649, -77.54355538717211 37.13729251593809, -77.5424480339213 37.13664298415549, -77.54265671256178 37.13562771255759, -77.54186242218761 37.13612629088869, -77.5369161185702 37.1320827846269, -77.53783635274721 37.1311495985473, -77.5371312650807 37.13005948226109, -77.54677944072679 37.1239371788359, -77.5469292797162 37.1241229752552, -77.54719724716539 37.1236638552868, -77.5475324085979 37.12346466368231, -77.54804687376109 37.12335478934039, -77.54906673110318 37.1229784649292, -77.54958514885369 37.1239351733208, -77.54972330974439 37.12419037469169, -77.54982428038228 
37.12437667186528, -77.5500751798411 37.12456425782948, -77.55042489398129 37.12457557457, -77.55232240535588 37.1292274711103, -77.5547732789455 37.13101545657838, -77.55461194152051 37.1320315778611, -77.5547069832776 37.13225968046039, -77.5555001058419 37.1328228796635, -77.55514401366329 37.1340783175303, -77.5553482905589 37.13476095760389, -77.55588224916379 37.1346837562763, -77.5559780994046 37.13616245595158, -77.55605894778019 37.13636820260389, -77.55614527587899 37.1365884862519, -77.55628379609578 37.1367647986445, -77.55645160139089 37.13697849272398, -77.55666010036829 37.1372439623769, -77.55667671920109 37.13737909588351, -77.55880994850628 37.13820199428929, -77.55949141048079 37.13814878652629, -77.5600336335863 37.13852990510139, -77.562258401219 37.13919789634888, -77.5624350100038 37.13914161000699, -77.56351370699699 37.13934827949988, -77.5640125414743 37.13881219800559, -77.5650325784794 37.1389424591402, -77.565081806157 37.1419568067139))'), {u'website': u'http://www.civilwar.org/', u'name': u'White Oak Road Battlefield', u'way_area': u'5.44096e+06', u'source': u'openstreetmap.org', u'historic': u'battlefield', u'owner': u'Civil War Trust', u'boundary': u'protected_area'})) # noqa
self.assert_has_feature(
11, 582, 796, 'pois',
{'kind': 'battlefield', 'min_zoom': 10})
self.assert_has_feature(
11, 582, 796, 'landuse',
{'kind': 'battlefield', 'sort_rank': 25})
def test_boat_storage_node(self):
    """An unnamed amenity=boat_storage node gets a POI with min_zoom 17."""
    # unnamed Boat Storage
    props = {u'source': u'openstreetmap.org', u'amenity': u'boat_storage'}
    self.generate_fixtures(
        dsl.way(2117389172, wkt_loads('POINT (-121.970562865891 37.24155295545678)'), props))  # noqa
    self.assert_has_feature(
        16, 10563, 25453, 'pois', {'kind': 'boat_storage', 'min_zoom': 17})
def test_boat_storage_way(self):
    """An amenity=boat_storage polygon also yields a boat_storage POI."""
    # in Tiburon, CA
    shape = wkt_loads('POLYGON ((-122.448582479715 37.88885607598908, -122.448190904082 37.88904869843268, -122.447988423817 37.88876086345788, -122.448498038078 37.88871747542818, -122.448582479715 37.88885607598908))')  # noqa
    props = {u'source': u'openstreetmap.org', u'amenity': u'boat_storage', u'way_area': u'1774.38'}  # noqa
    self.generate_fixtures(dsl.way(261064362, shape, props))
    self.assert_has_feature(
        16, 10476, 25304, 'pois', {'kind': 'boat_storage', 'min_zoom': 17})
def test_monument_nodes(self):
    """historic=monument nodes in DC produce monument POIs at min_zoom 17."""
    cases = (
        # Red Cross monument in DC
        (3314950786, 'POINT (-77.04514662151358 38.896204468872)',
         {u'source': u'openstreetmap.org', u'historic': u'monument', u'name': u'Red Cross'},  # noqa
         (16, 18742, 25070)),
        # Major General James B. McPherson in DC
        (2316449632, 'POINT (-77.0341319286574 38.90196197487669)',
         {u'wikipedia': u'en:Major General James B. McPherson (statue)', u'historic': u'monument', u'wikidata': u'Q15243968', u'name': u'Major General James B. McPherson', u'source': u'openstreetmap.org'},  # noqa
         (16, 18744, 25069)),
    )
    for osm_id, point, props, (z, x, y) in cases:
        self.generate_fixtures(dsl.way(osm_id, wkt_loads(point), props))
        self.assert_has_feature(
            z, x, y, 'pois', {'kind': 'monument', 'min_zoom': 17})
def test_monument_ways(self):
# Monument polygons: a small historic=monument polygon without landuse gets
# a POI whose min_zoom lies in [16, 17); a monument that is also a building
# (Jefferson Monument) gets a POI at min_zoom 15.
# polygon but no landuse / Netherlands Carillon near DC
self.generate_fixtures(dsl.way(504619701, wkt_loads('POINT (-77.06945251781912 38.8881826544277)'), {u'source': u'openstreetmap.org', u'wpt_description': u'20-SEP-09 3:45:57PM', u'name': u'083', u'ele': u'33.771729'}),dsl.way(504619705, wkt_loads('POINT (-77.06953830692871 38.8881826544277)'), {u'source': u'openstreetmap.org', u'wpt_description': u'20-SEP-09 3:42:59PM', u'name': u'080', u'ele': u'28.724976'}),dsl.way(41273627, wkt_loads('POLYGON ((-77.06953830692871 38.8881826544277, -77.06953830692871 38.88824950021529, -77.06945251781912 38.88824950021529, -77.06945251781912 38.8881826544277, -77.06953830692871 38.8881826544277))'), {u'way_area': u'91.298', u'source': u'openstreetmap.org', u'historic': u'monument', u'name': u'Netherlands Carillon'})) # noqa
self.assert_has_feature(
16, 18737, 25072, 'pois',
{'kind': 'monument', 'min_zoom': (lambda z: z >= 16 and z < 17)})
# building, Jefferson Monument
self.generate_fixtures(dsl.way(248460669, wkt_loads('POLYGON ((-77.0370703179518 38.8813805569467, -77.03706573654379 38.88143027651917, -77.0370529804668 38.88148006598588, -77.0370291751117 38.88153419101258, -77.03699899171821 38.88158118325048, -77.03695793870969 38.88162810552841, -77.0369230840767 38.881659014076, -77.0368929905147 38.88168188079841, -77.036878078481 38.88168992261029, -77.03687744966028 38.88175348785668, -77.0368896667481 38.88175411721529, -77.03688948708511 38.88179509544039, -77.03688912775888 38.88184390565048, -77.03664577414848 38.88184285672079, -77.0364399701169 38.88184208750568, -77.03619679616949 38.88184110850469, -77.03619706566408 38.8817902703606, -77.03619742499021 38.8817488026315, -77.0362118878663 38.8817488026315, -77.03621242685539 38.88169670570311, -77.03621242685539 38.88168341923199, -77.03616993654252 38.8816523009085, -77.03615089225849 38.88163537812908, -77.03611271385888 38.88159446974068, -77.03607363714399 38.88153698817049, -77.03605055044119 38.88148677916968, -77.03604004015239 38.88145188459259, -77.03603123666258 38.88138230517421, -77.03603707571199 38.88131552285419, -77.0360539640393 38.8812576214831, -77.03607327781791 38.88121699259429, -77.03611064773368 38.88116181838618, -77.03615709063391 38.88111258813138, -77.03619464021278 38.88108209892239, -77.0362464730047 38.88104881252328, -77.0362954311877 38.88102440714939, -77.03633657402769 38.88101077090526, -77.0363929882275 38.88099972204919, -77.0363947848581 38.88097937256921, -77.0367349768562 38.8809820998194, -77.0367350666877 38.88100265908709, -77.03678734863729 38.88101909251098, -77.03685112902239 38.88104671464049, -77.0368886786013 38.88106951163047, -77.0369247908757 38.88109601487028, -77.03695793870969 38.88112545513309, -77.0369994408758 38.8811727972878, -77.0370261208398 38.88121349613108, -77.03704839905879 38.88126048858109, -77.037063041598 38.88130901944169, -77.0370682518266 38.88133978826989, -77.0370703179518 
38.8813805569467))'), {u'building': u'yes', u'name:en': u'Jefferson Monument', u'name': u'Jefferson Monument', u'way_area': u'11480.4', u'wikipedia': u'en:Jefferson Memorial', u'name:de': u'Jefferson-Denkmal', u'source': u'openstreetmap.org', u'historic': u'monument', u'wikidata': u'Q326183', u'name:it': u'Monumento a Jefferson', u'ref:nrhp': u'66000029', u'tourism': u'attraction', u'start_date': u'1943', u'name:es': u'Monumento a Jefferson', u'alt_name': u'Jefferson Memorial'})) # noqa
self.assert_has_feature(
15, 9371, 12537, 'pois',
{'kind': 'monument', 'min_zoom': 15})
def test_monument_building_attraction(self):
# A monument polygon that is also building=yes and tourism=attraction must
# still come through as kind 'monument' with min_zoom 15.
# building, and tourism=attraction, National World War II Memorial
self.generate_fixtures(dsl.way(66418767, wkt_loads('POLYGON ((-77.04105542421519 38.88959416399089, -77.04096020279511 38.8895952827271, -77.0409292109178 38.88959570225321, -77.0409228328792 38.88962045428789, -77.0409149277047 38.8896513593585, -77.04088133071311 38.88971051246538, -77.04084782355299 38.88976141486299, -77.04080362644099 38.88981371564017, -77.040759429329 38.88985077365418, -77.04071873564669 38.88987147019699, -77.04067462836619 38.8898906983986, -77.04061453107369 38.889905871194, -77.04055623041178 38.88991265350239, -77.04052577752358 38.88991328278869, -77.04047843630819 38.88991412183711, -77.04042363907578 38.8899099965157, -77.0403692011696 38.88989440419681, -77.0403105411816 38.88986727495231, -77.0402558337808 38.88983021694688, -77.04022044015861 38.88979441749699, -77.04018684316688 38.88974631195759, -77.04015863606701 38.8897008633804, -77.04013914262541 38.8896513593585, -77.0401145287866 38.8896513593585, -77.0397804453324 38.8896513593585, -77.039782241963 38.88976686868919, -77.03964983029009 38.88976519058888, -77.03962315032619 38.8897654003514, -77.0396051840205 38.88976547027229, -77.03960293823231 38.88965660343028, -77.03956988022979 38.88965688311408, -77.0395743718062 38.88917002184471, -77.039611382396 38.88916918278748, -77.039611382396 38.8890531131126, -77.03962854021789 38.88905290334799, -77.03976804858149 38.88905108538769, -77.0397683180761 38.88916638593012, -77.04010312018248 38.88916456797268, -77.04012503907539 38.88916449805129, -77.04014444268549 38.8891191190233, -77.0401692361874 38.88907779541479, -77.0401939398577 38.8890420655071, -77.04022223678921 38.8890048672281, -77.04026284063998 38.88897046579469, -77.04031584124169 38.88893886121119, -77.0403618349843 38.8889169058064, -77.0404077388953 38.88890306131199, -77.04045373263789 38.8888948804731, -77.04050610441891 38.88889411133439, -77.04054033023129 38.888893482039, -77.04059153420241 38.8888990058535, -77.04065513492461 38.88891550737279, 
-77.0407152322171 38.88894165807761, -77.04076472938918 38.88897186422729, -77.04080003317991 38.88900081177539, -77.04083012674189 38.88903108782159, -77.0408566270428 38.88906821616541, -77.04089723089361 38.88914107436559, -77.04091825147128 38.8891910681929, -77.04095696886 38.8891910681929, -77.0410466207254 38.8891912080358, -77.04105542421519 38.88959416399089))'), {u'addr:housenumber': u'1964', u'name': u'National World War II Memorial', u'name:lt': u'Nacionalinis Antrojo pasaulinio karo memorialas', u'source': u'openstreetmap.org', u'addr:postcode': u'20227', u'wheelchair': u'yes', u'way_area': u'16778.2', u'wikipedia': u'en:National World War II Memorial', u'addr:state': u'D.C.', u'name:de': u'Nationales Denkmal des 2. Weltkriegs', u'phone': u'202-208-3818', u'start_date': u'2002', u'historic': u'monument', u'wikidata': u'Q1470020', u'name:it': u'Memoriale Nazionale della Seconda Guerra Mondiale', u'name:pl': u'Pomnik II Wojny \u015awiatowej', u'addr:street': u'Independence Avenue Southwest', u'tourism': u'attraction', u'email': u'WWII.CustomerService@Oaktreesys.com', u'addr:city': u'Washington'})) # noqa
self.assert_has_feature(
15, 9371, 12536, 'pois',
{'kind': 'monument', 'min_zoom': 15})
def test_dam_node(self):
    """A waterway=dam node shows up as a dam POI at min_zoom 14."""
    # Letts Valley 1-039 Dam
    props = {u'gnis:state_id': u'06', u'name': u'Letts Valley 1-039 Dam', u'gnis:county_id': u'011', u'ele': u'1418', u'source': u'openstreetmap.org', u'gnis:created': u'07/01/1994', u'waterway': u'dam', u'gnis:feature_id': u'1663242'}  # noqa
    self.generate_fixtures(
        dsl.way(358811238, wkt_loads('POINT (-122.716532232156 39.3079024910915)'), props))  # noqa
    self.assert_has_feature(
        14, 2607, 6243, 'pois', {'kind': 'dam', 'min_zoom': 14})
def test_dam_way(self):
# A waterway=dam polygon contributes both a landuse polygon (sort_rank 223
# at z13) and a POI (min_zoom 12, checked at z12).
# O'Shaughnessy Dam, Yosemite
# 13, 1370, 3161
self.generate_fixtures(dsl.way(189656737, wkt_loads('POLYGON ((-119.789060343112 37.94813892096909, -119.789017403642 37.94832076250939, -119.788941496 37.94841178936769, -119.788826601476 37.9483809748407, -119.788329743292 37.94814848413098, -119.788040665433 37.94793079792151, -119.787833873255 37.94772409105717, -119.787719607551 37.94777559073129, -119.78769427506 37.94774321748778, -119.787665079813 37.94770878990909, -119.78775068926 37.94765785689778, -119.787658072954 37.94752538839991, -119.78767136802 37.94751738361128, -119.787606779151 37.94740085362629, -119.787624116636 37.947394903154, -119.787499520306 37.94707959827269, -119.787451011281 37.94675125755689, -119.787461431738 37.94649219675929, -119.787555036191 37.94650686060238, -119.787880136492 37.9469248143209, -119.788051804543 37.94708979912581, -119.78829318186 37.9472209224657, -119.7885400389 37.9472843942955, -119.788638584087 37.94737776012397, -119.788673887877 37.94741147946841, -119.788717007011 37.94744930745397, -119.78878159588 37.94748366431551, -119.788792106169 37.94753821022748, -119.78880279612 37.94774116316769, -119.788899365014 37.94781306433669, -119.788985243955 37.9478638555634, -119.789028093594 37.94794850752979, -119.789060343112 37.94813892096909))'), {u'name': u"O'Shaughnessy Dam", u'way_area': u'20361.8', u'wikipedia': u"en:O'Shaughnessy Dam (California)", u'source': u'openstreetmap.org', u'wikidata': u'Q7071958', u'waterway': u'dam'})) # noqa
self.assert_has_feature(
13, 1370, 3161, 'landuse',
{'kind': 'dam', 'sort_rank': 223})
self.assert_has_feature(
12, 685, 1580, 'pois',
{'kind': 'dam', 'min_zoom': 12})
def test_linear_dam_way(self):
    """A linear (LINESTRING) dam appears in landuse only; its label is
    expected from the stylesheet, so no POI assertion is made here."""
    # Named dam line in front of Cherry Lake
    # Should be labeled in the stylesheet, no POI generate
    geom = wkt_loads('LINESTRING (-119.914729171638 37.97434928092859, -119.906693292263 37.9770387079972)')  # noqa
    props = {u'gnis:state_id': u'06', u'name': u'Cherry Valley Dam', u'gnis:county_id': u'109', u'ele': u'1420', u'source': u'openstreetmap.org', u'gnis:created': u'01/19/1981', u'waterway': u'dam', u'gnis:feature_id': u'258257'}  # noqa
    self.generate_fixtures(dsl.way(62201624, geom, props))
    self.assert_has_feature(
        12, 683, 1580, 'landuse', {'kind': 'dam', 'sort_rank': 265})
def test_dog_park_node(self):
    """A leisure=dog_park node becomes a dog_park POI with min_zoom 17."""
    # Indian Lake Dog Exercise Area, near Madison, WI
    fixture = dsl.way(
        262220409,
        wkt_loads('POINT (-89.63521750678829 43.18944849586439)'),
        {u'source': u'openstreetmap.org', u'name': u'Indian Lake Dog Exercise Area', u'leisure': u'dog_park'})  # noqa
    self.generate_fixtures(fixture)
    self.assert_has_feature(
        16, 16450, 24033, 'pois', {'kind': 'dog_park', 'min_zoom': 17})
def test_dog_park_ways(self):
# leisure=dog_park polygons: both examples yield a POI with min_zoom 16;
# the second one is also checked in the landuse layer with sort_rank 111.
# Dog Run at Upper Noe Valley Rec Center, SF
self.generate_fixtures(dsl.way(4177860895, wkt_loads('POINT (-122.427832025472 37.74251689791318)'), {u'source': u'openstreetmap.org', u'barrier': u'gate'}),dsl.way(417184097, wkt_loads('POLYGON ((-122.427847117169 37.74273277610897, -122.427162331428 37.74276488429628, -122.427153438107 37.74274030590639, -122.426962096951 37.74274961159829, -122.426933171199 37.74249849957781, -122.427082111873 37.7424879152052, -122.427055431909 37.74231181659579, -122.427054623425 37.74230116116019, -122.427069176133 37.74230052183398, -122.427060103149 37.7421694598501, -122.427100796831 37.74216839430459, -122.42711427156 37.74235209412828, -122.427155234737 37.74236857452328, -122.427162511091 37.74243861616107, -122.427193502968 37.7427378196526, -122.427792589431 37.74269981547749, -122.427810286242 37.74267140112828, -122.427803818372 37.7425296133624, -122.427808759106 37.74251512201229, -122.427832025472 37.74251689791318, -122.427847117169 37.74273277610897))'), {u'name': u'Dog Run', u'barrier': u'fence', u'way_area': u'1662.33', u'leisure': u'dog_park', u'source': u'openstreetmap.org', u'addr:city': u'San Francisco'})) # noqa
self.assert_has_feature(
16, 10480, 25338, 'pois',
{'kind': 'dog_park', 'min_zoom': 16})
# Dog park at Walter Hass Playground, SF
self.generate_fixtures(dsl.way(375333476, wkt_loads('POLYGON ((-122.435567238721 37.74031275899967, -122.435563196302 37.74034671522779, -122.435532294256 37.74039232168538, -122.43549609215 37.74042308116498, -122.435470579996 37.74046655643549, -122.435455937457 37.74051429395798, -122.435449200092 37.74056728825491, -122.43543437789 37.74060337550239, -122.435402218203 37.74062987262371, -122.435360716037 37.74064471953209, -122.435347600634 37.74064599821298, -122.435327119045 37.74064791623439, -122.435293611885 37.7406436539646, -122.435262799671 37.74063306932671, -122.43523863499 37.74061495467461, -122.435223902619 37.7405842663125, -122.435222465315 37.74055350689979, -122.435231897625 37.7405238130436, -122.435252019888 37.74049305360581, -122.435272231981 37.74045909744478, -122.435281484629 37.74042627787698, -122.435289569466 37.74037008676489, -122.435285616879 37.7403234147215, -122.435299001777 37.74027780822149, -122.435325771572 37.74025017436279, -122.435378143353 37.74022161699669, -122.435420992992 37.74021096126019, -122.435477317361 37.7402141579813, -122.43552429925 37.74024065524199, -122.435551069046 37.74027667161049, -122.435567238721 37.74031275899967))'), {u'source': u'openstreetmap.org', u'way_area': u'1480.15', u'name': u'Dog Park', u'leisure': u'dog_park'})) # noqa
self.assert_has_feature(
16, 10479, 25338, 'pois',
{'kind': 'dog_park', 'min_zoom': 16})
self.assert_has_feature(
16, 10479, 25338, 'landuse',
{'kind': 'dog_park', 'sort_rank': 111})
def test_recreation_track_way(self):
# A leisure=track polygon should yield a 'recreation_track' polygon in the
# landuse layer (sort_rank 72) and a matching POI from zoom 16.
# Red Gra / Running Track
self.generate_fixtures(dsl.way(95922608, wkt_loads('POLYGON ((0.002371732013132 51.1255746157489, 0.002473421303295 51.12573259131459, 0.002628021363692 51.1258406706463, 0.004202229067583 51.1260670895425, 0.004399588935504 51.12605147253781, 0.004539456625240999 51.12599148514911, 0.004647074796279 51.1259211802814, 0.004692080392013 51.1258200921806, 0.004744272510020001 51.12569927103149, 0.004756309934828 51.12557816766799, 0.004735379188708 51.12545249722189, 0.004616082918977 51.1253554674511, 0.004452679368795 51.12527506958999, 0.002973423590436 51.1250640950142, 0.002703120521444 51.1250753710715, 0.002535225394842 51.12518638370859, 0.002407125635327 51.1253554674511, 0.002371732013132 51.1255746157489))'), {u'name': u'Red Gra / Running Track', u'way_area': u'35100.3', u'leisure': u'track', u'lit': u'no', u'source': u'openstreetmap.org', u'surface': u'gravel', u'sport': u'running'})) # noqa
self.assert_has_feature(
15, 16384, 10951, 'landuse',
{'id': 95922608, 'kind': 'recreation_track', 'sort_rank': 72})
# Cox Stadium recreation track
self.assert_has_feature(
16, 32768, 21903, 'pois',
{'id': 95922608, 'kind': 'recreation_track', 'min_zoom': 16})
def test_recreation_track_nodes(self):
# leisure=track point features (running, cycling, motor sports) should all
# become 'recreation_track' POIs that only appear from zoom 17.
# Pista de Atletismo
self.generate_fixtures(dsl.way(4218421638, wkt_loads('POINT (-64.30885444216 -36.6260189229092)'), {u'source': u'openstreetmap.org', u'sport': u'running', u'name': u'Pista de Atletismo', u'leisure': u'track'})) # noqa
self.assert_has_feature(
16, 21060, 39942, 'pois',
{'kind': 'recreation_track', 'min_zoom': 17})
# cycle, Sand Pit
self.generate_fixtures(dsl.way(418185265, wkt_loads('POINT (-122.01112772985 36.99488990613708)'), {u'source': u'openstreetmap.org', u'sport': u'cycling', u'name': u'Sand Pit', u'leisure': u'track'})) # noqa
self.assert_has_feature(
16, 10556, 25509, 'pois',
{'kind': 'recreation_track', 'min_zoom': 17})
# motor, Mazda Raceway Laguna Seca
self.generate_fixtures(dsl.way(444949878, wkt_loads('POINT (-121.752977609012 36.58526005267629)'), {u'source': u'openstreetmap.org', u'sport': u'motor', u'name': u'Mazda Raceway Laguna Seca', u'leisure': u'track'})) # noqa
self.assert_has_feature(
16, 10603, 25602, 'pois',
{'kind': 'recreation_track', 'min_zoom': 17})
def test_fishing_area_node(self):
# A leisure=fishing point becomes a 'fishing_area' POI from zoom 17.
# Unnamed fishing spot near Davis, CA
self.generate_fixtures(dsl.way(2613055910, wkt_loads('POINT (-121.756509335551 38.51715097092738)'), {u'source': u'openstreetmap.org', u'leisure': u'fishing'})) # noqa
self.assert_has_feature(
16, 10602, 25159, 'pois',
{'kind': 'fishing_area', 'min_zoom': 17})
def test_fishing_area_ways(self):
# leisure=fishing polygons get an earlier min_zoom (16) than point
# features, including when combined with natural=water.
# 16, 10471, 22459, 'pois',
self.generate_fixtures(dsl.way(62099107, wkt_loads('POLYGON ((-122.481363711231 49.1669114107027, -122.479427212973 49.1672838608298, -122.477683672838 49.16750629580189, -122.476121502559 49.16769037511028, -122.474800440102 49.1677848814198, -122.474607212485 49.16779868437708, -122.474634431438 49.16745631116189, -122.474692462605 49.1674556650616, -122.474733695277 49.1674553713797, -122.4748520034 49.1674543141247, -122.474858830596 49.1675454141806, -122.474887037696 49.16756479714959, -122.475013969645 49.16754159632221, -122.475531309418 49.16745836693539, -122.477210260684 49.16720650467838, -122.47986666881 49.16680298202059, -122.481193570316 49.16667000090749, -122.481363711231 49.1669114107027))'), {u'source': u'openstreetmap.org', u'way_area': u'43410', u'name': u'Two Bit Bar', u'leisure': u'fishing'})) # noqa
self.assert_has_feature(
16, 10471, 22460, 'pois',
{'kind': 'fishing_area', 'min_zoom': 16})
# Alpine Lake
self.generate_fixtures(dsl.way(234164554, wkt_loads('POLYGON ((-111.585414840494 40.68151227447739, -111.58533758538 40.68175070547638, -111.585111479423 40.6819011890593, -111.584510326835 40.68206849861048, -111.584179387484 40.68208110130799, -111.583892645245 40.6820644112486, -111.583556226171 40.682009981189, -111.583374227495 40.68190956817138, -111.583318981105 40.68179668849949, -111.583346559384 40.68163768928918, -111.583434773945 40.6815707923359, -111.583649920455 40.68154156747469, -111.583964330805 40.68149136063199, -111.584234544042 40.6813951704831, -111.58454886456 40.6812487731699, -111.584802638628 40.68116089375347, -111.585056322864 40.68116089375347, -111.585282428821 40.68127391074909, -111.585414840494 40.68151227447739))'), {u'natural': u'water', u'name': u'Alpine Lake', u'way_area': u'21104.5', u'leisure': u'fishing', u'water': u'lake', u'source': u'openstreetmap.org', u'boat': u'private'})) # noqa
self.assert_has_feature(
16, 12454, 24647, 'pois',
{'kind': 'fishing_area', 'min_zoom': 16})
def test_swimming_area_node(self):
# leisure=swimming_area point becomes a 'swimming_area' POI at zoom 16.
# Swimming hole at Seneca Rocks
self.generate_fixtures(dsl.way(3733554139, wkt_loads('POINT (-79.36861763415348 38.8404317111475)'), {u'source': u'openstreetmap.org', u'leisure': u'swimming_area'})) # noqa
self.assert_has_feature(
16, 18319, 25083, 'pois',
{'kind': 'swimming_area', 'min_zoom': 16})
def test_swimming_area_way(self):
# leisure=swimming_area polygon also yields a POI at zoom 16.
# Pine Lake Swimming Beach
self.generate_fixtures(dsl.way(368533731, wkt_loads('POLYGON ((-122.041398798968 47.58771120686248, -122.041206200172 47.58791211575618, -122.040999138499 47.58781408491459, -122.040715720026 47.58783129182468, -122.040850107993 47.5875739146357, -122.041078998727 47.5873900903225, -122.04131893874 47.5875248989212, -122.041286240063 47.58755919174979, -122.041355230677 47.58759348455588, -122.041329808355 47.58761069153849, -122.041398798968 47.58771120686248))'), {u'source': u'openstreetmap.org', u'way_area': u'3699.56', u'name': u'Pine Lake Swimming Beach', u'leisure': u'swimming_area'})) # noqa
self.assert_has_feature(
# 16, 10551, 22893, 'pois',
16, 10551, 22892, 'pois',
{'kind': 'swimming_area', 'min_zoom': 16})
def test_firepit_node(self):
# leisure=firepit point becomes a 'firepit' POI, visible only from z18.
# UC Berkeley area
self.generate_fixtures(dsl.way(3795571179, wkt_loads('POINT (-122.267422829627 37.86982551522379)'), {u'source': u'openstreetmap.org', u'leisure': u'firepit'})) # noqa
self.assert_has_feature(
16, 10509, 25309, 'pois',
{'kind': 'firepit', 'min_zoom': 18})
def test_firepit_way(self):
# leisure=firepit polygon gets the same 'firepit' POI treatment (z18).
# Bloomfield area
self.generate_fixtures(dsl.way(349337076, wkt_loads('POLYGON ((-120.989937198076 39.3205694852611, -120.9899327065 39.32059165410438, -120.989919231771 39.3206116686028, -120.989898121361 39.32062709644129, -120.989871531229 39.3206364782333, -120.989842425814 39.3206387020653, -120.989813859388 39.32063342046418, -120.989789245549 39.3206212588812, -120.989770919917 39.3206036072083, -120.989761218112 39.32058220281161, -120.989760858786 39.3205596169963, -120.989770201265 39.32053807359638, -120.989787987908 39.3205200744282, -120.989812422083 39.32050756535101, -120.989840808846 39.32050186677066, -120.98986982443 39.32050367363768, -120.989896504394 39.3205125689822, -120.989917974129 39.32052771886307, -120.989932077679 39.32054731641071, -120.989937198076 39.3205694852611))'), {u'source': u'openstreetmap.org', u'way_area': u'300.608', u'name': u'Fire pit', u'leisure': u'firepit'})) # noqa
self.assert_has_feature(
16, 10742, 24971, 'pois',
{'kind': 'firepit', 'min_zoom': 18})
def test_stone(self):
# natural=stone polygon: landuse polygon (sort_rank 28) plus a named POI
# from zoom 17.
# Old Man Boulder
self.generate_fixtures(dsl.way(329837642, wkt_loads('POLYGON ((-122.635268924756 37.89168410851629, -122.635158342144 37.89176372090539, -122.635100760135 37.8917364981569, -122.635074170002 37.89167496336538, -122.63516984058 37.89163235687251, -122.635253024575 37.89163171883829, -122.635268924756 37.89168410851629))'), {u'sport': u'climbing', u'source': u'openstreetmap.org', u'way_area': u'258.122', u'natural': u'stone', u'name': u'Old Man Boulder'})) # noqa
self.assert_has_feature(
16, 10442, 25304, 'landuse',
{'kind': 'stone', 'sort_rank': 28})
self.assert_has_feature(
16, 10442, 25304, 'pois',
{'kind': 'stone', 'min_zoom': 17, 'name': 'Old Man Boulder'})
def test_rock(self):
# natural=rock polygon: landuse polygon (sort_rank 27, one below stone)
# plus a named POI from zoom 17.
# Goodrich Pinnacle
self.generate_fixtures(dsl.way(377706598, wkt_loads('POLYGON ((-119.566239075936 37.73144219101408, -119.565670532193 37.73176460006549, -119.565531023829 37.7320785532334, -119.565005329725 37.73238397957958, -119.564586894465 37.73190456124839, -119.564887291096 37.73169667972098, -119.565144838088 37.73171792259689, -119.565927989353 37.73129370334119, -119.566222996092 37.7313234008996, -119.566239075936 37.73144219101408))'), {u'sport': u'climbing', u'source': u'openstreetmap.org', u'way_area': u'10681.6', u'natural': u'rock', u'name': u'Goodrich Pinnacle'})) # noqa
self.assert_has_feature(
16, 11001, 25340, 'landuse',
{'kind': 'rock', 'sort_rank': 27})
self.assert_has_feature(
16, 11001, 25340, 'pois',
{'kind': 'rock', 'min_zoom': 17, 'name': 'Goodrich Pinnacle'})
def test_caravan_site_node(self):
# tourism=caravan_site point: POI from zoom 15.
# Redwood Acres RV Park, Eureka CA
self.generate_fixtures(dsl.way(2246385222, wkt_loads('POINT (-124.129336574678 40.7797228041273)'), {u'source': u'openstreetmap.org', u'tourism': u'caravan_site', u'name': u'Redwood Acres RV Park'})) # noqa
self.assert_has_feature(
15, 5085, 12312, 'pois',
{'kind': 'caravan_site', 'min_zoom': 15})
def test_caravan_site_way(self):
# tourism=caravan_site polygon: landuse polygon (sort_rank 71) and a POI
# already visible at zoom 14.
# Pillar Point RV Park
self.generate_fixtures(dsl.way(291546386, wkt_loads('POLYGON ((-122.47365535761 37.5015223926268, -122.473432934745 37.50193231750269, -122.471975328365 37.50136938288639, -122.471683914887 37.50125678120049, -122.471698557426 37.50122620767539, -122.47177401591 37.50113569856521, -122.471851271025 37.50107148691729, -122.47193032277 37.50102045964159, -122.472031473071 37.50099565866259, -122.472215358209 37.501077402088, -122.47219891904 37.50112408203359, -122.47268796188 37.5012582778063, -122.473046659173 37.50137209102618, -122.473052228728 37.50130638297679, -122.47365535761 37.5015223926268))'), {u'website': u'http://pillarpointrvparklive.com/', u'addr:housenumber': u'4000', u'name': u'Pillar Point RV Park', u'source': u'openstreetmap.org', u'addr:postcode': u'94019', u'way_area': u'11068.8', u'phone': u'(650) 712-9277', u'addr:street': u'Cabrillo Highway North', u'tourism': u'caravan_site', u'addr:country': u'US', u'addr:city': u'Half Moon Bay'})) # noqa
self.assert_has_feature(
14, 2618, 6348, 'landuse',
{'kind': 'caravan_site', 'sort_rank': 71})
self.assert_has_feature(
14, 2618, 6348, 'pois',
{'kind': 'caravan_site', 'min_zoom': 14})
def test_picnic_site_node(self):
# tourism=picnic_site points become 'picnic_site' POIs from zoom 16.
# South Park, SF
self.generate_fixtures(dsl.way(2401887217, wkt_loads('POINT (-122.394469044978 37.7811384844766)'), {u'source': u'openstreetmap.org', u'tourism': u'picnic_site'})) # noqa
self.assert_has_feature(
16, 10486, 25329, 'pois',
{'kind': 'picnic_site', 'min_zoom': 16})
# Why is this missing?
# Golden Gate Park, SF
self.generate_fixtures(dsl.way(3297410094, wkt_loads('POINT (-122.474581879994 37.77030601950328)'), {u'source': u'openstreetmap.org', u'tourism': u'picnic_site'})) # noqa
self.assert_has_feature(
16, 10472, 25332, 'pois',
{'kind': 'picnic_site', 'min_zoom': 16})
def test_picnic_site_way(self):
# tourism=picnic_site polygon: landuse polygon (sort_rank 122) plus POI.
# Golden Gate Park, SF
self.generate_fixtures(dsl.way(400701941, wkt_loads('POLYGON ((-122.459222036108 37.76803425324729, -122.459040127263 37.76805037286478, -122.459021801631 37.7678423088086, -122.459187181475 37.76781411714908, -122.459222036108 37.76803425324729))'), {u'source': u'openstreetmap.org', u'way_area': u'592.035', u'tourism': u'picnic_site'})) # noqa
self.assert_has_feature(
16, 10474, 25332, 'landuse',
{'kind': 'picnic_site', 'sort_rank': 122})
self.assert_has_feature(
16, 10474, 25332, 'pois',
{'kind': 'picnic_site', 'min_zoom': 16})
def test_picnic_site_building(self):
# A picnic_site tagged building=no should still produce the POI.
# building, South Park, SF
self.generate_fixtures(dsl.way(231863022, wkt_loads('POLYGON ((-122.394083398226 37.78154005346708, -122.393937062666 37.7816447762805, -122.393858010921 37.78158691250707, -122.394002280356 37.78146039303109, -122.394083398226 37.78154005346708))'), {u'building': u'no', u'name': u'Picnic Area', u'way_area': u'301.836', u'note': u'Five tables.', u'source': u'openstreetmap.org', u'tourism': u'picnic_site'})) # noqa
self.assert_has_feature(
16, 10486, 25329, 'pois',
{'kind': 'picnic_site', 'min_zoom': 16, 'id': 231863022})
def test_fort_node(self):
# historic=fort point becomes a 'fort' POI at zoom 16.
# Fort Strong
self.generate_fixtures(dsl.way(1148222790, wkt_loads('POINT (-70.95587891853059 42.32996679009089)'), {u'source': u'openstreetmap.org', u'historic': u'fort', u'wikidata': u'Q5472138', u'name': u'Fort Strong'})) # noqa
self.assert_has_feature(
16, 19850, 24247, 'pois',
{'kind': 'fort', 'min_zoom': 16})
def test_fort_way(self):
# historic=fort polygons: landuse polygon plus POI; a small fort gets a
# fractional min_zoom in [15, 16), a large one (Fort Monroe) appears at 13.
# Battery 2
self.generate_fixtures(dsl.way(265893625, wkt_loads('POLYGON ((-73.9637687104612 40.84842168440429, -73.96362614782558 40.8484769976283, -73.9636460904249 40.84850642098299, -73.96361078663419 40.84852238977281, -73.96359192201329 40.84849391775769, -73.963534070509 40.84851620611419, -73.9634516051659 40.8487882185932, -73.96357422520219 40.84897189263839, -73.9635187991491 40.8489932974607, -73.96339501130299 40.84879440222639, -73.9634823275486 40.8484804631974, -73.9637440067909 40.8483779910018, -73.9637687104612 40.84842168440429))'), {u'way_area': u'726.454', u'source': u'openstreetmap.org', u'historic': u'fort', u'name': u'Battery 2'})) # noqa
self.assert_has_feature(
16, 19303, 24607, 'landuse',
{'kind': 'fort', 'sort_rank': 53})
self.assert_has_feature(
16, 19303, 24607, 'pois',
{'kind': 'fort', 'min_zoom': (lambda z: z >= 15 and z < 16)})
# Fort Monroe
self.generate_fixtures(dsl.way(51064272, wkt_loads('POLYGON ((-76.31040871158289 37.0023446768107, -76.3100868452166 37.0031222674211, -76.30959600574539 37.00316940039099, -76.3094458074299 37.00475662375248, -76.309920477226 37.0048508876365, -76.3101002301144 37.00566051919608, -76.30907561170129 37.00578261599479, -76.30871619575611 37.00551266846238, -76.3071026418428 37.00604459976819, -76.30708539418931 37.00643592379419, -76.30622983871268 37.00692430483049, -76.30590797234639 37.00597106936429, -76.3062325336586 37.0057204915612, -76.305553946293 37.00491078890229, -76.3051382059795 37.00504579972589, -76.30416452204301 37.00443530785439, -76.30445422872218 37.0040904602791, -76.3046232018271 37.00411399059809, -76.30499330772409 37.00337715815628, -76.3048618841981 37.00331926438859, -76.30495315303089 37.0029038197136, -76.30545998251419 37.00280101635519, -76.305588800926 37.0029381113821, -76.306455136186 37.00258888086299, -76.30640150676349 37.00242825437279, -76.30683009298561 37.00217077779468, -76.30729470165049 37.00207048436938, -76.3074368151285 37.00221827005718, -76.30830584533429 37.0018541858668, -76.30824422090579 37.00169140559281, -76.3086491814359 37.00144719865776, -76.3090326722307 37.00173846770868, -76.3089548781271 37.00189271071981, -76.3097945334232 37.00231691325678, -76.30991248222 37.00218828249908, -76.31040871158289 37.0023446768107))'), {u'name': u'Fort Monroe', u'barrier': u'wall', u'way_area': u'277328', u'source': u'openstreetmap.org', u'historic': u'fort', u'wikidata': u'Q1438639', u'tourism': u'attraction'})) # noqa
self.assert_has_feature(
13, 2359, 3188, 'landuse',
{'kind': 'fort'})
self.assert_has_feature(
13, 2359, 3188, 'pois',
{'kind': 'fort', 'min_zoom': 13, 'name': 'Fort Monroe'})
| 118.098624
| 6,451
| 0.737702
| 6,270
| 51,491
| 5.983413
| 0.329346
| 0.015993
| 0.020791
| 0.031986
| 0.211003
| 0.162118
| 0.123814
| 0.098438
| 0.081085
| 0.059495
| 0
| 0.562196
| 0.124604
| 51,491
| 435
| 6,452
| 118.370115
| 0.270105
| 0.035094
| 0
| 0.342007
| 0
| 0.092937
| 0.715095
| 0.00115
| 0
| 0
| 0
| 0
| 0.223048
| 1
| 0.137546
| false
| 0
| 0.011152
| 0
| 0.152416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7163e5ebc7dcb1bfb2b86f24bb762e02088ea3d7
| 123,737
|
py
|
Python
|
cctbx/geometry_restraints/tst_ext.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
cctbx/geometry_restraints/tst_ext.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
cctbx/geometry_restraints/tst_ext.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from __future__ import division
from cctbx.array_family import flex
from cctbx import geometry_restraints
from cctbx import crystal
from cctbx import sgtbx
from cctbx import uctbx
from cctbx.crystal import direct_space_asu
from scitbx import matrix
from libtbx.test_utils import approx_equal, not_approx_equal, eps_eq, show_diff
from libtbx.utils import null_out
from cStringIO import StringIO
import math
import sys
def finite_difference_gradients(restraint_type, sites_cart, proxy,
                                unit_cell=None, eps=1.e-8):
    """Numerical gradients of a restraint residual w.r.t. Cartesian sites.

    Uses central differences: each coordinate is displaced by +/- eps,
    the residual is re-evaluated, and the slope (qp - qm) / (2 * eps)
    is recorded.

    :param restraint_type: restraint class; instantiated as
        restraint_type(sites_cart=..., proxy=...) (plus unit_cell= when
        given) and queried via .residual().
    :param sites_cart: mutable sequence of (x, y, z) sites; perturbed in
        place while evaluating, but restored before returning.
    :param proxy: proxy object forwarded to restraint_type.
    :param unit_cell: optional unit cell forwarded to restraint_type.
    :param eps: finite-difference step size.
    :return: list with one [d/dx, d/dy, d/dz] list per site.
    """
    def residual(sites):
        # Evaluate the restraint residual at the current coordinates.
        if unit_cell is None:
            return restraint_type(sites_cart=sites, proxy=proxy).residual()
        return restraint_type(
            unit_cell=unit_cell, sites_cart=sites, proxy=proxy).residual()
    result = []
    for i in range(len(sites_cart)):
        site_0 = tuple(sites_cart[i])
        grad_i = []
        for j in range(3):
            shifted = list(site_0)
            shifted[j] = site_0[j] + eps
            sites_cart[i] = tuple(shifted)
            qp = residual(sites_cart)
            shifted[j] = site_0[j] - eps
            sites_cart[i] = tuple(shifted)
            qm = residual(sites_cart)
            grad_i.append((qp - qm) / (2 * eps))
        # Bug fix: the original "+h then -2*h" sequence left each site
        # displaced by -eps per axis, so later components were evaluated
        # at drifted positions and the caller's sites_cart was mutated.
        sites_cart[i] = site_0
        result.append(grad_i)
    return result
def exercise_bond_similarity():
"""Exercise bond_similarity proxies/restraints, with and without sym ops.

Three bond vectors of lengths 1.5, 1.6 and 1.5 are restrained to be
similar; expected deltas/residual/gradients below are worked out by hand
for those lengths with weights (1, 2, 3).
"""
# test without symmetry operations
i_seqs=((0,2),
(1,3),
(4,5))
weights=(1,2,3)
p = geometry_restraints.bond_similarity_proxy(
i_seqs=i_seqs,
weights=weights)
assert tuple(p.i_seqs) == i_seqs
assert approx_equal(p.weights, weights)
assert p.sym_ops == None
# hand-computed reference values: mean distance is 23/15, each delta is
# (distance - mean), residual is the weighted mean of squared deltas
expected_deltas = \
(-0.033333333333333, 0.066666666666666, -0.033333333333333)
expected_rms_deltas = math.sqrt(
sum([delta * delta for delta in expected_deltas])
/len(expected_deltas))
expected_residual = sum([weights[i] * expected_deltas[i]
* expected_deltas[i]
for i in range(3)])\
/ sum([w for w in weights])
expected_gradients = (
((0,0,0.011111111111), (0,0,-0.011111111111)),
((0,-0.044444444444,0), (0,0.044444444444,0)),
((0.033333333333,0,0), (-0.033333333333,0,0)))
sites_array=[
((1,2,3),(1,2,4.5)),((2,4,6),(2,5.6,6)),((4,14,19),(5.5,14,19))]
b = geometry_restraints.bond_similarity(
sites_array=sites_array,
weights=weights)
assert approx_equal(b.sites_array, sites_array)
assert approx_equal(b.weights, weights)
assert approx_equal(b.mean_distance(), 1.533333333333333)
assert approx_equal(b.deltas(), expected_deltas)
assert approx_equal(b.rms_deltas(), expected_rms_deltas)
assert approx_equal(b.residual(), expected_residual)
assert approx_equal(b.gradients(), expected_gradients)
# same restraint built from sites_cart + proxy must agree
sites_cart = flex.vec3_double(
[(1,2,3),(2,4,6),(1,2,4.5),(2,5.6,6),(4,14,19),(5.5,14,19)])
b = geometry_restraints.bond_similarity(
sites_cart=sites_cart,
proxy=p)
assert approx_equal(b.sites_array, sites_array)
assert approx_equal(b.weights, weights)
assert approx_equal(b.mean_distance(), 1.533333333333333)
assert approx_equal(b.deltas(), expected_deltas)
assert approx_equal(b.rms_deltas(), expected_rms_deltas)
assert approx_equal(b.residual(), expected_residual)
assert approx_equal(b.gradients(), expected_gradients)
# test with symmetry operations
unit_mx = sgtbx.rt_mx()
i_seqs=((0,2),
(1,3),
(4,5))
sym_ops=(unit_mx,
unit_mx,
sgtbx.rt_mx('1+x,y,z'),)
weights=(1,2,3)
p = geometry_restraints.bond_similarity_proxy(
i_seqs=i_seqs,
sym_ops=sym_ops,
weights=weights)
assert tuple(p.i_seqs) == i_seqs
assert tuple(p.sym_ops) == sym_ops
assert approx_equal(p.weights, weights)
# same reference values as above; the geometry is unchanged
expected_deltas = \
(-0.033333333333333, 0.066666666666666, -0.033333333333333)
expected_rms_deltas = math.sqrt(
sum([delta * delta for delta in expected_deltas])
/len(expected_deltas))
expected_residual = sum([weights[i] * expected_deltas[i]
* expected_deltas[i]
for i in range(3)])\
/ sum([w for w in weights])
expected_gradients = (
((0,0,0.011111111111), (0,0,-0.011111111111)),
((0,-0.044444444444,0), (0,0.044444444444,0)),
((0.033333333333,0,0), (-0.033333333333,0,0)))
sites_array=[
((1,2,3),(1,2,4.5)),((2,4,6),(2,5.6,6)),((14,24,29),(15.5,24,29))]
b = geometry_restraints.bond_similarity(
sites_array=sites_array,
weights=weights)
assert approx_equal(b.sites_array, sites_array)
assert approx_equal(b.weights, weights)
assert approx_equal(b.mean_distance(), 1.533333333333333)
assert approx_equal(b.deltas(), expected_deltas)
assert approx_equal(b.rms_deltas(), expected_rms_deltas)
assert approx_equal(b.residual(), expected_residual)
assert approx_equal(b.gradients(), expected_gradients)
# with a unit cell, the '1+x,y,z' sym op maps site 5 across the cell
unit_cell = uctbx.unit_cell([15,25,30,90,90,90])
sites_cart = flex.vec3_double(
[(1,2,3),(2,4,6),(1,2,4.5),(2,5.6,6),(14,24,29),(0.5,24,29)])
b = geometry_restraints.bond_similarity(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxy=p)
assert approx_equal(b.sites_array, sites_array)
assert approx_equal(b.weights, weights)
assert approx_equal(b.mean_distance(), 1.533333333333333)
assert approx_equal(b.deltas(), expected_deltas)
assert approx_equal(b.rms_deltas(), expected_rms_deltas)
assert approx_equal(b.residual(), expected_residual)
assert approx_equal(b.gradients(), expected_gradients)
# shared-proxy array helpers: residuals, rms deltas, residual sum, and
# analytical gradients checked against finite differences
proxies = geometry_restraints.shared_bond_similarity_proxy([p,p])
assert eps_eq(geometry_restraints.bond_similarity_residuals(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies), [expected_residual]*2)
assert eps_eq(geometry_restraints.bond_similarity_deltas_rms(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies), [expected_rms_deltas]*2)
residual_sum = geometry_restraints.bond_similarity_residual_sum(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies,
gradient_array=None)
assert eps_eq(residual_sum, 2*expected_residual)
gradient_array = flex.vec3_double(sites_cart.size(), (0,0,0))
residual_sum = geometry_restraints.bond_similarity_residual_sum(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies,
gradient_array=gradient_array)
assert eps_eq(residual_sum, 2*expected_residual)
fd_grads = finite_difference_gradients(
restraint_type=geometry_restraints.bond_similarity,
unit_cell=unit_cell,
sites_cart=sites_cart,
proxy=p)
# factor 2 because the same proxy was summed twice into gradient_array
for g,e in zip(gradient_array, fd_grads):
assert approx_equal(g, matrix.col(e)*2)
# check proxies with and without sym_ops are happy side-by-side
p_sym = geometry_restraints.bond_similarity_proxy(
i_seqs=i_seqs,
sym_ops=sym_ops,
weights=weights)
assert p_sym.sym_ops == sym_ops
restraint_sym = geometry_restraints.bond_similarity(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxy=p_sym)
p_no_sym = geometry_restraints.bond_similarity_proxy(
i_seqs=i_seqs,
weights=weights)
assert p_no_sym.sym_ops == None
restraint_no_sym = geometry_restraints.bond_similarity(
sites_cart=sites_cart,
proxy=p_no_sym)
proxies = geometry_restraints.shared_bond_similarity_proxy([p_sym,p_no_sym])
assert approx_equal(geometry_restraints.bond_similarity_deltas_rms(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies), [restraint_sym.rms_deltas(),restraint_no_sym.rms_deltas()])
assert approx_equal(geometry_restraints.bond_similarity_residuals(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies), [restraint_sym.residual(),restraint_no_sym.residual()])
residual_sum = geometry_restraints.bond_similarity_residual_sum(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxies=proxies,
gradient_array=None)
assert approx_equal(residual_sum, restraint_sym.residual() + restraint_no_sym.residual())
def exercise_bond():
def check_bpar(p, distance_ideal=3.5, weight=1, slack=0, limit=-1,
top_out=False, origin_id=0):
assert approx_equal(p.distance_ideal, distance_ideal)
assert approx_equal(p.weight, weight)
assert approx_equal(p.slack, slack)
assert approx_equal(p.limit, limit)
assert p.top_out == top_out
assert p.origin_id == origin_id
def check_bproxy(p, i_seqs=(0,1), sym_op=None, distance_ideal=3.5,
weight=1, slack=0, limit=-1, top_out=False, origin_id=0):
assert p.i_seqs == i_seqs
if p.rt_mx_ji is not None:
assert p.rt_mx_ji.as_double_array == sym_op.as_double_array
else:
assert sym_op is None
assert approx_equal(p.distance_ideal, distance_ideal)
assert approx_equal(p.weight, weight)
assert approx_equal(p.slack, slack)
assert approx_equal(p.limit, limit)
assert p.top_out == top_out
assert p.origin_id == origin_id
p = geometry_restraints.bond_params(
distance_ideal=3.5,
weight=2,
slack=2,
limit=1,
top_out=True,
origin_id=2)
check_bpar(p, weight=2, slack=2, limit=1, top_out=True, origin_id=2)
p = geometry_restraints.bond_params(
distance_ideal=3.5,
weight=1)
check_bpar(p)
p.distance_ideal = 35
p.weight = 10
p.slack = 3
p.limit = 3
p.top_out = True
p.origin_id = 3
check_bpar(p, distance_ideal=35, weight=10, slack=3, limit=3,
top_out=True, origin_id=3)
p.distance_ideal = 3.5
p.weight = 1
p.slack = 0
p.limit = -1.0
p.top_out = False
p.origin_id = 0
check_bpar(p)
#
c = p.scale_weight(factor=2)
check_bpar(p)
check_bpar(c, weight=2)
#
t = geometry_restraints.bond_params_table()
assert t.size() == 0
d = geometry_restraints.bond_params_dict()
assert len(d) == 0
p = geometry_restraints.bond_params(distance_ideal=3, weight=2)
d[10] = p
check_bpar(d[10], distance_ideal=3, weight=2)
t.append(d)
t.append(d)
check_bpar(t[1][10], distance_ideal=3, weight=2)
t[0][13] = p
check_bpar(t[0][13], distance_ideal=3, weight=2)
t[0][13].distance_ideal = 5
check_bpar(t[0][13], distance_ideal=5, weight=2)
check_bpar(t[1][10], distance_ideal=3, weight=2)
t[1][1] = geometry_restraints.bond_params(distance_ideal=4, weight=5,
slack=2, limit=1, top_out=True, origin_id=2)
while (t.size() < 14):
t.append(geometry_restraints.bond_params_dict())
s = t.proxy_select(iselection=flex.size_t([1]))
check_bpar(s[0][0], distance_ideal=4, weight=5, slack=2, limit=1,
top_out=True, origin_id=2)
#
check_bpar(t.lookup(13, 0), distance_ideal=5, weight=2)
t.lookup(0, 13).weight = 48
check_bpar(t.lookup(0, 13), distance_ideal=5, weight=48)
t.lookup(13, 0).weight = 2
#
rest = t.proxy_remove(selection=flex.bool([True]*14))
assert [p.size() for p in rest] == [0]*14
rest = t.proxy_remove(selection=flex.bool([False]*14))
assert [p.size() for p in rest] == [2,2]+[0]*12
rest = t.proxy_remove(selection=flex.bool([False,True]+[False]*12))
assert [p.size() for p in rest] == [2,1]+[0]*12
rest = t.proxy_remove(
selection=flex.bool([True,False]+[False]*8+[True]+[False]*3))
assert [p.size() for p in rest] == [1,2]+[0]*12
rest = t.proxy_remove(
selection=flex.bool([True,True]+[False]*8+[True]+[False]*3))
assert [p.size() for p in rest] == [1]+[0]*13
#
p = geometry_restraints.bond_params(distance_ideal=2.8, weight=2,
slack=2, limit=1, top_out=True, origin_id=2)
assert t[3].size() == 0
t.update(i_seq=4, j_seq=3, params=p)
assert t[3].size() == 1
check_bpar(t[3][4], distance_ideal=2.8, weight=2, slack=2, limit=1,
top_out=True, origin_id=2)
p = geometry_restraints.bond_params(distance_ideal=3.8, weight=3)
t.update(i_seq=3, j_seq=5, params=p)
assert t[3].size() == 2
assert approx_equal(t[3][5].distance_ideal, 3.8)
p = geometry_restraints.bond_params(distance_ideal=1.8, weight=4)
t.update(i_seq=3, j_seq=4, params=p)
assert t[3].size() == 2
check_bpar(t[3][4], distance_ideal=1.8, weight=4)
#
assert geometry_restraints.bond_params_table().mean_residual(1) == 0
assert t.mean_residual(bond_stretch_factor=0) == 0
assert approx_equal(t.mean_residual(0.5), 9.26166666667)
assert approx_equal(t.mean_residual(1.0), 37.0466666667)
assert approx_equal(t.mean_residual(2.0), 148.186666667)
#
p = geometry_restraints.bond_simple_proxy(
i_seqs=[1,0], distance_ideal=3.5, weight=1)
check_bproxy(p, i_seqs=(1,0))
p = geometry_restraints.bond_simple_proxy(
i_seqs=[1,0],
distance_ideal=3.5,
weight=1,
slack=2,
limit=1,
top_out=True,
origin_id=2)
check_bproxy(p, i_seqs=(1,0), distance_ideal=3.5, weight=1, slack=2, limit=1,
top_out=True, origin_id=2)
p = p.sort_i_seqs()
check_bproxy(p, i_seqs=(0,1), distance_ideal=3.5, weight=1, slack=2, limit=1,
top_out=True, origin_id=2)
p.distance_ideal = 35
p.weight = 10
p.slack = 3
p.limit = 3
p.top_out = True
p.origin_id = 3
check_bproxy(p, i_seqs=(0,1), distance_ideal=35, weight=10, slack=3, limit=3,
top_out=True, origin_id=3)
p.distance_ideal = 3.5
p.weight = 1
p.slack = 0
p.limit = -1.0
p.top_out = False
p.origin_id = 0
check_bproxy(p, i_seqs=(0,1))
b = geometry_restraints.bond(
sites=[(1,2,3),(2,4,6)],
distance_ideal=3.5,
weight=1)
assert approx_equal(b.sites, [(1,2,3),(2,4,6)])
assert approx_equal(b.distance_ideal, 3.5)
assert approx_equal(b.weight, 1)
assert approx_equal(b.slack, 0)
assert approx_equal(b.limit, -1)
assert not b.top_out
assert approx_equal(b.origin_id, 0)
assert approx_equal(b.distance_model**2, 14)
assert approx_equal(b.delta, -0.241657386774)
assert approx_equal(b.residual(), 0.0583982925824)
assert approx_equal(b.gradients(),
((-0.12917130661302928, -0.25834261322605856, -0.38751391983908784),
( 0.12917130661302928, 0.25834261322605856, 0.38751391983908784)))
b = geometry_restraints.bond(
sites=[(1,2,3),(1,2,3)],
distance_ideal=3.5,
weight=1,
slack=2,
limit=1,
top_out=True,
origin_id=2)
assert approx_equal(b.distance_model, 0)
assert approx_equal(b.weight, 1)
assert approx_equal(b.slack, 2)
assert approx_equal(b.limit, 1)
assert b.top_out
assert approx_equal(b.origin_id, 2)
assert approx_equal(b.gradients(), [(0,0,0), (0,0,0)])
sites_cart = flex.vec3_double([(1,2,3),(2,4,6)])
b = geometry_restraints.bond(
sites_cart=sites_cart,
proxy=p)
assert approx_equal(b.sites, [(1,2,3),(2,4,6)])
assert approx_equal(b.distance_ideal, 3.5)
assert approx_equal(b.weight, 1)
assert approx_equal(p.slack, 0)
assert approx_equal(p.limit, -1)
assert not p.top_out
assert approx_equal(p.origin_id, 0)
assert approx_equal(b.distance_model**2, 14)
proxies = geometry_restraints.shared_bond_simple_proxy([p,p])
for proxy in proxies:
assert approx_equal(proxy.weight, 1)
proxy.weight = 12
assert approx_equal(proxy.slack, 0)
proxy.slack = 3
assert approx_equal(proxy.limit, -1)
proxy.limit = 3
assert not proxy.top_out
proxy.top_out = True
assert approx_equal(proxy.origin_id, 0)
proxy.origin_id = 3
for proxy in proxies:
assert approx_equal(proxy.weight, 12)
proxy.weight = 1
assert approx_equal(proxy.slack, 3)
proxy.slack = 0
assert approx_equal(proxy.limit, 3)
proxy.limit = -1.0
assert proxy.top_out
proxy.top_out = False
assert approx_equal(proxy.origin_id, 3)
proxy.origin_id = 0
tab = geometry_restraints.extract_bond_params(
n_seq=2, bond_simple_proxies=proxies)
assert tab[0].keys() == [1]
assert approx_equal(tab[0].values()[0].distance_ideal, 3.5)
assert approx_equal(tab[0].values()[0].weight, 1)
assert tab[1].keys() == []
assert approx_equal(geometry_restraints.bond_distances_model(
sites_cart=sites_cart,
proxies=proxies), [14**0.5]*2)
assert approx_equal(geometry_restraints.bond_deltas(
sites_cart=sites_cart,
proxies=proxies), [-0.241657386774]*2)
assert approx_equal(geometry_restraints.bond_residuals(
sites_cart=sites_cart,
proxies=proxies), [0.0583982925824]*2)
residual_sum = geometry_restraints.bond_residual_sum(
sites_cart=sites_cart,
proxies=proxies,
gradient_array=None)
assert approx_equal(residual_sum, 2*0.0583982925824)
#
for sym_op in (None, sgtbx.rt_mx("-y,z,x")):
if sym_op is None:
p = geometry_restraints.bond_simple_proxy(
i_seqs=[1,0],
distance_ideal=3.5,
weight=1)
check_bproxy(p, i_seqs=(1,0), sym_op=sym_op)
else:
for proxy_t in (geometry_restraints.bond_simple_proxy,
geometry_restraints.bond_sym_proxy):
p = proxy_t(
i_seqs=[1,0],
rt_mx_ji=sgtbx.rt_mx("-y,z,x"),
distance_ideal=3.5,
weight=1, slack=2, limit=1, top_out=True, origin_id=2)
check_bproxy(p, i_seqs=(1,0), sym_op=sym_op, slack=2, limit=1,
top_out=True, origin_id=2)
unit_cell = uctbx.unit_cell([15,25,30,90,90,90])
sites_cart = flex.vec3_double([[1,2,3],[2,3,4]])
b = geometry_restraints.bond(
unit_cell=unit_cell,
sites_cart=sites_cart,
proxy=p)
assert approx_equal(b.sites, [(2,3,4),(-1.2,2.5,2)])
assert approx_equal(b.distance_ideal, 3.5)
assert approx_equal(b.weight, 1)
assert approx_equal(b.distance_model**2, 14.49)
assert approx_equal(p.slack, 2)
assert approx_equal(p.limit, 1)
assert p.top_out
assert approx_equal(p.origin_id, 2)
#
sites_cart = flex.vec3_double([[1,2,3],[2,3,4]])
asu_mappings = direct_space_asu.non_crystallographic_asu_mappings(
sites_cart=sites_cart)
pair_generator = crystal.neighbors_fast_pair_generator(
asu_mappings=asu_mappings,
distance_cutoff=5)
p = geometry_restraints.bond_asu_proxy(
pair=pair_generator.next(),
distance_ideal=2,
weight=10, slack=2, limit=1, top_out=True, origin_id=2)
p = geometry_restraints.bond_asu_proxy(pair=p, params=p)
assert pair_generator.at_end()
assert p.i_seq == 0
assert p.j_seq == 1
assert p.j_sym == 0
assert approx_equal(p.distance_ideal, 2)
assert approx_equal(p.weight, 10)
assert approx_equal(p.slack, 2)
assert approx_equal(p.limit, 1)
assert p.top_out
assert approx_equal(p.origin_id, 2)
p.distance_ideal = 35
assert approx_equal(p.distance_ideal, 35)
p.distance_ideal = 2
assert approx_equal(p.distance_ideal, 2)
p.weight = 1
assert approx_equal(p.weight, 1)
p.weight = 10
assert approx_equal(p.weight, 10)
p.slack = 3
assert approx_equal(p.slack, 3)
p.slack = 0
assert approx_equal(p.slack, 0)
p.limit = 3
assert approx_equal(p.limit, 3)
p.limit = -1.0
assert approx_equal(p.limit, -1)
p.top_out = True
assert p.top_out
p.top_out = False
assert not p.top_out
p.origin_id = 3
assert approx_equal(p.origin_id, 3)
p.origin_id = 0
assert approx_equal(p.origin_id, 0)
assert p.as_simple_proxy().i_seqs == (0,1)
assert approx_equal(p.as_simple_proxy().distance_ideal, 2)
assert approx_equal(p.as_simple_proxy().weight, 10)
sym_proxies = geometry_restraints.shared_bond_asu_proxy([p,p])
for proxy in sym_proxies:
assert approx_equal(proxy.distance_ideal, 2)
proxy.distance_ideal = -4
for proxy in sym_proxies:
assert approx_equal(proxy.distance_ideal, -4)
proxy.distance_ideal = 2
sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
asu_mappings=asu_mappings)
sorted_asu_proxies.process(proxies=sym_proxies)
assert approx_equal(
geometry_restraints.bond_distances_model(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies),
[3**.5]*2)
assert approx_equal(
geometry_restraints.bond_deltas(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies),
[2-3**.5]*2)
assert approx_equal(
geometry_restraints.bond_residuals(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies),
[10*(2-3**.5)**2]*2)
assert approx_equal(
geometry_restraints.bond_residual_sum(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies,
gradient_array=None),
(10*(2-3**.5)**2)*2)
gradient_array = flex.vec3_double(2, [0,0,0])
assert approx_equal(
geometry_restraints.bond_residual_sum(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies,
gradient_array=gradient_array),
(10*(2-3**.5)**2)*2)
assert approx_equal(gradient_array,
[[ 6.1880215351700611]*3,
[-6.1880215351700611]*3])
for disable_cache in [False, True]:
gradient_array = flex.vec3_double(2, [0,0,0])
assert approx_equal(
geometry_restraints.bond_residual_sum(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies,
gradient_array=gradient_array,
disable_cache=disable_cache),
(10*(2-3**.5)**2)*2)
assert approx_equal(gradient_array,
[[ 6.1880215351700611]*3,
[-6.1880215351700611]*3])
#
for p in geometry_restraints.shared_bond_simple_proxy(size=2):
assert p.distance_ideal == 0
assert p.weight == 0
assert p.slack == 0
assert p.i_seqs == (0,0)
#
sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
asu_mappings=asu_mappings)
sorted_asu_proxies.push_back(proxy=sym_proxies[0])
assert sorted_asu_proxies.simple.size() == 0
assert sorted_asu_proxies.asu.size() == 1
sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
asu_mappings=asu_mappings)
sorted_asu_proxies.process(proxy=proxies[0])
sorted_asu_proxies.process(proxy=sym_proxies[0])
assert sorted_asu_proxies.simple.size() == 2
assert sorted_asu_proxies.asu.size() == 0
assert sorted_asu_proxies.n_total() == 2
residual_0 = geometry_restraints.bond(
sites_cart=sites_cart,
proxy=proxies[0]).residual()
residual_1 = geometry_restraints.bond(
sites_cart=sites_cart,
asu_mappings=asu_mappings,
proxy=sym_proxies[0]).residual()
assert approx_equal(residual_1, 10*(2-3**.5)**2)
gradient_array = flex.vec3_double(2, [0,0,0])
assert approx_equal(geometry_restraints.bond_residual_sum(
sites_cart=sites_cart,
sorted_asu_proxies=sorted_asu_proxies,
gradient_array=gradient_array), residual_0+residual_1)
assert approx_equal(gradient_array,
[(5.1354626519124107, 5.1354626519124107, 5.1354626519124107),
(-5.1354626519124107, -5.1354626519124107, -5.1354626519124107)])
sorted_asu_proxies.process(sorted_asu_proxies.simple.deep_copy())
assert sorted_asu_proxies.simple.size() == 4
sorted_asu_proxies.process(sorted_asu_proxies.asu.deep_copy())
assert sorted_asu_proxies.asu.size() == 0
sorted_asu_proxies.push_back(sorted_asu_proxies.asu.deep_copy())
assert sorted_asu_proxies.asu.size() == 0
#
pair_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings)
assert pair_asu_table.table()[0].keys() == []
geometry_restraints.add_pairs(
pair_asu_table=pair_asu_table,
bond_simple_proxies=proxies)
assert pair_asu_table.table()[0].keys() == [1]
#
pair_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings)
sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
pair_asu_table=pair_asu_table)
assert sorted_asu_proxies.simple.size() == 0
assert sorted_asu_proxies.asu.size() == 0
pair_asu_table.add_all_pairs(distance_cutoff=2)
sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
pair_asu_table=pair_asu_table)
assert sorted_asu_proxies.simple.size() == 1
assert sorted_asu_proxies.asu.size() == 0
#
def sign(x):
if (x < 0): return -1
return 1
mt = flex.mersenne_twister(seed=0)
for slack in [0, 1/3., 2/3., 1]:
for ish in xrange(9):
sh = ish / 2.
site1 = matrix.col((1,2,3)) \
+ sh * matrix.col(mt.random_double_point_on_sphere())
b = geometry_restraints.bond(
sites=[(1,2,3),site1],
distance_ideal=2,
weight=1,
slack=slack)
assert approx_equal(b.distance_model, sh)
assert approx_equal(
b.delta_slack,
sign(b.delta) * max(0, (abs(b.delta) - b.slack)))
#
for i in xrange(3):
rs = []
eps = 1.e-6
for signed_eps in [eps, -eps]:
site0 = [1,2,3]
site0[i] += signed_eps
be = geometry_restraints.bond(
sites=[site0,site1],
distance_ideal=2,
weight=1,
slack=slack)
rs.append(be.residual())
g_fin = (rs[0]-rs[1])/(2*eps)
g_ana = b.gradients()[0][i]
assert approx_equal(g_ana, g_fin)
#
sites_cart = flex.vec3_double([(1,2,3),(2,3,4)])
gradients_inp = flex.vec3_double([(10,20,30),(20,30,40)])
site_symmetry_table_indices = flex.size_t([0,1])
home_sites_cart = flex.vec3_double([(1.1,1.9,3.05),(2.1,3.2,4.3)])
iselection = flex.size_t([0,1])
def get(weight, slack, no_grads=False, no_site_sym=False):
gradients = None
if (not no_grads):
gradients = gradients_inp.deep_copy()
site_sym = None
if (not no_site_sym):
site_sym = site_symmetry_table_indices
residual_sum = geometry_restraints \
.home_restraints_summation_skip_special_positions(
sites_cart=sites_cart,
gradients=gradients,
site_symmetry_table_indices=site_sym,
home_sites_cart=home_sites_cart,
iselection=iselection,
weight=weight,
slack=slack)
if (not no_grads):
return [residual_sum, gradients]
return residual_sum
assert approx_equal(get(1, 0),
[0.0225, [(9.8,20.2,29.9), (20,30,40)]])
assert approx_equal(get(1, 0, True), 0.0225)
assert approx_equal(get(1, 0, True, True), 0.1625)
assert approx_equal(get(1, 0, False, True),
[0.1625, [(9.8,20.2,29.9), (19.8,29.6,39.4)]])
assert approx_equal(get(3, 0),
[0.0675, [(9.4,20.6,29.7), (20,30,40)]])
assert approx_equal(get(1, 3),
[0.0, [(10,20,30), (20,30,40)]])
site_symmetry_table_indices = flex.size_t([1,0])
assert approx_equal(get(1, 0),
[0.14, [(10,20,30), (19.8,29.6,39.4)]])
site_symmetry_table_indices = flex.size_t([0,0])
iselection = flex.size_t([1])
assert approx_equal(get(1, 0),
[0.14, [(10,20,30), (19.8,29.6,39.4)]])
iselection = flex.size_t([])
assert approx_equal(get(1, 0),
[0.0, [(10,20,30), (20,30,40)]])
iselection = flex.size_t([0,1])
# top_out potential
sites = [[0,0,0],[1.5,0,0]]
proxy1 = geometry_restraints.bond_simple_proxy(
i_seqs=[0,1],
distance_ideal=1.5,
weight=400)
proxy2 = geometry_restraints.bond_simple_proxy(
i_seqs=[0,1],
distance_ideal=1.5,
weight=400,
limit=0.6,
top_out=True)
for i in range(200):
sites[1][0] = 1.5 + 0.01 * (i - 100)
sites_cart = flex.vec3_double(sites) # XXX why isn't this automatic?
bond1 = geometry_restraints.bond(sites_cart, proxy1)
bond2 = geometry_restraints.bond(sites_cart, proxy2)
res1 = bond1.residual()
res2 = bond2.residual()
if (i <= 100):
assert (res1 == res2)
else :
assert (res2 < res1)
grads_fd = finite_difference_gradients(geometry_restraints.bond,
sites_cart, proxy2)
grads_an = bond2.gradients()
assert approx_equal(grads_fd, grads_an, eps=0.0001)
class py_nonbonded_cos(object): # prototype
  """Pure-Python reference implementation of the cosine repulsion term.

  For d < vdw_distance the residual is
    max_residual * ((cos(pi*d/vdw_distance) + 1) / 2)**exponent
  and zero otherwise.  Used to cross-check the C++ implementation.
  """

  def __init__(self, max_residual, exponent=1):
    self.max_residual = max_residual
    self.exponent = exponent

  def residual_and_gradients(self, proxy, sites_cart, gradient_array=None):
    i_seq, j_seq = proxy.i_seqs
    delta_vec = matrix.col(sites_cart[i_seq]) - matrix.col(sites_cart[j_seq])
    dist = abs(delta_vec)
    vdw = proxy.vdw_distance
    if (dist >= vdw): return 0
    frac = dist / vdw
    pi_frac = math.pi * frac
    cos_term = math.cos(pi_frac) + 1
    e = self.exponent
    m = self.max_residual
    residual = m * (cos_term/2)**e
    if (gradient_array is not None and dist != 0):
      # Chain rule for r = m*((cos(pi x)+1)/2)**e with x = d/vdw:
      #   dr/dx = -e*m*pi*(cos(pi x)+1)**(e-1)*sin(pi x) / 2**e
      drdx = -(e*m*math.pi*cos_term**(e-1)*math.sin(pi_frac)) / 2**e
      # dr/dd via x = d/vdw, then project onto the inter-site direction.
      grad_i = delta_vec * (drdx / vdw) / dist
      gradient_array[i_seq] = matrix.col(gradient_array[i_seq]) + grad_i
      gradient_array[j_seq] = matrix.col(gradient_array[j_seq]) - grad_i
    return residual
def nb_cos_finite_difference_gradients(nbf, proxy, sites_cart, eps=1.e-6):
  """Central finite-difference gradients of nbf for both proxy sites.

  Perturbs sites_cart in place, one coordinate at a time, and restores it
  before returning.  Returns a flex.vec3_double of length len(proxy.i_seqs).
  """
  grads = []
  for i_seq in proxy.i_seqs:
    site = list(sites_cart[i_seq])
    for axis in xrange(3):
      coord0 = site[axis]
      residuals = []
      for shift in [eps, -eps]:
        site[axis] = coord0 + shift
        sites_cart[i_seq] = site
        residuals.append(nbf.residual_and_gradients(
          proxy=proxy,
          sites_cart=sites_cart))
      site[axis] = coord0
      grads.append((residuals[0]-residuals[1])/(2*eps))
    # restore the unperturbed coordinates
    sites_cart[i_seq] = site
  return flex.vec3_double(flex.double(grads))
def exercise_nonbonded_cos(verbose=0):
  """Cross-check the C++ cos_repulsion_function against the pure-Python
  prototype (py_nonbonded_cos) and against finite-difference gradients,
  for several exponents and a range of inter-site distances."""
  if (verbose): log = sys.stdout
  else: log = null_out()
  for exponent in [1, 2, 1.5]:
    nbf = py_nonbonded_cos(max_residual=13, exponent=exponent)
    sites_cart = flex.vec3_double([(1,2,3), (0.3,2.4,3.5)])
    proxy = geometry_restraints.nonbonded_simple_proxy(
      i_seqs=(0,1), vdw_distance=2)
    gradient_array = flex.vec3_double(2, (0,0,0))
    r = nbf.residual_and_gradients(
      proxy=proxy,
      sites_cart=sites_cart,
      gradient_array=gradient_array)
    # analytical gradients of the prototype must match finite differences
    fd_grads = nb_cos_finite_difference_gradients(
      nbf=nbf, proxy=proxy, sites_cart=sites_cart)
    print >> log, list(gradient_array)
    print >> log, list(fd_grads)
    assert approx_equal(gradient_array, fd_grads)
    # the C++ implementation must agree with the prototype
    nc = geometry_restraints.nonbonded_cos(
      sites=list(sites_cart),
      vdw_distance=proxy.vdw_distance,
      function=geometry_restraints.cos_repulsion_function(
        max_residual=nbf.max_residual, exponent=nbf.exponent))
    assert approx_equal(nc.residual(), r)
    print >> log, nc.gradients()
    assert approx_equal(nc.gradients(), gradient_array)
    print >> log
    # scan a range of distances along the unit vector from site 0 to site 1
    sc0 = matrix.col(sites_cart[0])
    v01 = matrix.col(sites_cart[1]) - sc0
    v01 *= 1/abs(v01)
    for i in xrange(21,-1,-1):
      for eps in [0, 1.e-3, -1.e-3]:
        # NOTE(review): under Python 2, i/10 is integer division, so d only
        # takes values near 0, 1 and 2 — possibly i/10. was intended to scan
        # in 0.1 steps; verify against the history of this test.
        d = i/10 + eps
        sites_cart[1] = sc0 + d * v01
        gradient_array = flex.vec3_double(2, (0,0,0))
        r = nbf.residual_and_gradients(
          proxy=proxy,
          sites_cart=sites_cart,
          gradient_array=gradient_array)
        print >> log, "d, r:", d, r
        if (d == 2):
          # exactly at the vdw distance the residual must vanish
          assert abs(r) <= 1.e-8
        else:
          fd_grads = nb_cos_finite_difference_gradients(
            nbf=nbf, proxy=proxy, sites_cart=sites_cart)
          print >> log, list(gradient_array)
          print >> log, list(fd_grads)
          assert approx_equal(gradient_array, fd_grads)
          nc = geometry_restraints.nonbonded_cos(
            sites=list(sites_cart),
            vdw_distance=proxy.vdw_distance,
            function=geometry_restraints.cos_repulsion_function(
              max_residual=nbf.max_residual, exponent=nbf.exponent))
          assert approx_equal(nc.residual(), r)
          print >> log, nc.gradients()
          assert approx_equal(nc.gradients(), gradient_array)
          print >> log
def exercise_nonbonded():
  """Exercise nonbonded_params, nonbonded proxies (simple and asu), the
  repulsion functions (prolsq, inverse_power, cos, gaussian) and
  nonbonded_sorted_asu_proxies, pinning exact regression values."""
  # nonbonded_params: defaults, constructor keywords, attribute setters
  params = geometry_restraints.nonbonded_params()
  assert params.distance_table.size() == 0
  assert params.radius_table.size() == 0
  assert approx_equal(params.factor_1_4_interactions, 2/3.)
  assert approx_equal(params.const_shrink_1_4_interactions, 0)
  assert approx_equal(params.default_distance, 0)
  assert approx_equal(params.minimum_distance, 0)
  params = geometry_restraints.nonbonded_params(
    factor_1_4_interactions=.5,
    const_shrink_1_4_interactions=.1,
    default_distance=1,
    minimum_distance=2)
  assert approx_equal(params.factor_1_4_interactions, .5)
  assert approx_equal(params.const_shrink_1_4_interactions, .1)
  assert approx_equal(params.default_distance, 1)
  assert approx_equal(params.minimum_distance, 2)
  params.factor_1_4_interactions = .4
  assert approx_equal(params.factor_1_4_interactions, .4)
  params.const_shrink_1_4_interactions = .2
  assert approx_equal(params.const_shrink_1_4_interactions, .2)
  params.default_distance = .3
  assert approx_equal(params.default_distance, .3)
  params.minimum_distance = .6
  assert approx_equal(params.minimum_distance, .6)
  # nonbonded_simple_proxy: with and without a symmetry operation
  p = geometry_restraints.nonbonded_simple_proxy(
    i_seqs=[0,1],
    vdw_distance=5)
  assert p.i_seqs == (0,1)
  assert p.rt_mx_ji is None
  assert approx_equal(p.vdw_distance, 5)
  ps = geometry_restraints.nonbonded_simple_proxy(
    i_seqs=[2,3],
    rt_mx_ji=sgtbx.rt_mx("y,y-x,0.5+Z"),
    vdw_distance=4)
  assert ps.i_seqs == (2,3)
  assert str(ps.rt_mx_ji) == "y,-x+y,z+1/2"
  assert approx_equal(ps.vdw_distance, 4)
  # nonbonded_prolsq restraint: regression values for residual and gradients
  r = geometry_restraints.nonbonded_prolsq(
    sites=[(1,2,3),(2,4,6)],
    vdw_distance=5,
    function=geometry_restraints.prolsq_repulsion_function())
  assert approx_equal(r.sites, [(1,2,3),(2,4,6)])
  assert approx_equal(r.vdw_distance, 5)
  assert approx_equal(r.function.c_rep, 16)
  assert approx_equal(r.delta, 3.74165738677)
  assert approx_equal(r.residual(), 40.1158130612)
  assert approx_equal(r.gradients(),
    [(34.081026602378813, 68.162053204757626, 102.24307980713644),
     (-34.081026602378813, -68.162053204757626, -102.24307980713644)])
  sites_cart = flex.vec3_double([(1,2,3),(2,4,6)])
  r = geometry_restraints.nonbonded_prolsq(
    sites_cart=sites_cart,
    proxy=p,
    function=geometry_restraints.prolsq_repulsion_function())
  assert approx_equal(r.sites, [(1,2,3),(2,4,6)])
  assert approx_equal(r.vdw_distance, 5)
  assert approx_equal(r.function.c_rep, 16)
  assert approx_equal(r.delta, 3.74165738677)
  # array functions over shared_nonbonded_simple_proxy, one per function type
  proxies = geometry_restraints.shared_nonbonded_simple_proxy([p,p])
  for proxy in proxies:
    assert approx_equal(proxy.vdw_distance, 5)
  assert approx_equal(geometry_restraints.nonbonded_deltas(
    sites_cart=sites_cart,
    proxies=proxies),
    [3.74165738677]*2)
  assert approx_equal(geometry_restraints.nonbonded_residuals(
    sites_cart=sites_cart,
    proxies=proxies,
    function=geometry_restraints.prolsq_repulsion_function()),
    [40.1158130612]*2)
  assert approx_equal(geometry_restraints.nonbonded_residuals(
    sites_cart=sites_cart,
    proxies=proxies,
    function=geometry_restraints.inverse_power_repulsion_function(10)),
    [1.3363062095621219]*2)
  assert approx_equal(geometry_restraints.nonbonded_residuals(
    sites_cart=sites_cart,
    proxies=proxies,
    function=geometry_restraints.cos_repulsion_function(max_residual=13)),
    [1.9279613709216095]*2)
  assert approx_equal(geometry_restraints.nonbonded_residuals(
    sites_cart=sites_cart,
    proxies=proxies,
    function=geometry_restraints.gaussian_repulsion_function(
      max_residual=12, norm_height_at_vdw_distance=0.2)),
    [4.8725695136639997]*2)
  residual_sum = geometry_restraints.nonbonded_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None,
    function=geometry_restraints.prolsq_repulsion_function())
  assert approx_equal(residual_sum, 2*40.1158130612)
  residual_sum = geometry_restraints.nonbonded_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None,
    function=geometry_restraints.inverse_power_repulsion_function(10))
  assert approx_equal(residual_sum, 2*1.3363062095621219)
  residual_sum = geometry_restraints.nonbonded_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None,
    function=geometry_restraints.cos_repulsion_function(max_residual=13))
  assert approx_equal(residual_sum, 2*1.9279613709216095)
  residual_sum = geometry_restraints.nonbonded_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None,
    function=geometry_restraints.gaussian_repulsion_function(
      max_residual=12, norm_height_at_vdw_distance=0.2))
  assert approx_equal(residual_sum, 2*4.8725695136639997)
  # nonbonded_asu_proxy via a pair generator
  sites_cart = flex.vec3_double([[1,2,3],[2,3,4]])
  asu_mappings = direct_space_asu.non_crystallographic_asu_mappings(
    sites_cart=sites_cart)
  pair_generator = crystal.neighbors_fast_pair_generator(
    asu_mappings=asu_mappings,
    distance_cutoff=5)
  p = geometry_restraints.nonbonded_asu_proxy(
    pair=pair_generator.next(),
    vdw_distance=2)
  assert pair_generator.at_end()
  assert p.i_seq == 0
  assert p.j_seq == 1
  assert p.j_sym == 0
  assert approx_equal(p.vdw_distance, 2)
  p.vdw_distance = 3
  assert approx_equal(p.vdw_distance, 3)
  p.vdw_distance = 2
  sym_proxies = geometry_restraints.shared_nonbonded_asu_proxy([p,p])
  for proxy in sym_proxies:
    assert approx_equal(proxy.vdw_distance, 2)
    proxy.vdw_distance = 3
  for proxy in sym_proxies:
    assert approx_equal(proxy.vdw_distance, 3)
    proxy.vdw_distance = 2
  # prolsq_repulsion_function with custom parameters
  f = geometry_restraints.prolsq_repulsion_function(
    c_rep=1, k_rep=4, irexp=2, rexp=3)
  assert approx_equal(f.c_rep, 1)
  assert approx_equal(f.k_rep, 4)
  assert approx_equal(f.irexp, 2)
  assert approx_equal(f.rexp, 3)
  assert approx_equal(f.residual(vdw_distance=3, delta=2.9), 2492774.43588)
  assert approx_equal(f.residual(vdw_distance=3, delta=3), 2460375.0)
  r = geometry_restraints.nonbonded_prolsq(
    sites=list(sites_cart),
    vdw_distance=p.vdw_distance,
    function=f)
  assert approx_equal(r.function.c_rep, 1)
  assert approx_equal(r.diff_vec, [-1,-1,-1])
  assert approx_equal(r.delta**2, 3)
  assert approx_equal(r.residual(), 226981)
  assert approx_equal(r.gradients(),
    [(22326.0, 22326.0, 22326.0), (-22326.0, -22326.0, -22326.0)])
  # prolsq_repulsion_function with default parameters
  f = geometry_restraints.prolsq_repulsion_function()
  assert approx_equal(f.residual(vdw_distance=3, delta=2.9), 0.0016)
  assert approx_equal(f.residual(vdw_distance=3, delta=3), 0)
  r = geometry_restraints.nonbonded_prolsq(
    sites=list(sites_cart),
    vdw_distance=p.vdw_distance,
    function=f)
  assert approx_equal(r.function.c_rep, 16)
  assert approx_equal(r.function.k_rep, 1)
  assert approx_equal(r.function.irexp, 1)
  assert approx_equal(r.function.rexp, 4)
  assert approx_equal(r.diff_vec, [-1,-1,-1])
  assert approx_equal(r.delta**2, 3)
  assert approx_equal(r.residual(), 0.0824764182859)
  assert approx_equal(r.gradients(),
    [(0.71084793153727288, 0.71084793153727288, 0.71084793153727288),
     (-0.71084793153727288, -0.71084793153727288, -0.71084793153727288)])
  #
  assert approx_equal(f.residual(vdw_distance=3, delta=2.9), 0.0016)
  assert approx_equal(f.residual(vdw_distance=3, delta=3), 0)
  # inverse_power_repulsion_function for a range of exponents
  for irexp in [1,2,3,4,5]:
    f = geometry_restraints.inverse_power_repulsion_function(
      nonbonded_distance_cutoff=100,
      k_rep=4,
      irexp=irexp)
    assert approx_equal(f.k_rep, 4)
    assert approx_equal(f.irexp, irexp)
    if (irexp == 1):
      assert approx_equal(f.residual(vdw_distance=3, delta=2.9), 4.13793103448)
      assert approx_equal(f.residual(vdw_distance=3, delta=3), 4)
    r = geometry_restraints.nonbonded_inverse_power(
      sites=list(sites_cart),
      vdw_distance=p.vdw_distance,
      function=f)
    assert approx_equal(r.diff_vec, [-1,-1,-1])
    assert approx_equal(r.delta**2, 3)
    # residual = k_rep * vdw / delta**irexp; gradient follows analytically
    assert approx_equal(r.residual(), f.k_rep*p.vdw_distance/r.delta**f.irexp)
    g = -f.irexp*f.k_rep*p.vdw_distance/(r.delta**(f.irexp+1))/r.delta
    assert approx_equal(r.gradients(), [(-g,-g,-g),(g,g,g)])
  # residual/gradients vanish beyond the nonbonded distance cutoff
  for nonbonded_distance_cutoff in [1,2]:
    f = geometry_restraints.inverse_power_repulsion_function(
      nonbonded_distance_cutoff=nonbonded_distance_cutoff,
      k_rep=4,
      irexp=2)
    r = geometry_restraints.nonbonded_inverse_power(
      sites=list(sites_cart),
      vdw_distance=p.vdw_distance,
      function=f)
    if (nonbonded_distance_cutoff == 1):
      assert approx_equal(r.residual(), 0)
      assert approx_equal(r.gradients(), [(0,0,0),(0,0,0)])
    else:
      assert not_approx_equal(r.residual(), 0)
      assert not_approx_equal(r.gradients(), [(0,0,0),(0,0,0)])
  # cos_repulsion_function, cross-checked against py_nonbonded_cos
  for exponent in [1,2,3]:
    f = geometry_restraints.cos_repulsion_function(
      max_residual=13,
      exponent=exponent)
    assert approx_equal(f.max_residual, 13)
    assert approx_equal(f.exponent, exponent)
    if (exponent == 1):
      assert approx_equal(
        f.residual(vdw_distance=3, delta=2.9), 0.0356076801062)
      assert approx_equal(f.residual(vdw_distance=3, delta=3), 0)
    r = geometry_restraints.nonbonded_cos(
      sites=list(sites_cart),
      vdw_distance=p.vdw_distance,
      function=f)
    assert approx_equal(r.diff_vec, [-1,-1,-1])
    assert approx_equal(r.delta**2, 3)
    pynbc = py_nonbonded_cos(max_residual=f.max_residual, exponent=f.exponent)
    gradient_array = flex.vec3_double(sites_cart.size(), (0,0,0))
    pr = pynbc.residual_and_gradients(
      proxy=geometry_restraints.nonbonded_simple_proxy(
        i_seqs=(0,1), vdw_distance=r.vdw_distance),
      sites_cart=sites_cart,
      gradient_array=gradient_array)
    assert approx_equal(r.residual(), pr)
    assert approx_equal(r.gradients(), gradient_array)
  # gaussian_repulsion_function, regression values per norm height
  expected_residuals = iter([
    1.39552236695,
    2.66705891104,
    3.89565157972])
  expected_gradients = iter([
    2.45678379644,
    2.88800522497,
    2.92825483126])
  for norm_height_at_vdw_distance in [0.1, 0.2, 0.3]:
    f = geometry_restraints.gaussian_repulsion_function(
      max_residual=12,
      norm_height_at_vdw_distance=norm_height_at_vdw_distance)
    assert approx_equal(f.max_residual, 12)
    assert approx_equal(
      f.norm_height_at_vdw_distance(), norm_height_at_vdw_distance)
    assert approx_equal(
      f.residual(vdw_distance=3, delta=3), 12*norm_height_at_vdw_distance)
    assert approx_equal(
      f.residual(vdw_distance=3, delta=2.9), expected_residuals.next())
    r = geometry_restraints.nonbonded_gaussian(
      sites=list(sites_cart),
      vdw_distance=p.vdw_distance,
      function=f)
    assert approx_equal(r.diff_vec, [-1,-1,-1])
    assert approx_equal(r.delta**2, 3)
    e = expected_gradients.next()
    assert approx_equal(r.gradients(), [[e]*3,[-e]*3])
  # nonbonded_sorted_asu_proxies: array functions and gradient caching
  sorted_asu_proxies = geometry_restraints.nonbonded_sorted_asu_proxies(
    asu_mappings=asu_mappings)
  sorted_asu_proxies.process(proxies=sym_proxies)
  assert approx_equal(
    flex.pow2(geometry_restraints.nonbonded_deltas(
      sites_cart=sites_cart,
      sorted_asu_proxies=sorted_asu_proxies)),
    [3]*2)
  assert approx_equal(
    geometry_restraints.nonbonded_residuals(
      sites_cart=sites_cart,
      sorted_asu_proxies=sorted_asu_proxies,
      function=geometry_restraints.prolsq_repulsion_function()),
    [0.0824764182859]*2)
  assert approx_equal(
    geometry_restraints.nonbonded_residual_sum(
      sites_cart=sites_cart,
      sorted_asu_proxies=sorted_asu_proxies,
      gradient_array=None,
      function=geometry_restraints.prolsq_repulsion_function()),
    0.0824764182859*2)
  for disable_cache in [False, True]:
    gradient_array = flex.vec3_double(2, [0,0,0])
    assert approx_equal(
      geometry_restraints.nonbonded_residual_sum(
        sites_cart=sites_cart,
        sorted_asu_proxies=sorted_asu_proxies,
        gradient_array=gradient_array,
        function=geometry_restraints.prolsq_repulsion_function(),
        disable_cache=disable_cache),
      0.0824764182859*2)
    assert approx_equal(gradient_array,
      [(1.4216958630745458, 1.4216958630745458, 1.4216958630745458),
       (-1.4216958630745458, -1.4216958630745458, -1.4216958630745458)])
  # mixing simple and asu proxies in one container
  sorted_asu_proxies = geometry_restraints.nonbonded_sorted_asu_proxies(
    asu_mappings=asu_mappings)
  sorted_asu_proxies.process(proxy=proxies[0])
  sorted_asu_proxies.process(proxy=sym_proxies[0], sym_excl_flag=False)
  assert sorted_asu_proxies.simple.size() == 2
  assert sorted_asu_proxies.asu.size() == 0
  assert sorted_asu_proxies.n_total() == 2
  residual_0 = geometry_restraints.nonbonded_prolsq(
    sites_cart=sites_cart,
    proxy=proxies[0],
    function=geometry_restraints.prolsq_repulsion_function()).residual()
  residual_1 = geometry_restraints.nonbonded_prolsq(
    sites_cart=sites_cart,
    asu_mappings=asu_mappings,
    proxy=sym_proxies[0],
    function=geometry_restraints.prolsq_repulsion_function()).residual()
  gradient_array = flex.vec3_double(2, [0,0,0])
  assert approx_equal(geometry_restraints.nonbonded_residual_sum(
    sites_cart=sites_cart,
    sorted_asu_proxies=sorted_asu_proxies,
    gradient_array=gradient_array,
    function=geometry_restraints.prolsq_repulsion_function()),
    residual_0+residual_1)
  assert approx_equal(gradient_array,
    [(1290.2817767146657, 1290.2817767146657, 1290.2817767146657),
     (-1290.2817767146657, -1290.2817767146657, -1290.2817767146657)])
  sorted_asu_proxies.process(sorted_asu_proxies.simple.deep_copy())
  assert sorted_asu_proxies.simple.size() == 4
  sorted_asu_proxies.process(sorted_asu_proxies.asu.deep_copy())
  assert sorted_asu_proxies.asu.size() == 0
  # customized_copy of the prolsq function
  f = geometry_restraints.prolsq_repulsion_function()
  assert approx_equal((f.c_rep, f.k_rep, f.irexp, f.rexp), (16,1,1,4))
  c = f.customized_copy()
  assert approx_equal((c.c_rep, c.k_rep, c.irexp, c.rexp), (16,1,1,4))
  c = f.customized_copy(c_rep=8, k_rep=2, irexp=3, rexp=5)
  assert approx_equal((c.c_rep, c.k_rep, c.irexp, c.rexp), (8,2,3,5))
def exercise_angle():
  """Exercise angle_proxy, angle, shared_angle_proxy and the angle array
  functions (deltas/residuals/residual_sum), with and without symmetry
  operations, plus proxy_select/proxy_remove and the slack parameter.

  Fixes relative to the previous revision:
  * check() now calls as_double_array() — previously the bound methods
    themselves were compared, not the matrix elements.
  * "p.origin_ud = 0" was a typo for "p.origin_id = 0", so the origin_id
    was never reset before building the shared proxy array.
  """
  # test without symmetry operations
  def check(p, i_seqs=(2,1,0), sym_ops=None, angle_ideal=95,
      weight=1, slack=0.0, origin_id=0):
    # Verify all attributes of an angle proxy against expected values.
    assert p.i_seqs == i_seqs
    if p.sym_ops is not None:
      for i in range(len(sym_ops)):
        # compare the numerical representations of the symmetry operations
        assert p.sym_ops[i].as_double_array() == sym_ops[i].as_double_array()
    else:
      assert sym_ops is None # while p.sym_ops IS None
    assert approx_equal(p.angle_ideal, angle_ideal)
    assert approx_equal(p.weight, weight)
    assert p.slack == slack
    assert p.origin_id == origin_id
  p = geometry_restraints.angle_proxy(
    i_seqs=[2,1,0],
    angle_ideal=95,
    weight=1)
  check(p)
  # attribute setters round-trip
  p.angle_ideal = 86
  p.weight = 78
  p.slack = 12
  p.origin_id = 3
  check(p, angle_ideal=86, weight=78, slack=12, origin_id=3)
  p.angle_ideal = 95
  p.weight = 1
  p.slack = 0
  p.origin_id = 0
  check(p)
  p = geometry_restraints.angle_proxy(
    i_seqs=[2,1,0],
    sym_ops=None,
    angle_ideal=95,
    weight=1)
  check(p)
  p = p.sort_i_seqs()
  check(p, i_seqs=(0,1,2))
  c = geometry_restraints.angle_proxy(
    i_seqs=[3,4,5],
    proxy=p)
  check(c, i_seqs=(3,4,5))
  c = p.scale_weight(factor=3.14)
  check(c, i_seqs=(0,1,2), weight=3.14)
  # a 90-degree angle restrained to 95 degrees: delta=5, residual=25
  a = geometry_restraints.angle(
    sites=[(1,0,0),(0,0,0),(0,1,0)],
    angle_ideal=95,
    weight=1)
  assert approx_equal(a.sites, [(1,0,0),(0,0,0),(0,1,0)])
  assert approx_equal(a.angle_ideal, 95)
  assert approx_equal(a.weight, 1)
  assert a.origin_id == 0
  assert a.have_angle_model
  assert approx_equal(a.angle_model, 90)
  assert approx_equal(a.delta, 5)
  assert approx_equal(a.residual(), 25)
  assert approx_equal(a.gradients(epsilon=1.e-100),
    ((0.0, 572.95779513082323, 0.0),
     (-572.95779513082323, -572.95779513082323, 0.0),
     (572.95779513082323, 0.0, 0.0)))
  assert a.slack == 0.0
  sites_cart = flex.vec3_double([(1,0,0),(0,0,0),(0,1,0)])
  p.origin_id = 3
  a = geometry_restraints.angle(
    sites_cart=sites_cart,
    proxy=p)
  assert approx_equal(a.sites, [(1,0,0),(0,0,0),(0,1,0)])
  assert approx_equal(a.angle_ideal, 95)
  assert approx_equal(a.weight, 1)
  assert a.origin_id == 3
  assert a.have_angle_model
  assert approx_equal(a.angle_model, 90)
  proxies = geometry_restraints.shared_angle_proxy([p,p])
  for proxy in proxies:
    assert approx_equal(proxy.weight, 1)
  assert approx_equal(geometry_restraints.angle_deltas(
    sites_cart=sites_cart,
    proxies=proxies), [5]*2)
  assert approx_equal(geometry_restraints.angle_residuals(
    sites_cart=sites_cart,
    proxies=proxies), [25]*2)
  residual_sum = geometry_restraints.angle_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, 2*25)
  # test with symmetry operations
  sym_ops = (sgtbx.rt_mx('-1+x,+y,+z'),sgtbx.rt_mx(),sgtbx.rt_mx())
  p = geometry_restraints.angle_proxy(
    i_seqs=[2,1,0],
    sym_ops=sym_ops,
    angle_ideal=95,
    weight=1)
  check(p, sym_ops=sym_ops)
  c = geometry_restraints.angle_proxy(
    i_seqs=[3,4,5],
    proxy=p)
  check(c, i_seqs=(3,4,5), sym_ops=sym_ops)
  c = p.scale_weight(factor=5.82)
  check(c, sym_ops=sym_ops, weight=5.82)
  # sorting permutes the sym_ops together with the i_seqs
  p = p.sort_i_seqs()
  check(p,
    i_seqs=(0,1,2),
    sym_ops=(sgtbx.rt_mx(),sgtbx.rt_mx(),sgtbx.rt_mx('-1+x,+y,+z')))
  unit_cell = uctbx.unit_cell([15,25,30,90,90,90])
  sites_cart = flex.vec3_double([(1,0,0),(0,1,0),(14,0,0)])
  p.origin_id = 3
  a = geometry_restraints.angle(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p)
  # the sym_op maps site (14,0,0) to (-1,0,0)
  assert approx_equal(a.sites, [(1,0,0),(0,1,0),(-1,0,0)])
  assert approx_equal(a.angle_ideal, 95)
  assert approx_equal(a.weight, 1)
  assert a.origin_id == 3
  assert a.have_angle_model
  assert approx_equal(a.angle_model, 90)
  assert approx_equal(a.delta, 5)
  assert approx_equal(a.residual(), a.weight*a.delta**2)
  p.origin_id = 0  # was "p.origin_ud = 0" (typo): reset before building array
  proxies = geometry_restraints.shared_angle_proxy()
  for i in range(10): proxies.append(p)
  assert approx_equal(geometry_restraints.angle_deltas(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [5]*10)
  assert approx_equal(geometry_restraints.angle_residuals(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [25]*10)
  residual_sum = geometry_restraints.angle_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, 10*25)
  # check proxies with and without sym_ops are happy side-by-side
  p_sym = geometry_restraints.angle_proxy(
    i_seqs=[2,1,0],
    sym_ops=sym_ops,
    angle_ideal=95,
    weight=1)
  check(p_sym, sym_ops=sym_ops)
  angle_sym = geometry_restraints.angle(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p_sym)
  p_no_sym = geometry_restraints.angle_proxy(
    i_seqs=[2,1,0],
    angle_ideal=95,
    weight=1,
    origin_id=3)
  check(p_no_sym, origin_id=3)
  angle_no_sym = geometry_restraints.angle(
    sites_cart=sites_cart,
    proxy=p_no_sym)
  proxies = geometry_restraints.shared_angle_proxy([p_sym,p_no_sym])
  assert approx_equal(geometry_restraints.angle_deltas(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [angle_sym.delta,angle_no_sym.delta])
  assert approx_equal(geometry_restraints.angle_residuals(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [angle_sym.residual(),angle_no_sym.residual()])
  residual_sum = geometry_restraints.angle_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, angle_sym.residual() + angle_no_sym.residual())
  # proxy_select by iselection
  proxies = geometry_restraints.shared_angle_proxy([
    geometry_restraints.angle_proxy([0,1,2], 1, 2, 0, 0),
    geometry_restraints.angle_proxy([1,2,3], 2, 3, 0, 1),
    geometry_restraints.angle_proxy([2,3,0], 3, 4, 0, 1),
    geometry_restraints.angle_proxy([3,1,2], 4, 5, 0, 3)])
  selected = proxies.proxy_select(n_seq=4, iselection=flex.size_t([0,2]))
  assert selected.size() == 0
  selected = proxies.proxy_select(n_seq=4, iselection=flex.size_t([0,1,2]))
  assert selected.size() == 1
  check(selected[0], i_seqs=(0,1,2), angle_ideal=1, weight=2)
  selected = proxies.proxy_select(n_seq=4, iselection=flex.size_t([0,2,3]))
  assert selected.size() == 1
  check(selected[0], i_seqs=(1,2,0), angle_ideal=3, weight=4, origin_id=1)
  selected = proxies.proxy_select(n_seq=4, iselection=flex.size_t([1,2,3]))
  assert selected.size() == 2
  check(selected[0], i_seqs=(0,1,2), angle_ideal=2, weight=3, origin_id=1)
  check(selected[1], i_seqs=(2,0,1), angle_ideal=4, weight=5, origin_id=3)
  selected = proxies.proxy_select(n_seq=4, iselection=flex.size_t([0,1,2,3]))
  assert selected.size() == 4
  # proxy_remove by boolean selection and by origin_id
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True]))
  assert rest.size() == 0
  rest = proxies.proxy_remove(selection=flex.bool([False,True,True,True]))
  assert rest.size() == 2
  check(rest[0], i_seqs=(0,1,2), angle_ideal=1, weight=2, origin_id=0)
  check(rest[1], i_seqs=(2,3,0), angle_ideal=3, weight=4, origin_id=1)
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,False]))
  assert rest.size() == 3
  rest = proxies.proxy_remove(origin_id=1)
  assert rest.size() == 2
  check(rest[0], i_seqs=(0,1,2), angle_ideal=1, weight=2, origin_id=0)
  check(rest[1], i_seqs=(3,1,2), angle_ideal=4, weight=5, origin_id=3)
  # proxy_select by origin_id
  selected = proxies.proxy_select(origin_id=5)
  assert selected.size() == 0
  selected = proxies.proxy_select(origin_id=0)
  assert selected.size() == 1
  check(selected[0], i_seqs=(0,1,2), angle_ideal=1, weight=2)
  selected = proxies.proxy_select(origin_id=1)
  assert selected.size() == 2
  check(selected[0], i_seqs=(1,2,3), angle_ideal=2, weight=3, origin_id=1)
  check(selected[1], i_seqs=(2,3,0), angle_ideal=3, weight=4, origin_id=1)
  # two placements of the same symmetry operation must give the same
  # delta/residual (but different gradients w.r.t. the original sites)
  unit_cell = uctbx.unit_cell([15,11.5,16.25,90,99.5,90])
  sites_cart = flex.vec3_double(
    [(12.87,0.10,9.04),(12.54,0.44,7.73),(13.47,0.34,6.71)])
  rt_mx = sgtbx.rt_mx('2-X,-Y,1-Z')
  u_mx = sgtbx.rt_mx()
  i_seqs = [2,0,1]
  a_sym_ops = [u_mx,rt_mx,rt_mx]
  b_sym_ops = [rt_mx,u_mx,u_mx]
  a_proxy = geometry_restraints.angle_proxy(
    i_seqs=i_seqs,
    sym_ops=a_sym_ops,
    angle_ideal=120,
    weight=1)
  b_proxy = geometry_restraints.angle_proxy(
    i_seqs=i_seqs,
    sym_ops=b_sym_ops,
    angle_ideal=120,
    weight=1)
  a_angle = geometry_restraints.angle(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=a_proxy)
  b_angle = geometry_restraints.angle(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=b_proxy)
  assert approx_equal(a_angle.delta, b_angle.delta)
  assert approx_equal(a_angle.residual(), b_angle.residual())
  assert not_approx_equal(a_angle.gradients(), b_angle.gradients())
  assert approx_equal(a_angle.grads_and_curvs(), [
    (82.47780595428577, 9.9176398449691465, -275.30239023150972),
    (-287.7152475786869, 62.177141285587624, 345.71504772803047),
    (205.23744162440113, -72.094781130556768, -70.412657496520751),
    (548.64354849364315, -141.3499058192476, 4777.1475618734567),
    (5313.4607140709404, -424.64322327702632, 7689.6945986452274),
    (2641.6450110035271, 222.22748382126554, 413.72502597622014)])
  # ... but the gradients mapped back to the asymmetric unit agree
  a_gradient_array = flex.vec3_double(sites_cart.size())
  a_residual_sum = geometry_restraints.angle_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=geometry_restraints.shared_angle_proxy([a_proxy]),
    gradient_array=a_gradient_array)
  b_gradient_array = flex.vec3_double(sites_cart.size())
  b_residual_sum = geometry_restraints.angle_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=geometry_restraints.shared_angle_proxy([b_proxy]),
    gradient_array=b_gradient_array)
  for a,b in zip(a_gradient_array, b_gradient_array):
    assert approx_equal(a, b)
  # analytical gradients vs finite differences
  fd_grads = finite_difference_gradients(
    restraint_type=geometry_restraints.angle,
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=a_proxy)
  for g,e in zip(a_gradient_array, fd_grads):
    assert approx_equal(g, e)
  #exercise slack parameter
  # ans: delta, delta_slack, residual
  ans = [(5,5,25),(5,2,4), (5,0,0), (5,0,0), (-5,-2,4), (-5,-5,25)]
  for i, vals in enumerate(
      [(95,0),(95,3),(95,5),(95,7),(85,3),(85,0)]):
    a = geometry_restraints.angle(
      sites=[(1,0,0),(0,0,0),(0,1,0)],
      angle_ideal=vals[0],
      weight=1,
      slack=vals[1])
    # print i, vals, a.residual()
    assert approx_equal(a.angle_model, 90)
    assert approx_equal(a.delta, ans[i][0])
    assert approx_equal(a.delta_slack, ans[i][1])
    assert approx_equal(a.residual(), ans[i][2])
def exercise_dihedral():
  """Exercise dihedral_proxy and dihedral restraints.

  Covers: proxy construction with and without sym_ops, attribute
  modification, sort_i_seqs (including reordering of sym_ops and sign
  flips of alt_angle_ideals), periodicity sign conventions,
  proxy_select/proxy_remove by atom selection and by origin_id, and
  analytical gradients against finite differences.
  """
  def check(p, i_seqs=None, sites=None, sym_ops=None, angle_ideal=-40,
      alt_angle_ideals=None, weight=1, periodicity=0, limit=-1, top_out=False,
      slack=0.0, origin_id=0, angle_model=0, angle_delta=0):
    # Verify the attributes of either a dihedral_proxy (i_seqs given) or
    # a dihedral restraint (sites given); exactly one must be supplied.
    assert [i_seqs, sites].count(None) == 1 # check for correct usage of procedure
    assert approx_equal(p.angle_ideal, angle_ideal)
    assert p.alt_angle_ideals == alt_angle_ideals
    assert approx_equal(p.weight, weight)
    assert approx_equal(p.periodicity, periodicity)
    assert approx_equal(p.limit, limit)
    assert p.top_out == top_out
    assert approx_equal(p.slack, slack)
    if i_seqs is not None:
      assert p.origin_id == origin_id
      assert p.i_seqs == i_seqs
      if p.sym_ops is not None:
        for i in range(len(p.sym_ops)):
          # Compare the numeric content of the symmetry operations.
          # (Previously the bound methods themselves were compared,
          # without the call parentheses, so the comparison relied on
          # rt_mx equality semantics instead of the array values.)
          assert p.sym_ops[i].as_double_array() == sym_ops[i].as_double_array()
      else:
        assert sym_ops is None # while p.sym_ops IS None
    else:
      assert approx_equal(p.sites, sites)
      assert p.have_angle_model
      assert approx_equal(p.angle_model, angle_model)
      assert approx_equal(p.delta, angle_delta)
  # defaults:
  p = geometry_restraints.dihedral_proxy(
    i_seqs=[3,2,1,0],
    angle_ideal=10,
    weight=1)
  check(p, i_seqs=(3,2,1,0), angle_ideal=10)
  u_mx = sgtbx.rt_mx() # unit matrix
  sym_ops = (u_mx, sgtbx.rt_mx('1+X,+Y,+Z'), u_mx, sgtbx.rt_mx('+X,-1+Y,2+Z'))
  p = geometry_restraints.dihedral_proxy(
    i_seqs=[3,2,1,0],
    sym_ops=sym_ops,
    angle_ideal=-40,
    weight=1,
    periodicity=2,
    alt_angle_ideals=[180])
  check(p, i_seqs=(3,2,1,0), sym_ops=sym_ops, alt_angle_ideals=(180,), periodicity=2)
  c = geometry_restraints.dihedral_proxy(
    i_seqs=[6,8,5,3],
    proxy=p)
  check(c, i_seqs=(6,8,5,3), sym_ops=sym_ops, alt_angle_ideals=(180,),
    periodicity=2)
  c = p.scale_weight(factor=0.37)
  check(c, i_seqs=(3,2,1,0), sym_ops=sym_ops, weight=0.37,
    alt_angle_ideals=(180,), periodicity=2)
  p = p.sort_i_seqs()
  # sort_i_seqs reorders the sym_ops together with the i_seqs
  check(p, i_seqs=(0,1,2,3),
    sym_ops=(
      sgtbx.rt_mx('+X,-1+Y,2+Z'), u_mx, sgtbx.rt_mx('1+X,+Y,+Z'), u_mx),
    alt_angle_ideals=(180,), periodicity=2)
  p.angle_ideal = 50
  p.weight = 2
  p.periodicity = 3
  p.alt_angle_ideals = None
  p.origin_id = 2
  check(p, i_seqs=(0,1,2,3), angle_ideal=50,
    sym_ops=(
      sgtbx.rt_mx('+X,-1+Y,2+Z'), u_mx, sgtbx.rt_mx('1+X,+Y,+Z'), u_mx),
    weight=2, periodicity=3, origin_id=2)
  p.angle_ideal = -40
  p.weight = 1
  p.periodicity = 2
  p.alt_angle_ideals = (25,-25)
  p.origin_id=0
  check(p, i_seqs=(0,1,2,3), angle_ideal=-40,
    sym_ops=(
      sgtbx.rt_mx('+X,-1+Y,2+Z'), u_mx, sgtbx.rt_mx('1+X,+Y,+Z'), u_mx),
    weight=1, periodicity=2, alt_angle_ideals=(25,-25,), origin_id=0)
  #
  u_mx = sgtbx.rt_mx() # unit matrix
  sym_ops = (u_mx, u_mx, sgtbx.rt_mx('+X,1+Y,+Z'), sgtbx.rt_mx('+X,1+Y,+Z'))
  unit_cell = uctbx.unit_cell([15,25,30,90,90,90])
  sites_cart = flex.vec3_double([(2,24,0),(1,24,0),(1,1,0),(0,1,0)])
  # harmonic (periodicity <= 0) vs. sinusoidal (periodicity > 0) forms
  for periodicity in [0, 1, -1]:
    p = geometry_restraints.dihedral_proxy(
      i_seqs=[0,1,2,3],
      sym_ops=sym_ops,
      angle_ideal=175,
      weight=1,
      periodicity=periodicity)
    d = geometry_restraints.dihedral(
      unit_cell=unit_cell,
      sites_cart=sites_cart,
      proxy=p)
    check(d, sites=[(2,24,0),(1,24,0),(1,26,0),(0,26,0)], angle_ideal=175,
      angle_model=180, angle_delta=-5, periodicity=periodicity)
    if (periodicity <= 0):
      assert approx_equal(d.residual(), 25)
      assert approx_equal(d.gradients(epsilon=1.e-100),
        ((0, 0, 572.95779513082323),
         (0, 0, -572.95779513082323),
         (0, 0, -572.95779513082323),
         (0, 0, 572.95779513082323)))
    else:
      assert approx_equal(d.residual(), 36.5308983192)
      assert approx_equal(d.gradients(epsilon=1.e-100),
        ((0.0, 0.0, 836.69513037751835),
         (0.0, 0.0, -836.69513037751835),
         (0.0, 0.0, -836.69513037751835),
         (0.0, 0.0, 836.69513037751835)))
  #
  p = geometry_restraints.dihedral_proxy(
    i_seqs=[0,1,2,3],
    sym_ops=sym_ops,
    angle_ideal=175,
    weight=1,
    periodicity=0)
  proxies = geometry_restraints.shared_dihedral_proxy([p,p])
  assert proxies.count_harmonic() == 2
  assert approx_equal(geometry_restraints.dihedral_deltas(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [-5]*2)
  assert approx_equal(geometry_restraints.dihedral_residuals(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [25]*2)
  residual_sum = geometry_restraints.dihedral_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, 2*25)
  # check proxies with and without sym_ops are happy side-by-side
  p_sym = geometry_restraints.dihedral_proxy(
    i_seqs=[0,1,2,3],
    sym_ops=sym_ops,
    angle_ideal=175,
    weight=1)
  check(p_sym, i_seqs=(0,1,2,3), sym_ops=sym_ops, angle_ideal=175)
  dihedral_sym = geometry_restraints.dihedral(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p_sym)
  check(dihedral_sym, sites=[(2,24,0),(1,24,0),(1,26,0),(0,26,0)],
    sym_ops=sym_ops, angle_ideal=175,
    weight=1, angle_model=180, angle_delta=-5)
  p_no_sym = geometry_restraints.dihedral_proxy(
    i_seqs=[0,1,2,3],
    angle_ideal=175,
    weight=1)
  check(p_no_sym, i_seqs=(0,1,2,3), sym_ops=None, angle_ideal=175)
  dihedral_no_sym = geometry_restraints.dihedral(
    sites_cart=sites_cart,
    proxy=p_no_sym)
  check(dihedral_no_sym, sites=[(2,24,0),(1,24,0),(1,1,0),(0,1,0)],
    sym_ops=sym_ops, angle_ideal=175,
    weight=1, angle_model=180, angle_delta=-5)
  proxies = geometry_restraints.shared_dihedral_proxy([p_sym,p_no_sym])
  assert approx_equal(geometry_restraints.dihedral_deltas(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [dihedral_sym.delta,dihedral_no_sym.delta])
  assert approx_equal(geometry_restraints.dihedral_residuals(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [dihedral_sym.residual(),dihedral_no_sym.residual()])
  residual_sum = geometry_restraints.dihedral_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(
    residual_sum,
    dihedral_sym.residual() + dihedral_no_sym.residual())
  #
  # equivalent-by-symmetry proxies (a vs. b) must give the same delta and
  # residual but different per-site gradients
  unit_cell = uctbx.unit_cell([15,11.5,16.25,90,99.5,90])
  sites_cart = flex.vec3_double(
    [(12.87,0.10,9.04),(12.54,0.44,7.73),(13.47,0.34,6.71)])
  rt_mx = sgtbx.rt_mx('2-X,-Y,1-Z')
  u_mx = sgtbx.rt_mx()
  i_seqs = [2,0,1,2]
  a_sym_ops = [u_mx,u_mx,u_mx,rt_mx]
  b_sym_ops = [u_mx,rt_mx,rt_mx,rt_mx]
  a_proxy = geometry_restraints.dihedral_proxy(
    i_seqs=i_seqs,
    sym_ops=a_sym_ops,
    angle_ideal=0,
    weight=1)
  b_proxy = geometry_restraints.dihedral_proxy(
    i_seqs=i_seqs,
    sym_ops=b_sym_ops,
    angle_ideal=0,
    weight=1)
  a_dihedral = geometry_restraints.dihedral(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=a_proxy)
  b_dihedral = geometry_restraints.dihedral(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=b_proxy)
  assert approx_equal(a_dihedral.delta, b_dihedral.delta)
  assert approx_equal(a_dihedral.residual(), b_dihedral.residual())
  assert not_approx_equal(a_dihedral.gradients(), b_dihedral.gradients())
  a_gradient_array = flex.vec3_double(sites_cart.size())
  a_residual_sum = geometry_restraints.dihedral_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=geometry_restraints.shared_dihedral_proxy([a_proxy]),
    gradient_array=a_gradient_array)
  b_gradient_array = flex.vec3_double(sites_cart.size())
  b_residual_sum = geometry_restraints.dihedral_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=geometry_restraints.shared_dihedral_proxy([b_proxy]),
    gradient_array=b_gradient_array)
  # summed gradients (mapped back through the sym_ops) must agree
  for a,b in zip(a_gradient_array, b_gradient_array):
    assert approx_equal(a, b)
  fd_grads = finite_difference_gradients(
    restraint_type=geometry_restraints.dihedral,
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=a_proxy)
  for g,e in zip(a_gradient_array, fd_grads):
    assert approx_equal(g, e)
  #
  p = geometry_restraints.dihedral_proxy(
    i_seqs=[3,2,1,0],
    angle_ideal=-40,
    weight=1,
    periodicity=-2,
    alt_angle_ideals=None)
  check(p, i_seqs=(3,2,1,0), periodicity=-2)
  p = p.sort_i_seqs()
  check(p, i_seqs=(0,1,2,3), periodicity=-2)
  p.angle_ideal = 50
  p.weight = 2
  p.periodicity = 3
  p.origin_id = 2
  check(p, i_seqs=(0,1,2,3), angle_ideal=50, weight=2,
    periodicity=3, origin_id=2)
  p.angle_ideal = -40
  p.weight = 1
  p.periodicity = -2
  p.origin_id = 0
  check(p, i_seqs=(0,1,2,3), angle_ideal=-40, weight=1, periodicity=-2)
  d = geometry_restraints.dihedral(
    sites=[(1,0,0),(0,0,0),(0,1,0),(1,0,1)],
    angle_ideal=-40,
    weight=1)
  check(d, sites=[(1,0,0),(0,0,0),(0,1,0),(1,0,1)],
    angle_ideal=-40, angle_model=-45, angle_delta=5)
  assert approx_equal(d.residual(), 25)
  assert approx_equal(d.gradients(epsilon=1.e-100),
    ((0, 0, -572.95779513082323),
     (286.47889756541161, 0, 286.47889756541161),
     (0, 0, 0),
     (-286.47889756541161, 0, 286.47889756541161)))
  sites_cart = flex.vec3_double([(1,0,0),(0,0,0),(0,1,0),(-1,0,-1)])
  d = geometry_restraints.dihedral(
    sites_cart=sites_cart,
    proxy=p)
  check(d, sites=[(1,0,0),(0,0,0),(0,1,0),(-1,0,-1)],
    angle_ideal=-40, periodicity=-2, angle_model=135, angle_delta=5)
  proxies = geometry_restraints.shared_dihedral_proxy([p,p])
  for proxy in proxies:
    assert proxy.periodicity == -2
  # elements of the shared array are modifiable in place and independent
  proxies[1].periodicity = 3
  assert proxies[1].periodicity == 3
  assert proxies[0].periodicity == -2
  proxies[1].periodicity = -2
  assert proxies[1].periodicity == -2
  assert approx_equal(geometry_restraints.dihedral_deltas(
    sites_cart=sites_cart,
    proxies=proxies), [5]*2)
  assert approx_equal(geometry_restraints.dihedral_residuals(
    sites_cart=sites_cart,
    proxies=proxies), [25]*2)
  residual_sum = geometry_restraints.dihedral_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, 2*25)
  #
  # enumerate all 24 site permutations; count how many give an equivalent
  # dihedral angle and verify sort_i_seqs recovers the original angle
  sites_cart = flex.vec3_double((
    (44.14, -3.376, 8.756),
    (43.598, -2.045, 8.726),
    (42.178, -2.036, 9.302),
    (41.818, -0.984, 10.006)))
  r_orig = geometry_restraints.dihedral(
    sites=list(sites_cart), angle_ideal=0, weight=1)
  perm = flex.size_t(xrange(4))
  n_perms = 0
  n_equiv = 0
  n_equiv_direct = 0
  while 1:
    sites_perm = sites_cart.select(perm)
    r = geometry_restraints.dihedral(
      sites=list(sites_perm), angle_ideal=0, weight=1)
    if ( abs(r.angle_model - r_orig.angle_model) < 1.e-6
         or abs(r.angle_model + r_orig.angle_model) < 1.e-6):
      n_equiv += 1
      p = geometry_restraints.dihedral_proxy(
        i_seqs=list(perm),
        angle_ideal=r.angle_model,
        weight=1)
      rp = geometry_restraints.dihedral(
        sites_cart=sites_cart,
        proxy=p)
      assert approx_equal(rp.angle_model, r.angle_model)
      if (abs(p.angle_ideal - r_orig.angle_model) < 1.e-6):
        n_equiv_direct += 1
      p_sorted = p.sort_i_seqs()
      assert p_sorted.i_seqs == (0,1,2,3)
      assert approx_equal(p_sorted.angle_ideal, r_orig.angle_model)
    #
    # sort_i_seqs flips the sign of alt_angle_ideals together with
    # angle_ideal when the permutation inverts the dihedral
    p = geometry_restraints.dihedral_proxy(
      i_seqs=list(perm),
      angle_ideal=12,
      weight=1,
      alt_angle_ideals=[30,-40])
    p_sorted = p.sort_i_seqs()
    if (p_sorted.angle_ideal > 0):
      assert approx_equal(p_sorted.alt_angle_ideals, [30,-40])
    else:
      assert approx_equal(p_sorted.alt_angle_ideals, [-30,40])
    #
    n_perms += 1
    if (not perm.next_permutation()):
      break
  assert n_perms == 24
  assert n_equiv == 4
  assert n_equiv_direct == 2
  #
  proxies = geometry_restraints.shared_dihedral_proxy([
    geometry_restraints.dihedral_proxy([0,1,2,3], 1, 2, 3),
    geometry_restraints.dihedral_proxy([1,2,3,4], 2, 3, 4, origin_id=1),
    geometry_restraints.dihedral_proxy([2,3,0,4], 3, 4, 5, origin_id=1),
    geometry_restraints.dihedral_proxy([3,1,2,4], 4, 5, 6, origin_id=3)])
  selected = proxies.proxy_select(n_seq=5, iselection=flex.size_t([0,2,4]))
  assert selected.size() == 0
  selected = proxies.proxy_select(n_seq=5, iselection=flex.size_t([1,2,3,4]))
  assert selected.size() == 2 # 2nd and 4th
  check(selected[0],
    i_seqs=(0,1,2,3), angle_ideal=2, weight=3, periodicity=4, origin_id=1)
  check(selected[1],
    i_seqs=(2,0,1,3), angle_ideal=4, weight=5, periodicity=6, origin_id=3)
  #
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True,True]))
  assert rest.size() == 0
  rest = proxies.proxy_remove(selection=flex.bool([False,True,True,True,True]))
  assert rest.size() == 2 # 1st and 3rd
  check(rest[0],
    i_seqs=(0,1,2,3), angle_ideal=1, weight=2, periodicity=3, origin_id=0)
  check(rest[1],
    i_seqs=(2,3,0,4), angle_ideal=3, weight=4, periodicity=5, origin_id=1)
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True,False]))
  assert rest.size() == 3 # all but 1st
  check(rest[0],
    i_seqs=(1,2,3,4), angle_ideal=2, weight=3, periodicity=4, origin_id=1)
  check(rest[1],
    i_seqs=(2,3,0,4), angle_ideal=3, weight=4, periodicity=5, origin_id=1)
  check(rest[2],
    i_seqs=(3,1,2,4), angle_ideal=4, weight=5, periodicity=6, origin_id=3)
  rest = proxies.proxy_remove(origin_id=1)
  assert rest.size() == 2 # 1st and 4th
  check(rest[0],
    i_seqs=(0,1,2,3), angle_ideal=1, weight=2, periodicity=3, origin_id=0)
  check(rest[1],
    i_seqs=(3,1,2,4), angle_ideal=4, weight=5, periodicity=6, origin_id=3)
  #
  selected = proxies.proxy_select(origin_id=5)
  assert selected.size() == 0
  selected = proxies.proxy_select(origin_id=0)
  assert selected.size() == 1
  check(selected[0],
    i_seqs=(0,1,2,3), angle_ideal=1, weight=2, periodicity=3, origin_id=0)
  selected = proxies.proxy_select(origin_id=1)
  assert selected.size() == 2
  check(selected[0],
    i_seqs=(1,2,3,4), angle_ideal=2, weight=3, periodicity=4, origin_id=1)
  check(selected[1],
    i_seqs=(2,3,0,4), angle_ideal=3, weight=4, periodicity=5, origin_id=1)
  #
  def get_d(angle_ideal, angle_model, periodicity, alt_angle_ideals=None):
    # Build a dihedral with the requested model angle by rotating the
    # fourth site about the z axis; sanity-check the resulting angle.
    # NOTE(review): weight=1/15**2 assumes true division (i.e. the module
    # imports "from __future__ import division") — confirm at file top.
    a = angle_model * math.pi / 180
    c, s = math.cos(a), math.sin(a)
    d = geometry_restraints.dihedral(
      sites=[(1,0,-1),(0,0,-1),(0,0,0),(c,s,0)],
      angle_ideal=angle_ideal,
      weight=1/15**2,
      periodicity=periodicity,
      alt_angle_ideals=alt_angle_ideals)
    v = math.fmod(d.angle_model-angle_model, 360)
    if (v < 0): v += 360
    if (v > 360-1e-8): v -= 360
    assert approx_equal(v, 0)
    return d
  #
  # write residual-vs-angle curves to .xy files for visual inspection
  for periodicity in xrange(1,6):
    f = open("plot_geo_restr_dihedral_periodicity_%d.xy" % periodicity, "w")
    for signed_periodicity in [periodicity, -periodicity]:
      for angle_model in xrange(0, 720+1, 1):
        d = get_d(
          angle_ideal=70,
          angle_model=angle_model,
          periodicity=signed_periodicity)
        print >> f, angle_model, d.residual()
      print >> f, "&"
    f.close()
  #
  intersection_angle = 120
  for angle_ideal in xrange(0, 720+5, 5):
    for periodicity in xrange(1,6):
      for signed_periodicity in [periodicity, -periodicity]:
        residuals = []
        for offset in [0, intersection_angle, -intersection_angle]:
          d = get_d(
            angle_ideal=angle_ideal,
            angle_model=angle_ideal + offset / periodicity,
            periodicity=signed_periodicity)
          residuals.append(d.residual())
        assert approx_equal(residuals[0], 0)
        assert approx_equal(residuals[1], residuals[2])
        assert approx_equal(residuals[1], d.weight * d.delta**2)
      #
      for offset in [intersection_angle, -intersection_angle]:
        for offset2 in [30, -30]:
          residuals = []
          for signed_periodicity in [periodicity, -periodicity]:
            d = get_d(
              angle_ideal=angle_ideal,
              angle_model=angle_ideal + (offset + offset2) / periodicity,
              periodicity=signed_periodicity)
            residuals.append(d.residual())
          if ((offset > 0) == (offset2 < 0)):
            assert residuals[0] > residuals[1]
          else:
            assert residuals[0] < residuals[1]
  #test alt_angle_ideals
  d = get_d(angle_ideal=180, angle_model=170, periodicity=2)
  assert approx_equal(d.delta, 10.)
  assert d.alt_angle_ideals is None
  d = get_d(angle_ideal=180, angle_model=170, periodicity=2,
    alt_angle_ideals=(25,))
  assert approx_equal(d.delta, 10.)
  d = get_d(angle_ideal=180, angle_model=10, periodicity=2,
    alt_angle_ideals=(25,))
  assert approx_equal(d.delta, -10.)
  d = get_d(angle_ideal=180, angle_model=10, periodicity=2,
    alt_angle_ideals=(15,))
  assert approx_equal(d.delta, 5.)
  d = get_d(angle_ideal=180, angle_model=10, periodicity=2,
    alt_angle_ideals=(15,345))
  assert approx_equal(d.delta, 5.)
  d = get_d(angle_ideal=180, angle_model=-10, periodicity=2,
    alt_angle_ideals=(15,345))
  assert approx_equal(d.delta, -5.)
  d = get_d(angle_ideal=30, angle_model=28, periodicity=1)
  assert approx_equal(d.delta, 2.)
  d = get_d(angle_ideal=30, angle_model=-30, periodicity=1)
  assert approx_equal(d.delta, 60.)
  d = get_d(angle_ideal=30, angle_model=28, periodicity=1,
    alt_angle_ideals=(-30,))
  assert approx_equal(d.delta, 2.)
  d = get_d(angle_ideal=30, angle_model=-28, periodicity=1,
    alt_angle_ideals=(-30,))
  assert approx_equal(d.delta, -2.)
def exercise_chirality():
  """Exercise chirality_proxy and chirality restraints.

  Covers: proxy construction and copying, scale_weight, sort_i_seqs
  (including the volume_ideal sign flip for odd permutations),
  residuals/gradients of the chirality restraint, and
  proxy_select/proxy_remove by atom selection and by origin_id.
  """
  def check(p, i_seqs=None, sites=None, volume_ideal=0, both_signs=False,
      weight=0, origin_id=0, volume_model=0, delta_sign=0, delta=0):
    # Verify the attributes of either a chirality_proxy (i_seqs given) or
    # a chirality restraint (sites given); exactly one must be supplied.
    assert [i_seqs, sites].count(None) == 1 # check for correct usage of procedure
    assert approx_equal(p.volume_ideal, volume_ideal)
    assert p.both_signs == both_signs
    assert approx_equal(p.weight, weight)
    if i_seqs is not None:
      assert p.origin_id == origin_id
      assert p.i_seqs == i_seqs
    else:
      assert approx_equal(p.sites, sites)
      assert approx_equal(p.volume_model, volume_model)
      assert approx_equal(p.delta, delta)
      assert approx_equal(p.delta_sign, delta_sign)
  p = geometry_restraints.chirality_proxy(
    i_seqs=[0,2,3,1],
    volume_ideal=4,
    both_signs=False,
    weight=1,
    origin_id=1)
  check(p, i_seqs=(0,2,3,1), volume_ideal=4, both_signs=False, weight=1,
    origin_id=1)
  c = geometry_restraints.chirality_proxy(
    i_seqs=[9,0,4,6],
    proxy=p)
  check(c, i_seqs=(9,0,4,6), volume_ideal=4, both_signs=False, weight=1,
    origin_id=1)
  c = p.scale_weight(factor=9.32)
  check(c, i_seqs=(0,2,3,1), volume_ideal=4, both_signs=False, weight=9.32,
    origin_id=1)
  p = p.sort_i_seqs()
  check(p, i_seqs=(0,1,2,3), volume_ideal=4, both_signs=False, weight=1,
    origin_id=1)
  p = geometry_restraints.chirality_proxy(
    i_seqs=[0,2,1,3],
    volume_ideal=-4,
    both_signs=False,
    weight=1)
  check(p, i_seqs=(0,2,1,3), volume_ideal=-4, both_signs=False, weight=1)
  p = p.sort_i_seqs()
  # sorting an odd permutation flips the sign of volume_ideal
  check(p, i_seqs=(0,1,2,3), volume_ideal=4, both_signs=False, weight=1)
  c = geometry_restraints.chirality(
    sites=[(1,0,0),(0,0,0),(0,1,0),(1,0,1)],
    volume_ideal=4,
    both_signs=False,
    weight=1)
  check(c, sites=[(1,0,0),(0,0,0),(0,1,0),(1,0,1)], volume_ideal=4,
    both_signs=False, weight=1, volume_model=-1, delta_sign=-1,
    delta=5)
  assert approx_equal(c.residual(), 25)
  assert approx_equal(c.gradients(),
    ((10, 0, -10),
     (-10, -10, 0),
     (-0, 10, -0),
     (-0, -0, 10)))
  sites_cart = flex.vec3_double([(1,0,0),(0,0,0),(0,1,0),(-1,0,-1)])
  c = geometry_restraints.chirality(
    sites_cart=sites_cart,
    proxy=p)
  check(c, sites=[(1,0,0),(0,0,0),(0,1,0),(-1,0,-1)], volume_ideal=4,
    both_signs=False, weight=1, volume_model=1, delta_sign=-1,
    delta=3)
  proxies = geometry_restraints.shared_chirality_proxy([p,p])
  for proxy in proxies:
    # check each stored copy (previously the loop re-checked the original
    # proxy "p" and never inspected the loop variable)
    check(proxy, i_seqs=(0,1,2,3), volume_ideal=4, both_signs=False, weight=1)
  assert approx_equal(geometry_restraints.chirality_deltas(
    sites_cart=sites_cart,
    proxies=proxies), [3]*2)
  assert approx_equal(geometry_restraints.chirality_residuals(
    sites_cart=sites_cart,
    proxies=proxies), [9]*2)
  residual_sum = geometry_restraints.chirality_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, 2*9)
  #
  proxies = geometry_restraints.shared_chirality_proxy([
    geometry_restraints.chirality_proxy([0,1,2,3], 1, False, 2, 0),
    geometry_restraints.chirality_proxy([1,2,3,4], 2, True, 3, 1),
    geometry_restraints.chirality_proxy([2,3,0,4], 3, True, 4, 1),
    geometry_restraints.chirality_proxy([3,1,2,4], 4, False, 5, 3)])
  selected = proxies.proxy_select(n_seq=5, iselection=flex.size_t([0,2,4]))
  assert selected.size() == 0
  selected = proxies.proxy_select(n_seq=5, iselection=flex.size_t([1,2,3,4]))
  assert selected.size() == 2 # 2nd and 4th
  check(selected[0], i_seqs=(0,1,2,3),
    volume_ideal=2, both_signs=True, weight=3, origin_id=1)
  check(selected[1], i_seqs=(2,0,1,3),
    volume_ideal=4, both_signs=False, weight=5, origin_id=3)
  #
  selected = proxies.proxy_select(origin_id=5)
  assert selected.size() == 0
  selected = proxies.proxy_select(origin_id=0)
  assert selected.size() == 1
  check(selected[0], i_seqs=(0,1,2,3),
    volume_ideal=1, both_signs=False, weight=2, origin_id=0)
  selected = proxies.proxy_select(origin_id=1)
  assert selected.size() == 2
  check(selected[0], i_seqs=(1,2,3,4),
    volume_ideal=2, both_signs=True, weight=3, origin_id=1)
  check(selected[1], i_seqs=(2,3,0,4),
    volume_ideal=3, both_signs=True, weight=4, origin_id=1)
  #
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True,True]))
  assert rest.size() == 0
  rest = proxies.proxy_remove(selection=flex.bool([False,True,True,True,True]))
  assert rest.size() == 2 # 1st and 3rd
  check(rest[0], i_seqs=(0,1,2,3),
    volume_ideal=1, both_signs=False, weight=2, origin_id=0)
  check(rest[1], i_seqs=(2,3,0,4),
    volume_ideal=3, both_signs=True, weight=4, origin_id=1)
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True,False]))
  assert rest.size() == 3 # all but 1st
  check(rest[0], i_seqs=(1,2,3,4),
    volume_ideal=2, both_signs=True, weight=3, origin_id=1)
  check(rest[1], i_seqs=(2,3,0,4),
    volume_ideal=3, both_signs=True, weight=4, origin_id=1)
  check(rest[2], i_seqs=(3,1,2,4),
    volume_ideal=4, both_signs=False, weight=5, origin_id=3)
  rest = proxies.proxy_remove(origin_id=1)
  assert rest.size() == 2 # 1st and 4th
  check(rest[0], i_seqs=(0,1,2,3),
    volume_ideal=1, both_signs=False, weight=2, origin_id=0)
  check(rest[1], i_seqs=(3,1,2,4),
    volume_ideal=4, both_signs=False, weight=5, origin_id=3)
def exercise_planarity():
  """Exercise planarity_proxy and planarity restraints.

  Covers: proxy construction with and without sym_ops, scale_weights,
  sort_i_seqs (weights and sym_ops reordered together with i_seqs),
  deltas/residual/gradients including symmetry-mapped finite-difference
  checks, and proxy_select/proxy_remove by atom selection and origin_id.
  """
  def check(p, i_seqs=None, sites=None, sym_ops=None, weights=[1,2,3,4],
      origin_id=0):
    # Verify the attributes of either a planarity_proxy (i_seqs given) or
    # a planarity restraint (sites given); exactly one must be supplied.
    assert [i_seqs, sites].count(None) == 1 # check for correct usage of procedure
    assert approx_equal(p.weights, weights)
    if i_seqs is not None:
      assert p.origin_id == origin_id
      # "assert" was missing here, so the i_seqs comparison was a no-op
      assert approx_equal(p.i_seqs, i_seqs)
      if p.sym_ops is not None:
        for i in range(len(p.sym_ops)):
          # compare the numeric content of the symmetry operations
          # (previously the uncalled bound methods were compared)
          assert p.sym_ops[i].as_double_array() == sym_ops[i].as_double_array()
      else:
        assert sym_ops is None # while p.sym_ops IS None
  weights = flex.double([1, 2, 3, 4])
  u_mx = sgtbx.rt_mx()
  sym_ops = (u_mx, sgtbx.rt_mx('1+x,y,z'), u_mx, sgtbx.rt_mx('1+x,1+y,z'))
  p = geometry_restraints.planarity_proxy(
    i_seqs=flex.size_t([3,1,0,2]),
    sym_ops=sym_ops,
    weights=weights)
  check(p, i_seqs=(3,1,0,2), sym_ops=sym_ops, weights=[1, 2, 3, 4])
  c = geometry_restraints.planarity_proxy(
    i_seqs=flex.size_t([8,6,3,1]),
    proxy=p)
  check(c, i_seqs=[8,6,3,1], sym_ops=sym_ops, weights=[1, 2, 3, 4])
  p.origin_id=1
  c = p.scale_weights(factor=4.94)
  check(c, i_seqs=[3,1,0,2], sym_ops=sym_ops,
    weights=[1*4.94, 2*4.94, 3*4.94, 4*4.94], origin_id=1)
  p.origin_id=0
  # scale_weights shares the i_seqs array but deep-copies the weights
  assert c.i_seqs.id() == p.i_seqs.id()
  assert c.weights.id() != p.weights.id()
  p = p.sort_i_seqs()
  check(p, i_seqs=(0,1,2,3),
    sym_ops=(u_mx, sgtbx.rt_mx('1+x,y,z'), sgtbx.rt_mx('1+x,1+y,z'), u_mx),
    weights=(3,2,4,1))
  #
  unit_cell = uctbx.unit_cell([15,25,30,90,90,90])
  sites_cart = flex.vec3_double([(1,24,1.1),(1,1,1),(14,1,1),(14,24,0.9)])
  expected_residual = 0.04
  expected_gradients = [(0,0,0.4), (0,0,0), (0,0,0), (0,0,-0.4)]
  p = geometry_restraints.planarity_proxy(
    i_seqs=(0,1,2,3),
    sym_ops=[u_mx, sgtbx.rt_mx('x,y,z'), sgtbx.rt_mx('x,1+y,z'), sgtbx.rt_mx('1-x,y,z')],
    weights=(2,2,2,2))
  planarity = geometry_restraints.planarity(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p)
  check(p, i_seqs=(0,1,2,3),
    sym_ops=[u_mx, sgtbx.rt_mx('x,y,z'), sgtbx.rt_mx('x,1+y,z'), sgtbx.rt_mx('1-x,y,z')],
    weights=(2,2,2,2))
  check(planarity, sites=[(1.0, 24.0, 1.1), (1.0, 1.0, 1.0), (14., 26.0, 1.0), (1., 24.0, 0.9)],
    sym_ops=[u_mx, sgtbx.rt_mx('x,y,z'), sgtbx.rt_mx('x,1+y,z'), sgtbx.rt_mx('1-x,y,z')],
    weights=(2,2,2,2))
  assert approx_equal(planarity.deltas(), (0.1, 0, 0, -0.1))
  assert approx_equal(planarity.residual(), 0.04)
  assert approx_equal(planarity.gradients(), expected_gradients)
  #
  proxies = geometry_restraints.shared_planarity_proxy([p,p])
  for proxy in proxies:
    # check each stored copy (previously this re-checked "p" itself and
    # never inspected the loop variable)
    check(proxy, i_seqs=(0,1,2,3),
      sym_ops=[u_mx, sgtbx.rt_mx('x,y,z'), sgtbx.rt_mx('x,1+y,z'), sgtbx.rt_mx('1-x,y,z')],
      weights=(2,2,2,2))
  assert eps_eq(geometry_restraints.planarity_deltas_rms(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [0.070710678118654821]*2)
  assert eps_eq(geometry_restraints.planarity_residuals(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [expected_residual]*2)
  residual_sum = geometry_restraints.planarity_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert eps_eq(residual_sum, 2*expected_residual)
  gradient_array = flex.vec3_double(proxy.i_seqs.size(), (0,0,0))
  residual_sum = geometry_restraints.planarity_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=gradient_array)
  assert eps_eq(residual_sum, 2*0.04)
  for g,e in zip(gradient_array, expected_gradients):
    assert eps_eq(g, matrix.col(e)*2)
  #
  sites_cart = flex.vec3_double([
    (-6.9010753374697966, 1.3017288659588333, -1.4469233441387523),
    (-4.947324488687852, -1.0193474269570115, 0.16296067326855093),
    (-6.9598378855214706, -0.66835111494675281, -1.7153810358296142),
    (-4.846552160625774, 0.96315156534510438, 0.51500258491293438)])
  weights = flex.double([1, 2, 3, 4])
  expected_residual = 0.000428526964094
  expected_gradients = [
    (0.019669677238598002, 0.0024733761183690019, -0.020428665017957027),
    (0.020015197633649708, 0.0025168238009701843, -0.020787517902107686),
    (-0.020270795833584803, -0.002548964159754365, 0.0210529787910856),
    (-0.019414079038662237, -0.0024412357595847371, 0.020163204128978415)]
  p = geometry_restraints.planarity_proxy(
    i_seqs=flex.size_t([0,1,2,3]),
    weights=weights)
  assert tuple(p.i_seqs) == (0,1,2,3)
  assert approx_equal(p.weights, weights)
  perm = flex.size_t([3,1,0,2])
  p = geometry_restraints.planarity_proxy(
    i_seqs=flex.size_t([0,1,2,3]).select(perm),
    weights=weights.select(perm))
  assert tuple(p.i_seqs) == (3,1,0,2)
  assert not_approx_equal(p.weights, weights)
  p = p.sort_i_seqs()
  assert tuple(p.i_seqs) == (0,1,2,3)
  assert approx_equal(p.weights, weights)
  # both construction paths (sites+weights, sites_cart+proxy) must agree
  for i_constructor in xrange(2):
    if (i_constructor == 0):
      l = geometry_restraints.planarity(
        sites=sites_cart,
        weights=weights)
    else:
      l = geometry_restraints.planarity(
        sites_cart=sites_cart,
        proxy=p)
    assert approx_equal(l.sites, sites_cart)
    assert approx_equal(l.weights, weights)
    assert eps_eq(l.deltas(), (0.014233272168667327, 0.007241647943016986,
      -0.0048894168534149443, -0.0035120793736139956))
    assert eps_eq(l.rms_deltas(), 0.00853329655308)
    assert eps_eq(l.residual(), expected_residual)
    assert eps_eq(l.gradients(), expected_gradients)
    assert eps_eq(l.normal(),
      (0.69097523765119184, 0.086887122267422581, -0.71763768639680903))
    assert eps_eq(l.residual(), l.lambda_min())
    assert eps_eq(l.center_of_mass(),
      (-5.7061446613913009, 0.11105869285849694, -0.42071347654387559))
    assert eps_eq(l.center_of_mass(), l.sites.mean_weighted(weights=l.weights))
    assert eps_eq(l.residual_tensor(),
      (10.250312599815825, 8.7000194514224525, 10.265208176541265,
       2.7229147081229312, 10.19874296603952, 3.6750425846794936))
    assert eps_eq(l.eigensystem().values(),
      [21.998140770294835, 7.2169709305206142, 0.00042852696409348911])
  proxies = geometry_restraints.shared_planarity_proxy([p,p])
  for proxy in proxies:
    assert tuple(proxy.i_seqs) == (0,1,2,3)
  assert eps_eq(geometry_restraints.planarity_deltas_rms(
    sites_cart=sites_cart,
    proxies=proxies), [0.0085332965530764398]*2)
  assert eps_eq(geometry_restraints.planarity_residuals(
    sites_cart=sites_cart,
    proxies=proxies), [expected_residual]*2)
  residual_sum = geometry_restraints.planarity_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert eps_eq(residual_sum, 2*expected_residual)
  gradient_array = flex.vec3_double(proxy.i_seqs.size(), (0,0,0))
  residual_sum = geometry_restraints.planarity_residual_sum(
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=gradient_array)
  assert eps_eq(residual_sum, 2*expected_residual)
  for g,e in zip(gradient_array, expected_gradients):
    assert eps_eq(g, matrix.col(e)*2)
  #
  def make_proxy(i_seqs, weights, origin_id):
    # positional (non-keyword) construction of a planarity_proxy
    return geometry_restraints.planarity_proxy(
      flex.size_t(i_seqs),
      flex.double(weights),
      origin_id)
  proxies = geometry_restraints.shared_planarity_proxy([
    make_proxy([0,1,2,3], [2,3,4,5], 0),
    make_proxy([1,2,3,4], [3,4,5,6], 1),
    make_proxy([2,3,0,4], [4,5,6,7], 1),
    make_proxy([3,1,2,4], [5,6,7,8], 3)])
  selected = proxies.proxy_select(n_seq=5, iselection=flex.size_t([0,2,4]))
  assert selected.size() == 0
  selected = proxies.proxy_select(n_seq=5, iselection=flex.size_t([1,2,3,4]))
  assert selected.size() == 2 # 2nd, 4th
  check(selected[0], i_seqs=[0,1,2,3], weights=[3,4,5,6], origin_id=1)
  check(selected[1], i_seqs=[2,0,1,3], weights=[5,6,7,8], origin_id=3)
  #
  selected = proxies.proxy_select(origin_id=5)
  assert selected.size() == 0
  selected = proxies.proxy_select(origin_id=0)
  assert selected.size() == 1
  check(selected[0], i_seqs=[0,1,2,3], weights=[2,3,4,5], origin_id=0)
  selected = proxies.proxy_select(origin_id=1)
  assert selected.size() == 2
  check(selected[0], i_seqs=[1,2,3,4], weights=[3,4,5,6], origin_id=1)
  check(selected[1], i_seqs=[2,3,0,4], weights=[4,5,6,7], origin_id=1)
  #
  # removing one atom from the selection must drop it (and its weight)
  # from the surviving proxy
  for i_remove in range(10,15):
    sel = flex.size_t(range(10,i_remove)+range(i_remove+1,15))
    pp = geometry_restraints.planarity_proxy(
      i_seqs=flex.size_t([10, 11, 12, 13, 14]),
      weights=flex.double(range(5))+13)
    pps = geometry_restraints.shared_planarity_proxy()
    pps.append(pp)
    selected = pps.proxy_select(20, sel)
    assert list(selected[0].i_seqs) == [0,1,2,3]
    assert approx_equal(selected[0].weights,
      pp.weights[:i_remove-10].concatenate(pp.weights[i_remove+1-10:]))
  #
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True,True]))
  assert rest.size() == 0
  rest = proxies.proxy_remove(selection=flex.bool([False,True,True,True,True]))
  assert rest.size() == 2 # 1st and 3rd
  check(rest[0], i_seqs=[0,1,2,3], weights=[2,3,4,5], origin_id=0)
  check(rest[1], i_seqs=[2,3,0,4], weights=[4,5,6,7], origin_id=1)
  rest = proxies.proxy_remove(selection=flex.bool([True,True,True,True,False]))
  assert rest.size() == 3
  rest = proxies.proxy_remove(origin_id=1)
  assert rest.size() == 2 # 1st and 4th
  check(rest[0], i_seqs=[0,1,2,3], weights=[2,3,4,5], origin_id=0)
  check(rest[1], i_seqs=[3,1,2,4], weights=[5,6,7,8], origin_id=3)
  #
  unit_cell = uctbx.unit_cell([15,11.5,16.25,90,99.5,90])
  sites_cart = flex.vec3_double(
    [(12.87,0.10,9.04),(12.54,0.44,7.73),(13.47,0.34,6.71)])
  rt_mx = sgtbx.rt_mx('2-X,-Y,1-Z')
  u_mx = sgtbx.rt_mx()
  i_seqs = flex.size_t([0,1,2,0,1,2])
  sym_ops = (u_mx,u_mx,u_mx,rt_mx,rt_mx,rt_mx)
  weights = flex.double([1]*6)
  p = geometry_restraints.planarity_proxy(
    i_seqs=i_seqs,
    sym_ops=sym_ops,
    weights=weights)
  planarity = geometry_restraints.planarity(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p)
  gradient_array = flex.vec3_double(sites_cart.size())
  residual_sum = geometry_restraints.planarity_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=geometry_restraints.shared_planarity_proxy([p]),
    gradient_array=gradient_array)
  fd_grads = finite_difference_gradients(
    restraint_type=geometry_restraints.planarity,
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p)
  for g,e in zip(gradient_array, fd_grads):
    assert approx_equal(g, e)
  # check proxies with and without sym_ops are happy side-by-side
  p_sym = geometry_restraints.planarity_proxy(
    i_seqs=i_seqs,
    sym_ops=sym_ops,
    weights=weights)
  assert p_sym.sym_ops == sym_ops
  restraint_sym = geometry_restraints.planarity(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxy=p_sym)
  p_no_sym = geometry_restraints.planarity_proxy(
    i_seqs=i_seqs,
    weights=weights)
  assert p_no_sym.sym_ops == None
  restraint_no_sym = geometry_restraints.planarity(
    sites_cart=sites_cart,
    proxy=p_no_sym)
  proxies = geometry_restraints.shared_planarity_proxy([p_sym,p_no_sym])
  assert approx_equal(geometry_restraints.planarity_deltas_rms(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [restraint_sym.rms_deltas(),restraint_no_sym.rms_deltas()])
  assert approx_equal(geometry_restraints.planarity_residuals(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies), [restraint_sym.residual(),restraint_no_sym.residual()])
  residual_sum = geometry_restraints.planarity_residual_sum(
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=None)
  assert approx_equal(residual_sum, restraint_sym.residual() + restraint_no_sym.residual())
def exercise_planarity_top_out():
# exercise finite-difference for top-out potential
i_seqs = [0,1,2,3]
sites_cart = flex.vec3_double(
[(1,1,0),(1,2,0),(2,1,0),(1,1,2)])
# [(12,54,23),(234,235,345),(234,433,287),(1541,3452,7677)])
weights = flex.double([1]*4)
p = geometry_restraints.planarity_proxy(
i_seqs=i_seqs,
weights=weights,
limit=1,
top_out=True)
planarity = geometry_restraints.planarity(
sites_cart=sites_cart,
proxy=p)
# manual_gradient(planarity)
print "Normal:", planarity.normal()
# print dir(planarity)
print "deltas:", list(planarity.deltas())
# res = planarity.residual()
# print "residual:",res
sc1 = sites_cart.deep_copy()
sc2 = sites_cart.deep_copy()
h=1.e-5
sc1[0] = (1+h,1,0)
sc2[0] = (1-h,1,0)
pl1 = geometry_restraints.planarity(
sites_cart=sc1,
proxy=p)
pl2 = geometry_restraints.planarity(
sites_cart=sc2,
proxy=p)
pl1_residual = pl1.residual()
pl2_residual = pl2.residual()
gr = (pl1_residual - pl2_residual) / (2*h)
# print list(sc1)
# print list(sc2)
# print pl1_residual, pl2_residual
# print "GRADIENT:",gr
# STOP()
gradient_array = flex.vec3_double(sites_cart.size())
residual_sum = geometry_restraints.planarity_residual_sum(
sites_cart=sites_cart,
proxies=geometry_restraints.shared_planarity_proxy([p]),
gradient_array=gradient_array)
fd_grads = finite_difference_gradients(
restraint_type=geometry_restraints.planarity,
sites_cart=sites_cart,
proxy=p, eps=1.e-5)
for g,e in zip(gradient_array, fd_grads):
print "grads from proxy:", g
print "grads from finit:", e
# assert approx_equal(g, e)
def exercise_proxy_show():
  """Exercise show_sorted() formatted output for all restraint proxy types.

  Each expected string below is compared verbatim (via show_diff) against
  the formatted output, covering sorting by residual/delta/rms_deltas,
  numeric vs. named site labels, output prefixes, max_items truncation,
  and symmetry-operation annotation.
  """
  if sys.platform.startswith("win") and sys.version_info[:2] < (2,6):
    # This appears to be a windows-specific bug with string formatting
    # for python versions prior to 2.6, where the exponent is printed
    # with 3 digits rather than 2.
    print "Skipping exercise_proxy_show()"
    return
  # zeolite AHT
  crystal_symmetry = crystal.symmetry(
    unit_cell=(15.794, 9.206, 8.589, 90, 90, 90),
    space_group_symbol="C m c m")
  sites_cart_cry = crystal_symmetry.unit_cell().orthogonalization_matrix() \
    * flex.vec3_double([(0.1681, 0.6646, 0.4372), (0.0000, 0.6644, 0.5629)])
  asu_mappings = crystal_symmetry.asu_mappings(buffer_thickness=3.0)
  asu_mappings.process_sites_cart(original_sites=sites_cart_cry)
  pair_asu_table = crystal.pair_asu_table(asu_mappings=asu_mappings)
  pair_asu_table.add_all_pairs(distance_cutoff=2.9)
  # No proxies yet: only the header line is emitted.
  sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
    asu_mappings=asu_mappings)
  sio = StringIO()
  sorted_asu_proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart_cry,
    f=sio)
  assert not show_diff(sio.getvalue(), """\
Bond restraints: 0
""")
  sorted_asu_proxies = geometry_restraints.bond_sorted_asu_proxies(
    pair_asu_table=pair_asu_table)
  # Randomize restraint parameters; the fixed seed keeps output reproducible.
  mt = flex.mersenne_twister(seed=5)
  for proxy in sorted_asu_proxies.asu:
    proxy.distance_ideal = 2.9 + (mt.random_double()-0.5)*0.2
    proxy.weight = 1+mt.random_double()*100
    if (mt.random_double() > 0.5):
      # Only some proxies get a slack, so both table layouts are exercised.
      proxy.slack = mt.random_double()*0.1
  sio = StringIO()
  sorted_asu_proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart_cry,
    site_labels=["Si1", "Si2"],
    f=sio)
  assert not show_diff(sio.getvalue(), """\
Bond restraints: 3
Sorted by residual:
bond Si1
Si2
ideal model slack delta sigma weight residual sym.op.
2.979 2.866 0.004 0.112 1.71e-01 3.42e+01 4.01e-01 x,y,z
bond Si2
Si1
ideal model slack delta sigma weight residual sym.op.
2.822 2.866 0.042 -0.045 1.29e-01 6.05e+01 4.35e-04 x,y,z
bond Si2
Si1
ideal model delta sigma weight residual sym.op.
2.867 2.866 0.001 1.26e-01 6.33e+01 6.17e-05 -x,y,z
""")
  # Same proxies, now with a prefix and truncated after two items.
  sio = StringIO()
  sorted_asu_proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart_cry,
    f=sio,
    prefix="&",
    max_items=2)
  assert not show_diff(sio.getvalue(), """\
&Bond restraints: 3
&Sorted by residual:
&bond 0
& 1
& ideal model slack delta sigma weight residual sym.op.
& 2.979 2.866 0.004 0.112 1.71e-01 3.42e+01 4.01e-01 x,y,z
&bond 1
& 0
& ideal model slack delta sigma weight residual sym.op.
& 2.822 2.866 0.042 -0.045 1.29e-01 6.05e+01 4.35e-04 x,y,z
&... (remaining 1 not shown)
""")
  # max_items=0: only headers plus the "not shown" summary line.
  sio = StringIO()
  sorted_asu_proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart_cry,
    f=sio,
    prefix="*",
    max_items=0)
  gv = sio.getvalue()  # NOTE(review): unused local; kept for parity
  assert not show_diff(sio.getvalue(), """\
*Bond restraints: 3
*Sorted by residual:
*... (remaining 3 not shown)
""")
  #
  # Simple bond proxies, with and without a unit cell (sym.op. column only
  # appears for the proxy constructed with an rt_mx).
  for unit_cell in [None, uctbx.unit_cell([15,11.5,16.25,90,99.5,90])]:
    simple_proxies = geometry_restraints.shared_bond_simple_proxy([
      geometry_restraints.bond_simple_proxy((0,1), 2.979, 10),
      geometry_restraints.bond_simple_proxy(
        (1,0), sgtbx.rt_mx("x,y,z"), 2.822, 250)
      ])
    sio = StringIO()
    simple_proxies.show_sorted(
      "residual", sites_cart_cry, unit_cell=unit_cell, f=sio)
    assert not show_diff(sio.getvalue(), """\
Bond restraints: 2
Sorted by residual:
bond 1
0
ideal model delta sigma weight residual sym.op.
2.822 2.866 -0.044 6.32e-02 2.50e+02 4.86e-01 x,y,z
bond 0
1
ideal model delta sigma weight residual
2.979 2.866 0.113 3.16e-01 1.00e+01 1.27e-01
""")
  # Random sites for the remaining (angle/dihedral/chirality/planarity)
  # restraint types; seed fixed so expected strings stay valid.
  mt = flex.mersenne_twister(seed=73)
  sites_cart = flex.vec3_double(mt.random_double(size=18))
  site_labels = ["a", "ba", "c", "dada", "e", "f"]
  #
  # Angle restraints.
  proxies = geometry_restraints.shared_angle_proxy()
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=flex.vec3_double(),
    f=sio)
  assert not show_diff(sio.getvalue(), """\
Bond angle restraints: 0
""")
  proxies = geometry_restraints.shared_angle_proxy([
    geometry_restraints.angle_proxy(
      i_seqs=[2,1,0],
      angle_ideal=59,
      weight=2),
    geometry_restraints.angle_proxy(
      i_seqs=[3,0,1],
      angle_ideal=99,
      weight=8)])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    prefix="+")
  assert not show_diff(sio.getvalue(), """\
+Bond angle restraints: 2
+Sorted by residual:
+angle 3
+ 0
+ 1
+ ideal model delta sigma weight residual
+ 99.00 99.72 -0.72 3.54e-01 8.00e+00 4.19e+00
+angle 2
+ 1
+ 0
+ ideal model delta sigma weight residual
+ 59.00 58.06 0.94 7.07e-01 2.00e+00 1.76e+00
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="delta",
    sites_cart=sites_cart,
    site_labels=site_labels,
    f=sio,
    prefix="@",
    max_items=1)
  assert not show_diff(sio.getvalue(), """\
@Bond angle restraints: 2
@Sorted by delta:
@angle c
@ ba
@ a
@ ideal model delta sigma weight residual
@ 59.00 58.06 0.94 7.07e-01 2.00e+00 1.76e+00
@... (remaining 1 not shown)
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    max_items=0)
  assert not show_diff(sio.getvalue(), """\
Bond angle restraints: 2
""")
  #
  # Dihedral restraints (harmonic form, periodicity=-1).
  proxies = geometry_restraints.shared_dihedral_proxy()
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=flex.vec3_double(),
    f=sio)
  assert not show_diff(sio.getvalue(), """\
Dihedral angle restraints: 0
""")
  proxies = geometry_restraints.shared_dihedral_proxy([
    geometry_restraints.dihedral_proxy(
      i_seqs=[0,1,3,4],
      angle_ideal=59,
      weight=2,
      periodicity=-1),
    geometry_restraints.dihedral_proxy(
      i_seqs=[3,2,0,5],
      angle_ideal=99,
      weight=8,
      periodicity=-1)])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    prefix="-")
  assert not show_diff(sio.getvalue(), """\
-Dihedral angle restraints: 2
- sinusoidal: 0
- harmonic: 2
-Sorted by residual:
-dihedral 3
- 2
- 0
- 5
- ideal model delta harmonic sigma weight residual
- 99.00 16.67 82.33 -1 3.54e-01 8.00e+00 5.42e+04
-dihedral 0
- 1
- 3
- 4
- ideal model delta harmonic sigma weight residual
- 59.00 -159.79 -141.21 -1 7.07e-01 2.00e+00 3.99e+04
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="delta",
    sites_cart=sites_cart,
    site_labels=site_labels,
    f=sio,
    prefix="^",
    max_items=1)
  assert not show_diff(sio.getvalue(), """\
^Dihedral angle restraints: 2
^ sinusoidal: 0
^ harmonic: 2
^Sorted by delta:
^dihedral a
^ ba
^ dada
^ e
^ ideal model delta harmonic sigma weight residual
^ 59.00 -159.79 -141.21 -1 7.07e-01 2.00e+00 3.99e+04
^... (remaining 1 not shown)
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    max_items=1)
  assert not show_diff(sio.getvalue(), """\
Dihedral angle restraints: 2
sinusoidal: 0
harmonic: 2
Sorted by residual:
dihedral 3
2
0
5
ideal model delta harmonic sigma weight residual
99.00 16.67 82.33 -1 3.54e-01 8.00e+00 5.42e+04
... (remaining 1 not shown)
""")
  #
  # Chirality restraints (covers both both_signs=True and False).
  proxies = geometry_restraints.shared_chirality_proxy()
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=flex.vec3_double(),
    f=sio)
  assert not show_diff(sio.getvalue(), """\
Chirality restraints: 0
""")
  proxies = geometry_restraints.shared_chirality_proxy([
    geometry_restraints.chirality_proxy(
      i_seqs=[0,1,3,4],
      volume_ideal=0.09,
      both_signs=False,
      weight=2),
    geometry_restraints.chirality_proxy(
      i_seqs=[3,2,0,5],
      volume_ideal=0.16,
      both_signs=True,
      weight=8)])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    prefix="$")
  assert not show_diff(sio.getvalue(), """\
$Chirality restraints: 2
$Sorted by residual:
$chirality 3
$ 2
$ 0
$ 5
$ both_signs ideal model delta sigma weight residual
$ True 0.16 0.05 0.11 3.54e-01 8.00e+00 9.34e-02
$chirality 0
$ 1
$ 3
$ 4
$ both_signs ideal model delta sigma weight residual
$ False 0.09 -0.04 0.13 7.07e-01 2.00e+00 3.33e-02
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="delta",
    sites_cart=sites_cart,
    site_labels=site_labels,
    f=sio,
    prefix="*",
    max_items=1)
  assert not show_diff(sio.getvalue(), """\
*Chirality restraints: 2
*Sorted by delta:
*chirality a
* ba
* dada
* e
* both_signs ideal model delta sigma weight residual
* False 0.09 -0.04 0.13 7.07e-01 2.00e+00 3.33e-02
*... (remaining 1 not shown)
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    max_items=0)
  assert not show_diff(sio.getvalue(), """\
Chirality restraints: 2
""")
  #
  # Planarity restraints.
  proxies = geometry_restraints.shared_planarity_proxy()
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=flex.vec3_double(),
    f=sio)
  if 0: # n=0 restraints removed
    assert not show_diff(sio.getvalue(), """\
Planarity restraints: 0
""")
  proxies = geometry_restraints.shared_planarity_proxy([
    geometry_restraints.planarity_proxy(
      i_seqs=[0,2,4,1],
      weights=[0.31,0.2,0.31,0.4]),
    geometry_restraints.planarity_proxy(
      i_seqs=[0,2,3,4,5],
      weights=[0.01,0.11,0.21,0.31,0.41])])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    prefix=":")
  assert not show_diff(sio.getvalue(), """\
:Planarity restraints: 2
:Sorted by residual:
: delta sigma weight rms_deltas residual
:plane 0 0.004 1.80e+00 3.10e-01 1.46e-01 2.52e-02
: 2 -0.196 2.24e+00 2.00e-01
: 4 -0.115 1.80e+00 3.10e-01
: 1 0.184 1.58e+00 4.00e-01
: delta sigma weight rms_deltas residual
:plane 0 -0.332 1.00e+01 1.00e-02 1.78e-01 9.86e-03
: 2 0.152 3.02e+00 1.10e-01
: 3 -0.143 2.18e+00 2.10e-01
: 4 -0.030 1.80e+00 3.10e-01
: 5 0.063 1.56e+00 4.10e-01
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="rms_deltas",
    sites_cart=sites_cart,
    site_labels=site_labels,
    f=sio,
    prefix="<",
    max_items=1)
  assert not show_diff(sio.getvalue(), """\
<Planarity restraints: 2
<Sorted by rms_deltas:
< delta sigma weight rms_deltas residual
<plane a -0.332 1.00e+01 1.00e-02 1.78e-01 9.86e-03
< c 0.152 3.02e+00 1.10e-01
< dada -0.143 2.18e+00 2.10e-01
< e -0.030 1.80e+00 3.10e-01
< f 0.063 1.56e+00 4.10e-01
<... (remaining 1 not shown)
""")
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    sites_cart=sites_cart,
    f=sio,
    max_items=1)
  assert not show_diff(sio.getvalue(), """\
Planarity restraints: 2
Sorted by residual:
delta sigma weight rms_deltas residual
plane 0 0.004 1.80e+00 3.10e-01 1.46e-01 2.52e-02
2 -0.196 2.24e+00 2.00e-01
4 -0.115 1.80e+00 3.10e-01
1 0.184 1.58e+00 4.00e-01
... (remaining 1 not shown)
""")
  #
  # Proxies carrying symmetry operations: a sym.op. column is appended.
  unit_cell = uctbx.unit_cell([15,11.5,16.25,90,99.5,90])
  sites_cart = flex.vec3_double(
    [(12.87,0.10,9.04),(12.54,0.44,7.73),(13.47,0.34,6.71),(1,2,3)])
  rt_mx = sgtbx.rt_mx('2-X,-Y,1-Z')
  u_mx = sgtbx.rt_mx()  # identity operation
  p = geometry_restraints.angle_proxy(
    i_seqs=[2,0,1],
    sym_ops=[u_mx,rt_mx,rt_mx],
    angle_ideal=120,
    weight=1)
  proxies = geometry_restraints.shared_angle_proxy([p,p])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    f=sio,
    prefix='!')
  assert not show_diff(sio.getvalue(), """\
!Bond angle restraints: 2
!Sorted by residual:
!angle 2
! 0 -x+2,-y,-z+1
! 1 -x+2,-y,-z+1
! ideal model delta sigma weight residual
! 120.00 122.78 -2.78 1.00e+00 1.00e+00 7.73e+00
!angle 2
! 0 -x+2,-y,-z+1
! 1 -x+2,-y,-z+1
! ideal model delta sigma weight residual
! 120.00 122.78 -2.78 1.00e+00 1.00e+00 7.73e+00
""")
  # test proxies with and without proxy.sym_ops side by side
  p2 = geometry_restraints.angle_proxy(
    i_seqs=[2,1,0],
    angle_ideal=59,
    weight=2)
  proxies = geometry_restraints.shared_angle_proxy([p,p2])
  sio = StringIO()
  proxies.show_sorted(
    by_value="delta",
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    f=sio,
    prefix='~')
  assert not show_diff(sio.getvalue(), """\
~Bond angle restraints: 2
~Sorted by delta:
~angle 2
~ 1
~ 0
~ ideal model delta sigma weight residual
~ 59.00 121.08 -62.08 7.07e-01 2.00e+00 7.71e+03
~angle 2
~ 0 -x+2,-y,-z+1
~ 1 -x+2,-y,-z+1
~ ideal model delta sigma weight residual
~ 120.00 122.78 -2.78 1.00e+00 1.00e+00 7.73e+00
""")
  #
  # Dihedral with a symmetry operation on the last site.
  p = geometry_restraints.dihedral_proxy(
    i_seqs=[2,0,1,2],
    sym_ops=[u_mx,u_mx,u_mx,rt_mx],
    angle_ideal=0,
    weight=1)
  proxies = geometry_restraints.shared_dihedral_proxy([p,p])
  sio = StringIO()
  proxies.show_sorted(
    by_value="delta",
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    f=sio,
    prefix='%')
  assert not show_diff(sio.getvalue(), """\
%Dihedral angle restraints: 2
% sinusoidal: 0
% harmonic: 2
%Sorted by delta:
%dihedral 2
% 0
% 1
% 2 -x+2,-y,-z+1
% ideal model delta harmonic sigma weight residual
% 0.00 5.16 -5.16 0 1.00e+00 1.00e+00 2.67e+01
%dihedral 2
% 0
% 1
% 2 -x+2,-y,-z+1
% ideal model delta harmonic sigma weight residual
% 0.00 5.16 -5.16 0 1.00e+00 1.00e+00 2.67e+01
""")
  #
  # Planarity with symmetry operations on half the sites.
  p = geometry_restraints.planarity_proxy(
    i_seqs=flex.size_t([0,1,2,0,1,2]),
    sym_ops=[u_mx,u_mx,u_mx,rt_mx,rt_mx,rt_mx],
    weights=flex.double([1]*6))
  proxies = geometry_restraints.shared_planarity_proxy([p,p])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    f=sio,
    prefix=">")
  assert not show_diff(sio.getvalue(), """\
>Planarity restraints: 2
>Sorted by residual:
> delta sigma weight rms_deltas residual sym.op.
>plane 0 0.017 1.00e+00 1.00e+00 1.60e-02 1.53e-03
> 1 -0.015 1.00e+00 1.00e+00
> 2 0.016 1.00e+00 1.00e+00
> 0 -0.017 1.00e+00 1.00e+00 -x+2,-y,-z+1
> 1 0.015 1.00e+00 1.00e+00 -x+2,-y,-z+1
> 2 -0.016 1.00e+00 1.00e+00 -x+2,-y,-z+1
> delta sigma weight rms_deltas residual sym.op.
>plane 0 0.017 1.00e+00 1.00e+00 1.60e-02 1.53e-03
> 1 -0.015 1.00e+00 1.00e+00
> 2 0.016 1.00e+00 1.00e+00
> 0 -0.017 1.00e+00 1.00e+00 -x+2,-y,-z+1
> 1 0.015 1.00e+00 1.00e+00 -x+2,-y,-z+1
> 2 -0.016 1.00e+00 1.00e+00 -x+2,-y,-z+1
""")
  # test proxies with and without proxy.sym_ops side by side
  p2 = geometry_restraints.planarity_proxy(
    i_seqs=[0,2,3,1],
    weights=[0.31,0.2,0.31,0.4])
  proxies = geometry_restraints.shared_planarity_proxy([p,p2])
  sio = StringIO()
  proxies.show_sorted(
    by_value="rms_deltas",
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    f=sio,
    prefix="#")
  assert not show_diff(sio.getvalue(), """\
#Planarity restraints: 2
#Sorted by rms_deltas:
# delta sigma weight rms_deltas residual sym.op.
#plane 0 -0.053 1.80e+00 3.10e-01 6.02e-02 4.53e-03
# 2 -0.071 2.24e+00 2.00e-01
# 3 -0.005 1.80e+00 3.10e-01
# 1 0.081 1.58e+00 4.00e-01
# delta sigma weight rms_deltas residual sym.op.
#plane 0 0.017 1.00e+00 1.00e+00 1.60e-02 1.53e-03
# 1 -0.015 1.00e+00 1.00e+00
# 2 0.016 1.00e+00 1.00e+00
# 0 -0.017 1.00e+00 1.00e+00 -x+2,-y,-z+1
# 1 0.015 1.00e+00 1.00e+00 -x+2,-y,-z+1
# 2 -0.016 1.00e+00 1.00e+00 -x+2,-y,-z+1
""")
  #
  # Bond-similarity restraints, with and without symmetry operations.
  unit_cell = uctbx.unit_cell([15,25,30,90,90,90])
  sites_cart = flex.vec3_double(
    [(1,2,3),(2,4,6),(1,2,4.5),(2,5.6,6),(14,24,29),(0.5,24,29)])
  p = geometry_restraints.bond_similarity_proxy(
    i_seqs=[(0,2),(1,3),(4,5)],
    sym_ops=[u_mx,u_mx,sgtbx.rt_mx('1+x,y,z')],
    weights=(1,2,3))
  proxies = geometry_restraints.shared_bond_similarity_proxy([p,p])
  sio = StringIO()
  proxies.show_sorted(
    by_value="residual",
    unit_cell=unit_cell,
    sites_cart=sites_cart,
    f=sio,
    prefix=">")
  assert not show_diff(sio.getvalue(), """\
>Bond similarity restraints: 2
>Sorted by residual:
> delta sigma weight rms_deltas residual sym.op.
>bond 0-2 -0.033 1.00e+00 1.00e+00 4.71e-02 2.22e-03
> 1-3 0.067 7.07e-01 2.00e+00
> 4-5 -0.033 5.77e-01 3.00e+00 x+1,y,z
> delta sigma weight rms_deltas residual sym.op.
>bond 0-2 -0.033 1.00e+00 1.00e+00 4.71e-02 2.22e-03
> 1-3 0.067 7.07e-01 2.00e+00
> 4-5 -0.033 5.77e-01 3.00e+00 x+1,y,z
""")
  sio = StringIO()
  p = geometry_restraints.bond_similarity_proxy(
    i_seqs=[(0,2),(1,3),(4,5)],
    weights=(1,2,3))
  proxies = geometry_restraints.shared_bond_similarity_proxy([p,p])
  proxies.show_sorted(
    by_value="rms_deltas",
    sites_cart=sites_cart,
    site_labels=site_labels,
    f=sio,
    prefix=">")
  assert not show_diff(sio.getvalue(), """\
>Bond similarity restraints: 2
>Sorted by rms_deltas:
> delta sigma weight rms_deltas residual
>bond a-c -6.033 1.00e+00 1.00e+00 5.98e+00 3.56e+01
> ba-dada -5.933 7.07e-01 2.00e+00
> e-f 5.967 5.77e-01 3.00e+00
> delta sigma weight rms_deltas residual
>bond a-c -6.033 1.00e+00 1.00e+00 5.98e+00 3.56e+01
> ba-dada -5.933 7.07e-01 2.00e+00
> e-f 5.967 5.77e-01 3.00e+00
""")
def exercise_bonds_with_symops():
  """Bond restraints whose proxies carry symmetry operations (rt_mx_ji).

  Builds proxies from a pair_sym_table of a trial structure, checks the
  exact symmetry operations, deltas and residuals, then validates the
  analytical gradients of bond_residual_sum against central finite
  differences computed per site and per coordinate.
  """
  from cctbx.crystal.tst_ext import trial_structure
  xs = trial_structure(choice_of_coordinates=1)
  pst = xs.pair_asu_table(distance_cutoff=3.2).extract_pair_sym_table()
  # One shared parameter set for all bonds.
  bp = geometry_restraints.bond_params(
    distance_ideal=3.1,
    weight=1/0.01**2)
  proxies = geometry_restraints.shared_bond_simple_proxy([
    geometry_restraints.bond_simple_proxy(
      i_seqs=[_.i_seq, _.j_seq],
      rt_mx_ji=_.rt_mx_ji,
      params=bp)
    for _ in pst.iterator()])
  # The symmetry operations extracted from the pair table, in order.
  assert not show_diff(
    "\n".join([p.rt_mx_ji.as_xyz() for p in proxies]),
    """\
-y+1,-x+1,-z+1/2
x,x-y+2,-z+1/2
x,y,z
x-y+1,x+1,-z+1
x,y,z
-y+1,-x+1,z
-x+y,y,z""")
  deltas = geometry_restraints.bond_deltas(
    unit_cell=xs.unit_cell(),
    sites_cart=xs.sites_cart(),
    proxies=proxies)
  assert approx_equal(deltas, [
    0.0495812, -0.0821728, -0.07030907, -0.0093944,
    -0.09861225, 0.082206, -0.0658604])
  residuals = geometry_restraints.bond_residuals(
    unit_cell=xs.unit_cell(),
    sites_cart=xs.sites_cart(),
    proxies=proxies)
  assert approx_equal(residuals, [
    24.58295, 67.52369, 49.43365, 0.8825475, 97.24376, 67.57826, 43.37592])
  # Analytical gradients accumulated by the residual-sum call.
  grads_ana = flex.vec3_double(xs.scatterers().size(), (0,0,0))
  sites_cart = xs.sites_cart()
  residual_sum = geometry_restraints.bond_residual_sum(
    unit_cell=xs.unit_cell(),
    sites_cart=sites_cart,
    proxies=proxies,
    gradient_array=grads_ana)
  # Central finite differences: perturb each coordinate of each site by
  # +/- eps, restoring the original site afterwards.
  eps = 1e-6
  grads_fin = flex.vec3_double()
  for i_site in xrange(sites_cart.size()):
    sori = sites_cart[i_site]  # original site, restored below
    gs = []
    for ix in xrange(3):
      fs = []
      for signed_eps in [eps, -eps]:
        seps = list(sori)
        seps[ix] += signed_eps
        sites_cart[i_site] = seps
        residual_sum = geometry_restraints.bond_residual_sum(
          unit_cell=xs.unit_cell(),
          sites_cart=sites_cart,
          proxies=proxies,
          gradient_array=None)
        fs.append(residual_sum)
      gs.append((fs[0]-fs[1])/(2*eps))
    grads_fin.append(gs)
    sites_cart[i_site] = sori
  assert approx_equal(grads_ana, grads_fin)
def exercise_parallelity():
  """Exercise parallelity proxies and restraints.

  Covers proxy attribute round-trips, exact residual/delta values for
  the plain and top-out potentials, finite-difference validation of
  analytical gradients over a grid of target angles and slacks, and
  proxy_select/proxy_remove by iselection and origin_id.
  """
  def check(p, i_seqs=(1,2,3,4,5), j_seqs=(6,7,8,9,10), weight=1,
        target_angle_deg=0, slack=0, top_out=False, limit=1, origin_id=0):
    # Verify all proxy attributes against the expected values.
    assert approx_equal(p.i_seqs, i_seqs)
    assert approx_equal(p.j_seqs, j_seqs)
    assert approx_equal(p.weight, weight)
    assert approx_equal(p.target_angle_deg, target_angle_deg)
    assert approx_equal(p.slack, slack)
    assert p.top_out == top_out
    assert approx_equal(p.limit, limit)
    assert p.origin_id == origin_id
  def test_exact_values(test_sites, weight, top_out, limit, residual,
        delta, origin_id=0, eps=1e-6):
    # NOTE(review): origin_id is accepted but not used here — confirm
    # whether it was meant to be passed to the restraint.
    p = geometry_restraints.parallelity(i_sites=test_sites[0],
      j_sites=test_sites[1],
      weight=weight,
      top_out=top_out,
      limit=limit)
    assert approx_equal(p.residual(), residual, eps)
    assert approx_equal(p.delta, delta, eps)
  p = geometry_restraints.parallelity_proxy(
    i_seqs=(1,2,3,4,5),
    j_seqs=(6,7,8,9,10),
    weight=1)
  check(p)
  p.origin_id = 1
  check(p, origin_id=1)
  # Copy-constructor style proxy: new seqs, parameters taken from p.
  c = geometry_restraints.parallelity_proxy(
    i_seqs=(5,4,3,2,1),
    j_seqs=(10,9,8,7,6),
    proxy=p)
  check(c, i_seqs=(5,4,3,2,1), j_seqs=(10,9,8,7,6), weight=1, origin_id=1)
  c = c.sort_ij_seqs()
  # NOTE(review): this checks p, not the freshly sorted c — possibly a
  # typo for check(c, origin_id=1); confirm against intent.
  check(p, origin_id=1)
  # values
  test_sites_1 = ([(1,1,0), (2,3,0), (1,2,0)],
          [(1,1,1), (2,2,1), (1,2,1)])
  test_sites_2 = ([(1,0,0), (2,0,0), (1,1,0)],
          [(1,0,0), (2,0,0), (1,0,1)])
  # 60 degrees
  test_sites_21 = ([(1,0,0), (2,0,0), (1,1.732050807568877,-1)],
          [(1,0,0), (2,0,0), (1,0,1)])
  test_sites_3 = ([(1,0,0), (2,0,0), (1,1,0),(3,0,0), (3,3,0), (1,1,0)],
          [(1,0,0), (2,0,0), (1,0,1)])
  test_sites_4 = ([(1,0,0), (2,0,0) ],
          [(1,0,0), (2,0,0), (1,0,1)])
  # test_data=[(test_sites, weight, top_out, limit, residual, delta)]
  # NOTE(review): each row carries a 7th element which is unpacked as
  # origin_id below (and unused in test_exact_values).
  test_data = [(test_sites_1, 1, False,1, 0, 0 , 0),
         (test_sites_1, 1, True, 1, 0, 0 , 0),
         (test_sites_1, 1, True, 1000, 0, 0 , 0),
         (test_sites_2, 1, False,1, 1, 90, 0),
         (test_sites_2, 1, True, 1, 0.632120558829, 90, 0),
         (test_sites_2, 1, True, 1000, 0.999999499984, 90, 1),
         (test_sites_2, 1300, False,1, 1300, 90, 1),
         (test_sites_2, 1300, True, 1, 821.756726477, 90, 1),
         (test_sites_2, 1300, True, 1000, 1299.99934998, 90, 1),
         (test_sites_21,1000, False,1, 500, 60, 1),
         (test_sites_21,1000, True, 1, 393.469340287, 60, 2),
         (test_sites_21,1000, True, 1000, 499.999874948, 60, 2),
         (test_sites_3, 1, False,1, 1, 90, 2),
         (test_sites_3, 1, True, 1, 0.632120558829, 90, 2),
         (test_sites_3, 1, True, 1000, 0.999999499984, 90, 2)]
  for (test_sites, weight, top_out, limit, residual,
     delta, origin_id) in test_data:
    test_exact_values(test_sites, weight, top_out, limit, residual,
      delta, origin_id)
  # gradients
  def make_points(one_d):
    # Re-group a flat [x0,y0,z0,x1,...] list into (x,y,z) tuples.
    result = []
    for i in range(int(len(one_d)/3)):
      result.append(tuple(one_d[i*3:i*3+3]))
    return result
  h=1.e-7
  test_sites= [(1,0,0), (2,0,0), (1,1.732050807568877,-1),
         (1,0,0), (2,0,0), (1,0,1)]
  test_sites_1d = [1,0,0, 2,0,0, 1,1.732050807568877,-1,
           1,0,0, 2,0,0, 1,0,1]
  # Sweep target angle, slack and top_out; compare analytical gradients
  # against central finite differences of the residual.
  # for target_angle_deg in [0,10]:
  for target_angle_deg in range(0,360,4):
    for slack in range(0,90,4):
      for top_out in [False, True]:
        limit = 10
        weight = 10  # NOTE(review): unused; weight=1 is passed below
        # print target_angle_deg, slack
        p_original = geometry_restraints.parallelity(i_sites=test_sites[:3],
          j_sites=test_sites[3:],
          weight=1,
          target_angle_deg=target_angle_deg,
          slack=slack,
          limit=limit,
          top_out=top_out)
        grad = list(p_original.gradients())
        fin_dif_grad = []
        for i in range(len(test_sites_1d)):
          # Perturb one coordinate by +h, then -h, then restore it.
          test_sites_1d[i]+=h
          points = make_points(test_sites_1d)
          p1 = geometry_restraints.parallelity(i_sites=points[:3],
            j_sites=points[3:],
            weight=1,
            target_angle_deg=target_angle_deg,
            slack=slack,
            limit=limit,
            top_out=top_out)
          test_sites_1d[i]-=2*h
          points = make_points(test_sites_1d)
          p2 = geometry_restraints.parallelity(i_sites=points[:3],
            j_sites=points[3:],
            weight=1,
            target_angle_deg=target_angle_deg,
            slack=slack,
            limit=limit,
            top_out=top_out)
          test_sites_1d[i]+=h
          # print p1.residual(), p2.residual()
          fin_dif_grad.append((p1.residual()-p2.residual())/(2.0*h))
        sites_fdg = make_points(fin_dif_grad)
        assert approx_equal(grad, sites_fdg, 1.e-6)
  # Proxy selections
  def make_proxy(i_seqs, j_seqs, weight, target_angle_deg=0, slack=0,
        limit=-1, top_out=False, origin_id=0):
    # Convenience wrapper using positional construction.
    return geometry_restraints.parallelity_proxy(
      flex.size_t(i_seqs),
      flex.size_t(j_seqs),
      weight, target_angle_deg, slack, limit, top_out, origin_id)
  proxies = geometry_restraints.shared_parallelity_proxy([
    make_proxy([0,1,2,3], [2,3,4,5], 1, 11, 1, 1, True, 0),
    make_proxy([1,2,3,4], [3,4,5,6], 2, 12, 2, 2, True, 1),
    make_proxy([2,3,10,11], [4,5,12,13], 3, 13, 3, 3, True, 1),
    make_proxy([3,1,12,14], [5,6,14,15], 4, 14, 4, 4, True, 3)])
  # iselection-based selection remaps the surviving i/j seqs.
  selected = proxies.proxy_select(n_seq=16, iselection=flex.size_t([0,2,4]))
  assert selected.size() == 0
  selected = proxies.proxy_select(n_seq=16,
    iselection=flex.size_t([1,2,3,4,6]))
  assert selected.size() == 2
  check(selected[0], i_seqs=(0,1,2), j_seqs=(1,2,3), weight=1,
    target_angle_deg=11, slack=1, top_out=True, limit=1, origin_id=0)
  check(selected[1], i_seqs=(0, 1, 2, 3), j_seqs=(2, 3, 4), weight=2,
    target_angle_deg=12, slack=2, top_out=True, limit=2, origin_id=1)
  #
  # origin_id-based selection.
  selected = proxies.proxy_select(origin_id=5)
  assert selected.size() == 0
  selected = proxies.proxy_select(origin_id=0)
  assert selected.size() == 1
  check(selected[0], i_seqs=(0,1,2,3), j_seqs=(2,3,4,5), weight=1,
    target_angle_deg=11, slack=1, top_out=True, limit=1, origin_id=0)
  selected = proxies.proxy_select(origin_id=1)
  assert selected.size() == 2
  check(selected[0], i_seqs=(1,2,3,4), j_seqs=(3,4,5,6), weight=2,
    target_angle_deg=12, slack=2, top_out=True, limit=2, origin_id=1)
  check(selected[1], i_seqs=(2,3,10,11), j_seqs=(4,5,12,13), weight=3,
    target_angle_deg=13, slack=3, top_out=True, limit=3, origin_id=1)
  # - geometry_restraints.remove.parallelities
  rest = proxies.proxy_remove(selection=flex.bool([True]*16))
  assert len(rest) == 0
  rest = proxies.proxy_remove(selection=flex.bool([True]*6+[False]*10))
  assert len(rest) == 2 # 3rd and 4th
  check(rest[0], i_seqs=(2, 3, 10, 11), j_seqs=(4, 5, 12, 13), weight=3,
    target_angle_deg=13, slack=3, top_out=True, limit=3, origin_id=1)
  check(rest[1], i_seqs=(3, 1, 12, 14), j_seqs=(5, 6, 14, 15), weight=4,
    target_angle_deg=14, slack=4, top_out=True, limit=4, origin_id=3)
  rest = proxies.proxy_remove(origin_id=1)
  assert len(rest) == 2 # 1st and 4th
  check(rest[0], i_seqs=(0,1,2,3), j_seqs=(2,3,4,5), weight=1,
    target_angle_deg=11, slack=1, top_out=True, limit=1, origin_id=0)
  check(rest[1], i_seqs=(3, 1, 12, 14), j_seqs=(5, 6, 14, 15), weight=4,
    target_angle_deg=14, slack=4, top_out=True, limit=4, origin_id=3)
def exercise_origin_id_selections_for_bonds():
  """Check proxy_select(origin_id=...) on simple and asu bond proxies.

  Thirty proxies of each kind are built, ten per origin_id in (0, 1, 2);
  selecting by an existing origin_id must return exactly ten proxies,
  and selecting by an unused origin_id must return none.
  """
  # Simple bond proxies: i_seqs differ, origin_id set per decade of i.
  simple = []
  for origin_id in (0, 1, 2):
    for i in range(origin_id*10, origin_id*10+10):
      simple.append(geometry_restraints.bond_simple_proxy(
        i_seqs=[i,i+1],
        distance_ideal=3.5,
        weight=1,
        slack=1,
        limit=1,
        top_out=False,
        origin_id=origin_id))
  proxies = geometry_restraints.shared_bond_simple_proxy(simple)
  assert len(proxies.proxy_select(origin_id=1)) == 10
  assert len(proxies.proxy_select(origin_id=2)) == 10
  assert len(proxies.proxy_select(origin_id=3)) == 0
  # Asu bond proxies: all thirty share the first generated pair.
  sites_cart = flex.vec3_double([[1,2,3],[2,3,4]])
  asu_mappings = direct_space_asu.non_crystallographic_asu_mappings(
    sites_cart=sites_cart)
  pair_generator = crystal.neighbors_fast_pair_generator(
    asu_mappings=asu_mappings,
    distance_cutoff=5)
  pgn = pair_generator.next()
  asu = []
  for origin_id in (0, 1, 2):
    for _ in range(10):
      asu.append(geometry_restraints.bond_asu_proxy(
        pair=pgn,
        distance_ideal=2,
        weight=10, slack=2, limit=1, top_out=True, origin_id=origin_id))
  proxies = geometry_restraints.shared_bond_asu_proxy(asu)
  assert len(proxies.proxy_select(origin_id=1)) == 10
  assert len(proxies.proxy_select(origin_id=2)) == 10
  assert len(proxies.proxy_select(origin_id=3)) == 0
def exercise():
exercise_bond_similarity()
exercise_bond()
exercise_bonds_with_symops()
exercise_nonbonded()
exercise_nonbonded_cos()
exercise_angle()
exercise_dihedral()
exercise_chirality()
exercise_planarity()
# exercise_planarity_top_out()
exercise_proxy_show()
exercise_parallelity()
exercise_origin_id_selections_for_bonds()
print "OK"
if (__name__ == "__main__"):
  # Allow running this test module directly as a script.
  exercise()
| 36.793637
| 96
| 0.672669
| 19,875
| 123,737
| 3.954264
| 0.040101
| 0.043402
| 0.07679
| 0.032752
| 0.837373
| 0.788411
| 0.735593
| 0.697739
| 0.660088
| 0.631116
| 0
| 0.09727
| 0.185935
| 123,737
| 3,362
| 97
| 36.804581
| 0.682945
| 0.014701
| 0
| 0.579013
| 0
| 0.014366
| 0.075059
| 0.00051
| 0
| 0
| 0
| 0
| 0.185509
| 0
| null | null | 0
| 0.004372
| null | null | 0.005309
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
719618aabe62add2e42d6ecb347934bf2c39871f
| 114
|
py
|
Python
|
tests/conftest.py
|
sander76/mspyteams
|
b8755afe8533fd1b9c059ca5d176b335f8070310
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
sander76/mspyteams
|
b8755afe8533fd1b9c059ca5d176b335f8070310
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
sander76/mspyteams
|
b8755afe8533fd1b9c059ca5d176b335f8070310
|
[
"MIT"
] | null | null | null |
import pytest
from mspyteams import Card
@pytest.fixture
def card():
"""Card fixture."""
return Card()
| 11.4
| 26
| 0.666667
| 14
| 114
| 5.428571
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 114
| 9
| 27
| 12.666667
| 0.844444
| 0.114035
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
71c2c4269d4d5141a41b767cc11d249efe26e935
| 156
|
py
|
Python
|
GOFevaluation/__init__.py
|
XENONnT/GOFevaluation
|
ef34b2b0ee911d74cee0ba7c1fdcfc25dedb7350
|
[
"BSD-3-Clause"
] | null | null | null |
GOFevaluation/__init__.py
|
XENONnT/GOFevaluation
|
ef34b2b0ee911d74cee0ba7c1fdcfc25dedb7350
|
[
"BSD-3-Clause"
] | 3
|
2021-10-19T13:46:42.000Z
|
2021-12-28T15:06:59.000Z
|
GOFevaluation/__init__.py
|
XENONnT/GOFevaluation
|
ef34b2b0ee911d74cee0ba7c1fdcfc25dedb7350
|
[
"BSD-3-Clause"
] | null | null | null |
__version__ = '0.1.0'
from .utils import *
from .evaluator_base import *
from .evaluators_nd import *
from .evaluators_1d import *
from .gof_test import *
| 19.5
| 29
| 0.75
| 23
| 156
| 4.73913
| 0.565217
| 0.366972
| 0.366972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.153846
| 156
| 7
| 30
| 22.285714
| 0.795455
| 0
| 0
| 0
| 0
| 0
| 0.032051
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.833333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
71e3e0f186abfd10e7cc8646753d5bff9a25310c
| 2,493
|
py
|
Python
|
roles/wagtail/wagtail_setup/files/local_encrypted.py
|
allthatilk/deployment
|
0985547fcd4312609c908dcaf4dcb5c1a41bcdd5
|
[
"MIT"
] | null | null | null |
roles/wagtail/wagtail_setup/files/local_encrypted.py
|
allthatilk/deployment
|
0985547fcd4312609c908dcaf4dcb5c1a41bcdd5
|
[
"MIT"
] | null | null | null |
roles/wagtail/wagtail_setup/files/local_encrypted.py
|
allthatilk/deployment
|
0985547fcd4312609c908dcaf4dcb5c1a41bcdd5
|
[
"MIT"
] | null | null | null |
$ANSIBLE_VAULT;1.1;AES256
30376339613033613465376137656266636566663462633936333832343563666434313731303566
3631643935323863333763386132356465316163626239660a343530343634653034616334393231
61626566653664376165383334336131333761323563643536633465623632373032303537636631
3362336662363730660a396530306565323065393436616566663762623863383732653337616461
32636330636330623031626362313861366266356532353033366563343662666366643433356361
66343966616466323435313565663062343761356463656135393138313238303830656639343834
34326332333261346561376638383537313331393638343934383563393436626365346335353031
32306534393863306334623666316536333933636338343935363734383939366136323738363564
34316661366262643761363961306463333331653264323739303964623730346566636230313163
30323832393738373338313833383738663630383837343238363962623838363235343937346536
35363164623534353333633538373765383730343637613635663163376363653365373862363337
63373938636565346331623566396234396538626566653164303332663762663530323361633035
38316137626662353330356437313231663865633239386235623065613233393734383463646462
39326533346664626331653639303832633534373433313131326435663133313139613333333461
32626638383261353932323730656364653531616635363339396165323238656334393536626135
32393839316662633538303338366435353363386464653934383364633764303166366532643733
38393763316662663433303966356439383564633839613661663165313430376236643636306232
37326232383639326234313065333336303036663737323234383739643164613433383534376261
61373932353432303866663661356363373862663762366562333734366361663761313432353733
66623761313564373538346135363666653139313665346533626538663035316332343931366333
61626664303332313932356235663138373538346663326361336239343737346535376365626636
65316231656538623163383436653030376539373965343231353134303635393662323733396163
37333232313561633532633335386531386162646539333633626439343036353463303036613464
31346166663837386561656663633831616333326335393032353235303963613361636562396338
63643336376562363931353332653033323161373766396463396362363532363762313730396438
34353138666233313238333864343235396532323135663361336562623639303731376230303031
61643961373433646161633063646139306533636139653538313536313565353761383232636331
33343039643939376230626665393730356265663437643238316138376466616134316230363735
37643461376134363961653138323237633435353566363333313535633235373262333836376135
35663131396138663933383539613033336266326264383166653337666432313062336239353539
643366656536356339333036383437393935
| 75.545455
| 80
| 0.98556
| 36
| 2,493
| 68.222222
| 0.972222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.991061
| 0.012836
| 2,493
| 32
| 81
| 77.90625
| 0.006908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e084862d4e8d2a7c044aeebe3c94fac5a80bfed6
| 99
|
py
|
Python
|
app/download/__init__.py
|
Toxe/simple-file-upload-server
|
8f0473e76d739836910e27498bc837f78801a386
|
[
"MIT"
] | null | null | null |
app/download/__init__.py
|
Toxe/simple-file-upload-server
|
8f0473e76d739836910e27498bc837f78801a386
|
[
"MIT"
] | null | null | null |
app/download/__init__.py
|
Toxe/simple-file-upload-server
|
8f0473e76d739836910e27498bc837f78801a386
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
bp = Blueprint("download", __name__)
from app.download import routes
| 16.5
| 36
| 0.787879
| 13
| 99
| 5.692308
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141414
| 99
| 5
| 37
| 19.8
| 0.870588
| 0
| 0
| 0
| 0
| 0
| 0.080808
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 5
|
e08b85901d5a0ceb840d7f255ec2de39a72592c2
| 37
|
py
|
Python
|
python/lander.py
|
xt0fer/Lander
|
3827ce2c751d6643c59a4342a0669c8fdf205840
|
[
"MIT"
] | null | null | null |
python/lander.py
|
xt0fer/Lander
|
3827ce2c751d6643c59a4342a0669c8fdf205840
|
[
"MIT"
] | null | null | null |
python/lander.py
|
xt0fer/Lander
|
3827ce2c751d6643c59a4342a0669c8fdf205840
|
[
"MIT"
] | null | null | null |
# Left as an exercise for the Reader.
| 37
| 37
| 0.756757
| 7
| 37
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.189189
| 37
| 1
| 37
| 37
| 0.933333
| 0.945946
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e09da393ea77de1455dc7dd6c22537cccc544419
| 104
|
py
|
Python
|
vortexasdk/config.py
|
VorTECHsa/python-sdk
|
d85aabd8d9843e4d04d857360492bea002c2b24b
|
[
"Apache-2.0"
] | 9
|
2020-01-22T15:36:16.000Z
|
2022-03-14T10:05:19.000Z
|
vortexasdk/config.py
|
VorTECHsa/python-sdk
|
d85aabd8d9843e4d04d857360492bea002c2b24b
|
[
"Apache-2.0"
] | 114
|
2020-01-08T11:08:24.000Z
|
2022-03-30T16:42:23.000Z
|
vortexasdk/config.py
|
V0RT3X4/python-sdk
|
4cffae83b90a58a56f1a534057fa1ca1c8671e05
|
[
"Apache-2.0"
] | 6
|
2020-05-28T00:09:02.000Z
|
2022-03-14T03:52:44.000Z
|
import os
LOG_LEVEL = os.getenv("LOG_LEVEL", "WARNING").upper()
LOG_FILE = os.getenv("LOG_FILE", None)
| 20.8
| 53
| 0.711538
| 17
| 104
| 4.117647
| 0.529412
| 0.228571
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105769
| 104
| 4
| 54
| 26
| 0.752688
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e0a5893b1ec05a9b4ecf2eacc5a48d15ae37c3eb
| 64
|
py
|
Python
|
echoes/reservoir/__init__.py
|
fabridamicelli/echoes
|
99fd1b1daf20b45bea21091a741ccbf711393294
|
[
"MIT"
] | 11
|
2020-08-03T23:19:00.000Z
|
2022-03-27T10:44:29.000Z
|
echoes/reservoir/__init__.py
|
fabridamicelli/echoes
|
99fd1b1daf20b45bea21091a741ccbf711393294
|
[
"MIT"
] | null | null | null |
echoes/reservoir/__init__.py
|
fabridamicelli/echoes
|
99fd1b1daf20b45bea21091a741ccbf711393294
|
[
"MIT"
] | 4
|
2021-01-01T14:34:26.000Z
|
2022-03-21T18:03:47.000Z
|
from echoes.reservoir._leaky_numba import ReservoirLeakyNeurons
| 32
| 63
| 0.90625
| 7
| 64
| 8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 64
| 1
| 64
| 64
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0ba8f4c1badf95f87554a881323837de381989c
| 294
|
py
|
Python
|
torch/distributed/fsdp/__init__.py
|
cdeepali/pytorch
|
a8e45b596910f90013637ccc735d6ca21a93d852
|
[
"Intel"
] | null | null | null |
torch/distributed/fsdp/__init__.py
|
cdeepali/pytorch
|
a8e45b596910f90013637ccc735d6ca21a93d852
|
[
"Intel"
] | null | null | null |
torch/distributed/fsdp/__init__.py
|
cdeepali/pytorch
|
a8e45b596910f90013637ccc735d6ca21a93d852
|
[
"Intel"
] | null | null | null |
from .flatten_params_wrapper import FlatParameter
from .fully_sharded_data_parallel import FullyShardedDataParallel
from .fully_sharded_data_parallel import CPUOffload, BackwardPrefetch, ShardingStrategy, MixedPrecision
from .fully_sharded_data_parallel import StateDictType, OptimStateKeyType
| 58.8
| 103
| 0.904762
| 31
| 294
| 8.225806
| 0.548387
| 0.105882
| 0.188235
| 0.235294
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068027
| 294
| 4
| 104
| 73.5
| 0.930657
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
e0c84098aea5dd01fa7df3ce3c488249570fac6b
| 194
|
py
|
Python
|
exception.py
|
cjpit/selpi
|
00d5aa02f75d1862c54245136b4ab457134516d0
|
[
"MIT"
] | 5
|
2020-08-08T00:13:38.000Z
|
2022-03-01T23:49:45.000Z
|
exception.py
|
cjpit/selpi
|
00d5aa02f75d1862c54245136b4ab457134516d0
|
[
"MIT"
] | 5
|
2021-09-23T20:51:27.000Z
|
2022-01-19T04:54:45.000Z
|
exception.py
|
cjpit/selpi
|
00d5aa02f75d1862c54245136b4ab457134516d0
|
[
"MIT"
] | 2
|
2021-04-24T12:12:31.000Z
|
2021-07-17T12:56:50.000Z
|
class ValidationException(Exception):
pass
class OutOfBoundsException(Exception):
pass
class NotFoundException(Exception):
pass
class ConnectionLostException(Exception):
pass
| 16.166667
| 41
| 0.778351
| 16
| 194
| 9.4375
| 0.4375
| 0.344371
| 0.357616
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159794
| 194
| 11
| 42
| 17.636364
| 0.92638
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e0ec7d4bc5313851c4741bc3fa8c77a0c2c488cf
| 8,835
|
py
|
Python
|
code/python/TimeSeriesAPIforDigitalPortals/v3/fds/sdk/TimeSeriesAPIforDigitalPortals/models/__init__.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 6
|
2022-02-07T16:34:18.000Z
|
2022-03-30T08:04:57.000Z
|
code/python/TimeSeriesAPIforDigitalPortals/v3/fds/sdk/TimeSeriesAPIforDigitalPortals/models/__init__.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | 2
|
2022-02-07T05:25:57.000Z
|
2022-03-07T14:18:04.000Z
|
code/python/TimeSeriesAPIforDigitalPortals/v3/fds/sdk/TimeSeriesAPIforDigitalPortals/models/__init__.py
|
factset/enterprise-sdk
|
3fd4d1360756c515c9737a0c9a992c7451d7de7e
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
# import all models into this package
# if you have many models here with many references from one model to another this may
# raise a RecursionError
# to avoid this, import only the models that you directly need like:
# from from fds.sdk.TimeSeriesAPIforDigitalPortals.model.pet import Pet
# or import this package, but before doing it, use:
# import sys
# sys.setrecursionlimit(n)
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.attributes_member import AttributesMember
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.cursor_based_pagination_output_object import CursorBasedPaginationOutputObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.cursor_based_pagination_output_object_without_total import CursorBasedPaginationOutputObjectWithoutTotal
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.error_meta_object import ErrorMetaObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.error_object import ErrorObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object import InlineObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object1 import InlineObject1
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object10 import InlineObject10
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object2 import InlineObject2
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object3 import InlineObject3
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object4 import InlineObject4
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object5 import InlineObject5
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object6 import InlineObject6
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object7 import InlineObject7
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object8 import InlineObject8
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_object9 import InlineObject9
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response200 import InlineResponse200
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2001 import InlineResponse2001
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response20010 import InlineResponse20010
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response20010_data import InlineResponse20010Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response20010_data_subsamples import InlineResponse20010DataSubsamples
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2001_data import InlineResponse2001Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2001_meta import InlineResponse2001Meta
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2002 import InlineResponse2002
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2002_data import InlineResponse2002Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2003 import InlineResponse2003
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2003_data import InlineResponse2003Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2004 import InlineResponse2004
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2004_data import InlineResponse2004Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2004_data_range import InlineResponse2004DataRange
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2005 import InlineResponse2005
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2005_data import InlineResponse2005Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2005_data_prices import InlineResponse2005DataPrices
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2006 import InlineResponse2006
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2006_data import InlineResponse2006Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2006_data_subsample import InlineResponse2006DataSubsample
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2007 import InlineResponse2007
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2007_data import InlineResponse2007Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2007_data_subsamples import InlineResponse2007DataSubsamples
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2008 import InlineResponse2008
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2008_data import InlineResponse2008Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2008_data_prices import InlineResponse2008DataPrices
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2009 import InlineResponse2009
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2009_data import InlineResponse2009Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response2009_data_subsample import InlineResponse2009DataSubsample
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response200_data import InlineResponse200Data
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response200_data_range import InlineResponse200DataRange
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.inline_response200_meta import InlineResponse200Meta
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.language_member import LanguageMember
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.offset_based_pagination_output_object import OffsetBasedPaginationOutputObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.offset_based_pagination_output_object_without_total import OffsetBasedPaginationOutputObjectWithoutTotal
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.partial_output_object import PartialOutputObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_eod_subsample_get_data import PricesTimeSeriesEodSubsampleGetData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_eod_subsample_get_data_interval import PricesTimeSeriesEodSubsampleGetDataInterval
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_eod_subsample_list_data import PricesTimeSeriesEodSubsampleListData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_intraday_list_data import PricesTimeSeriesIntradayListData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_intraday_list_data_range import PricesTimeSeriesIntradayListDataRange
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_intraday_list_meta import PricesTimeSeriesIntradayListMeta
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.prices_time_series_intraday_list_meta_pagination import PricesTimeSeriesIntradayListMetaPagination
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.status_object import StatusObject
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_list_data import VendorChartIQTimeSeriesEodListData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_list_data_adjustments import VendorChartIQTimeSeriesEodListDataAdjustments
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_list_data_range import VendorChartIQTimeSeriesEodListDataRange
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_list_meta import VendorChartIQTimeSeriesEodListMeta
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_list_meta_pagination import VendorChartIQTimeSeriesEodListMetaPagination
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_subsample_get_data import VendorChartIQTimeSeriesEodSubsampleGetData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_subsample_get_data_interval import VendorChartIQTimeSeriesEodSubsampleGetDataInterval
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_subsample_get_meta import VendorChartIQTimeSeriesEodSubsampleGetMeta
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_eod_subsample_list_data import VendorChartIQTimeSeriesEodSubsampleListData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_intraday_subsample_get_data import VendorChartIQTimeSeriesIntradaySubsampleGetData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_intraday_subsample_get_data_adjustments import VendorChartIQTimeSeriesIntradaySubsampleGetDataAdjustments
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_intraday_subsample_list_data import VendorChartIQTimeSeriesIntradaySubsampleListData
from fds.sdk.TimeSeriesAPIforDigitalPortals.model.vendor_chart_iq_time_series_intraday_subsample_list_data_range import VendorChartIQTimeSeriesIntradaySubsampleListDataRange
| 103.941176
| 183
| 0.923826
| 892
| 8,835
| 8.874439
| 0.190583
| 0.065437
| 0.093482
| 0.373926
| 0.63618
| 0.606241
| 0.522739
| 0.385927
| 0.246968
| 0.246968
| 0
| 0.032681
| 0.040634
| 8,835
| 84
| 184
| 105.178571
| 0.901251
| 0.042898
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
46091a1963c55e448ef0cb97eb8816ad636f74c1
| 24
|
py
|
Python
|
desktopmagic/__init__.py
|
p3rd1x/Desktopmagic
|
906cbe16f4e6c5fea7121c186ff8c6e7c997b2f7
|
[
"MIT"
] | 49
|
2015-05-20T08:19:42.000Z
|
2021-11-12T17:20:40.000Z
|
desktopmagic/__init__.py
|
p3rd1x/Desktopmagic
|
906cbe16f4e6c5fea7121c186ff8c6e7c997b2f7
|
[
"MIT"
] | 9
|
2015-06-17T22:47:21.000Z
|
2022-03-15T09:00:28.000Z
|
desktopmagic/__init__.py
|
p3rd1x/Desktopmagic
|
906cbe16f4e6c5fea7121c186ff8c6e7c997b2f7
|
[
"MIT"
] | 16
|
2015-12-13T18:34:45.000Z
|
2021-02-17T13:50:17.000Z
|
__version__ = '14.3.11'
| 12
| 23
| 0.666667
| 4
| 24
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238095
| 0.125
| 24
| 1
| 24
| 24
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
460a4c59537503516d54bcd5539b59b3d16cd316
| 62
|
py
|
Python
|
python/atomgen.py
|
tbedford/code-snippets
|
9afe36c2726829f14fa5ec11acb8214bed704938
|
[
"MIT"
] | null | null | null |
python/atomgen.py
|
tbedford/code-snippets
|
9afe36c2726829f14fa5ec11acb8214bed704938
|
[
"MIT"
] | null | null | null |
python/atomgen.py
|
tbedford/code-snippets
|
9afe36c2726829f14fa5ec11acb8214bed704938
|
[
"MIT"
] | 1
|
2018-10-09T02:03:12.000Z
|
2018-10-09T02:03:12.000Z
|
# Code to generate Atom feed for coffeeandcode.neocities.org
| 20.666667
| 60
| 0.806452
| 9
| 62
| 5.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 62
| 2
| 61
| 31
| 0.943396
| 0.935484
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
460d61f266cfe9f8225f89b5e60b646b8afc9f16
| 112
|
py
|
Python
|
neuralprocesses/architectures/__init__.py
|
tom-andersson/neuralprocesses
|
7696dc1c8bbe922fb2a1ba18fe0cdda041fc9cfd
|
[
"MIT"
] | null | null | null |
neuralprocesses/architectures/__init__.py
|
tom-andersson/neuralprocesses
|
7696dc1c8bbe922fb2a1ba18fe0cdda041fc9cfd
|
[
"MIT"
] | null | null | null |
neuralprocesses/architectures/__init__.py
|
tom-andersson/neuralprocesses
|
7696dc1c8bbe922fb2a1ba18fe0cdda041fc9cfd
|
[
"MIT"
] | null | null | null |
from .agnp import *
from .climate import *
from .convgnp import *
from .fullconvgnp import *
from .gnp import *
| 18.666667
| 26
| 0.732143
| 15
| 112
| 5.466667
| 0.466667
| 0.487805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 112
| 5
| 27
| 22.4
| 0.891304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1cd1871614293d919728fa816dfbf4ad4bdff2cc
| 156
|
py
|
Python
|
exp.py
|
mahendra1904/pythod-programs
|
d4d75dac65e9795ea5728f75d90aa0b39296b25e
|
[
"bzip2-1.0.6"
] | null | null | null |
exp.py
|
mahendra1904/pythod-programs
|
d4d75dac65e9795ea5728f75d90aa0b39296b25e
|
[
"bzip2-1.0.6"
] | null | null | null |
exp.py
|
mahendra1904/pythod-programs
|
d4d75dac65e9795ea5728f75d90aa0b39296b25e
|
[
"bzip2-1.0.6"
] | null | null | null |
x=eval(input("Enter the value of x "))
y=eval(input("Enter the value of y"))
z=eval(input("Enter the value of z "))
exp=4*x**4+3*y**3+9*z+6*3.14
print(exp)
| 26
| 38
| 0.647436
| 38
| 156
| 2.657895
| 0.421053
| 0.267327
| 0.415842
| 0.504951
| 0.712871
| 0.712871
| 0
| 0
| 0
| 0
| 0
| 0.065693
| 0.121795
| 156
| 5
| 39
| 31.2
| 0.671533
| 0
| 0
| 0
| 0
| 0
| 0.397436
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1cfa3e5f7e38388b0bb759c26c484a06692ac1f3
| 56
|
py
|
Python
|
thingsdb/util/__init__.py
|
thingsdb/python-thingsdb
|
19938f8d42d20a0ddc22f331140aba6e42c6e0e7
|
[
"MIT"
] | 4
|
2020-05-13T20:06:20.000Z
|
2021-06-01T08:21:52.000Z
|
thingsdb/util/__init__.py
|
thingsdb/python-thingsdb
|
19938f8d42d20a0ddc22f331140aba6e42c6e0e7
|
[
"MIT"
] | 22
|
2020-05-29T13:01:01.000Z
|
2022-03-02T10:08:21.000Z
|
thingsdb/util/__init__.py
|
thingsdb/python-thingsdb
|
19938f8d42d20a0ddc22f331140aba6e42c6e0e7
|
[
"MIT"
] | 1
|
2020-08-12T09:34:59.000Z
|
2020-08-12T09:34:59.000Z
|
from .cnscope import cnscope
from .access import Access
| 18.666667
| 28
| 0.821429
| 8
| 56
| 5.75
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 2
| 29
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e817e29d57bbe8e3e26183237e7bc10e5375289f
| 666
|
py
|
Python
|
src/models/customer.py
|
ehizman/eden
|
d231f614078c7031eaba63f7864fb7996927cc6d
|
[
"MIT"
] | null | null | null |
src/models/customer.py
|
ehizman/eden
|
d231f614078c7031eaba63f7864fb7996927cc6d
|
[
"MIT"
] | 1
|
2021-09-09T13:09:41.000Z
|
2021-09-10T13:31:00.000Z
|
src/models/customer.py
|
ehizman/eden
|
d231f614078c7031eaba63f7864fb7996927cc6d
|
[
"MIT"
] | 2
|
2021-08-19T15:03:22.000Z
|
2021-09-09T10:03:40.000Z
|
from models.cart import Cart
from src.models.user import User
class Customer(User):
def __init__(self, first_name, last_name, email, user_name, password):
super().__init__(first_name, last_name, email, user_name, password)
self.__cart = Cart()
self.__list_of_billing_info = []
def add_to_cart(self, item):
self.__cart.add_item_to_cart(item=item)
@property
def get__cart(self):
return self.__cart
def add_to_list_billing_info(self, billing_info):
self.__list_of_billing_info.append(billing_info)
@property
def get__list_of_billing_info(self):
return self.__list_of_billing_info
| 28.956522
| 75
| 0.71021
| 96
| 666
| 4.375
| 0.291667
| 0.183333
| 0.12381
| 0.161905
| 0.330952
| 0.180952
| 0.180952
| 0.180952
| 0
| 0
| 0
| 0
| 0.202703
| 666
| 23
| 76
| 28.956522
| 0.79096
| 0
| 0
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0.117647
| 0.117647
| 0.117647
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
e81d58cf59aca25d5a131acc9c29a885b423a9db
| 3,253
|
py
|
Python
|
InspirationHub.py
|
Saberlion/InspirationHub
|
f793ad364c4644ffc779030e4d8e840b805c9341
|
[
"MIT"
] | null | null | null |
InspirationHub.py
|
Saberlion/InspirationHub
|
f793ad364c4644ffc779030e4d8e840b805c9341
|
[
"MIT"
] | null | null | null |
InspirationHub.py
|
Saberlion/InspirationHub
|
f793ad364c4644ffc779030e4d8e840b805c9341
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 01 19:47:26 2015
@author: sherlockye
"""
import re
import sys
pinyin_dict = {}
def LoadPinyinDict():
global pinyin_dict
for line in open('pinyin.txt'):
line = line.replace('\n','')
line = line.split(":")
pinyin_dict[unicode(line[0],"utf-8")] = line[1].split(",")
def UniformPinyin(pinyin):
pinyin, number = re.subn('(.)h(.*)', r'\1\2', pinyin)
pinyin, number = re.subn('(.*)ng', r'\1n', pinyin)
return pinyin
def GetPinyin(word):
if pinyin_dict.has_key(word):
return pinyin_dict[word][0]
else:
return "-"
def convertPinyin(word):
# word = word.replace(",","")
return map(lambda x:UniformPinyin(GetPinyin(x)),word)
def ValidCheck(wordList_py,dstWord_py):
idx = []
for wordidx,wordpy in enumerate(wordList_py):
inflag = False
for pyidx,py in enumerate(wordpy):
if py in dstWord_py:
dstidx = dstWord_py.index(py)
idx.append([wordidx,pyidx,dstidx])
inflag = True
if not inflag:
return []
return idx
def PrintValidResult(pinyinSet,oriText,wordList):
line = oriText
try:
dstWord_py = convertPinyin(line)
except:
print "Error:%s"%(oriText)
return
ReplaceRule = ValidCheck(pinyinSet,dstWord_py)
if len(ReplaceRule):
result = list(line)
for replace_idx in ReplaceRule:
dst = wordList[replace_idx[0]][replace_idx[1]]
src = result[replace_idx[2]]
line = line.replace(src,dst)
print line,'<<----',oriText
def ReadGushi(filename,pinyinSet,wordList):
for (linenum,oriText) in enumerate(open(filename)):
oriText = unicode(oriText.replace('\n',''),"utf-8")
regexp = "<<(.*)>>:(.*)$"
result = re.findall(regexp,oriText)
for (title,poet) in result:
poet = poet.replace(unicode(',',"utf-8"),'---')
poet = poet.replace(unicode('。',"utf-8"),'---')
poet = poet.replace(unicode('?',"utf-8"),'---')
poet = poet.replace(unicode('、',"utf-8"),'---')
poet = poet.replace(unicode('!',"utf-8"),'---')
poet = poet.replace(unicode(';',"utf-8"),'---')
poet = poet.replace(unicode(':',"utf-8"),'---')
poet = poet.replace(unicode(':',"utf-8"),'---')
poet = poet.replace(unicode('”',"utf-8"),'---')
poet = poet.replace(unicode('“',"utf-8"),'---')
poet = poet.split('---')
for p in poet:
PrintValidResult(pinyinSet,p,wordList)
def main(argv1,argv2):
wordList = [argv1,argv2]
wordList = map(lambda x:unicode(x,"gb2312"),wordList)
pinyinSet = map(lambda x:convertPinyin(x),wordList)
# ReadGushi("tangshi.txt",pinyinSet,wordList)
#ReadGushi("songci.txt",pinyinSet,wordList)
for (linenum,oriText) in enumerate(open('chengyu.txt')):
oriText = unicode(oriText.replace('\n',''),"utf-8")
PrintValidResult(pinyinSet,oriText,wordList)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "python InspirationHub.py input1 input2"
else:
LoadPinyinDict()
main(sys.argv[1],sys.argv[2])
| 3,253
| 3,253
| 0.569935
| 375
| 3,253
| 4.877333
| 0.277333
| 0.030618
| 0.082012
| 0.120284
| 0.262438
| 0.236195
| 0.236195
| 0.200109
| 0.146528
| 0.146528
| 0
| 0.01964
| 0.248694
| 3,253
| 1
| 3,253
| 3,253
| 0.728723
| 0.998463
| 0
| 0.050633
| 0
| 0
| 0.078361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.025316
| null | null | 0.037975
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e82bb7d531cb4c787076e7841da09b64256b2256
| 1,893
|
py
|
Python
|
portalsdmz/scenarios/form.py
|
larc-usp/data-transfer-tester
|
fd94f120961d6016f58247d31b5d04b2f2b2cd07
|
[
"AFL-2.1"
] | null | null | null |
portalsdmz/scenarios/form.py
|
larc-usp/data-transfer-tester
|
fd94f120961d6016f58247d31b5d04b2f2b2cd07
|
[
"AFL-2.1"
] | 2
|
2017-12-13T20:57:34.000Z
|
2017-12-13T20:57:35.000Z
|
portalsdmz/scenarios/form.py
|
larc-usp/data-transfer-tester
|
fd94f120961d6016f58247d31b5d04b2f2b2cd07
|
[
"AFL-2.1"
] | null | null | null |
from django import forms
import datetime
SIZE_CHOICES = (
("1G", '1G '),
("10G", '10G '),
("100G", '100G ')
)
class ScenarioForm(forms.Form):
    """Parameters for one data-transfer test scenario.

    Field names are in Portuguese, matching the project (nome=name,
    tamanho=size, limite=limit, destino=destination, origem=source,
    fluxo=flow count); the boolean fields appear to toggle individual
    transfer tools for the test run.
    """
    nome = forms.CharField(max_length=120, initial='Teste', widget=forms.TextInput(attrs={'class': 'col-sm-5'}))
    # Pass the callable itself, NOT datetime.datetime.now(): a call here would
    # run once at import time and freeze that timestamp into every form
    # instance. Django evaluates a callable `initial` per form instantiation.
    data = forms.DateTimeField(initial=datetime.datetime.now, widget=forms.HiddenInput())
    tamanho = forms.ChoiceField(choices=SIZE_CHOICES, widget=forms.Select(attrs={'class': 'col-sm-5'}))
    ip_remoto = forms.CharField(max_length=120, initial='172.20.5.38', widget=forms.TextInput(attrs={'class': 'col-sm-5'}))
    limite = forms.IntegerField(initial=1, min_value=1, max_value=10, widget=forms.NumberInput(attrs={'class': 'col-sm-5'}))
    destino = forms.CharField(max_length=120, initial='dados/area-teste', widget=forms.TextInput(attrs={'class': 'col-sm-5'}))
    origem = forms.CharField(max_length=120, initial='dados/area-teste', widget=forms.TextInput(attrs={'class': 'col-sm-5'}))
    # One checkbox per transfer tool; all unchecked by default.
    scp = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    wget = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    udr = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    iperf = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    gridftp = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    axel = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    aria2c = forms.BooleanField(required=False, initial=False, widget=forms.CheckboxInput(attrs={'class': ''}))
    fluxo = forms.IntegerField(initial=1, min_value=1, max_value=10, widget=forms.NumberInput(attrs={'class': 'col-sm-5'}))
| 70.111111
| 128
| 0.700475
| 236
| 1,893
| 5.572034
| 0.258475
| 0.125475
| 0.069202
| 0.079848
| 0.776426
| 0.764259
| 0.714068
| 0.714068
| 0.686692
| 0.655513
| 0
| 0.028554
| 0.111992
| 1,893
| 26
| 129
| 72.807692
| 0.753718
| 0
| 0
| 0
| 0
| 0
| 0.122029
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.782609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
1c36755e5436d63d8309c37538f048289c08c336
| 229
|
py
|
Python
|
condominios/models.py
|
mpeyrotc/govector
|
5429d538d0bcee4d95d9069dd397b3b5b35b504c
|
[
"MIT"
] | null | null | null |
condominios/models.py
|
mpeyrotc/govector
|
5429d538d0bcee4d95d9069dd397b3b5b35b504c
|
[
"MIT"
] | null | null | null |
condominios/models.py
|
mpeyrotc/govector
|
5429d538d0bcee4d95d9069dd397b3b5b35b504c
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.db import models
class Avisos (models.Model):
    """Notice ("aviso") record."""

    # Numeric identifier for the notice.
    Id = models.IntegerField()
    # Short display name, up to 50 characters.
    Nombre = models.CharField(max_length=50)
    # Free-text description, up to 500 characters.
    Descripcion = models.CharField(max_length=500)
| 20.818182
| 50
| 0.759825
| 29
| 229
| 5.758621
| 0.689655
| 0.179641
| 0.215569
| 0.287425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025907
| 0.157205
| 229
| 10
| 51
| 22.9
| 0.839378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1c787777b2ae3d3014b0ec5feace971a57281961
| 1,415
|
py
|
Python
|
tests/test_querystring.py
|
sneakypete81/pytest-httpserver
|
f98c5022019b8875240cbf31cfd0c85dd15007a6
|
[
"MIT"
] | null | null | null |
tests/test_querystring.py
|
sneakypete81/pytest-httpserver
|
f98c5022019b8875240cbf31cfd0c85dd15007a6
|
[
"MIT"
] | null | null | null |
tests/test_querystring.py
|
sneakypete81/pytest-httpserver
|
f98c5022019b8875240cbf31cfd0c85dd15007a6
|
[
"MIT"
] | null | null | null |
import requests
from pytest import approx, raises
from pytest_httpserver import HTTPServer
def test_querystring_str(httpserver: HTTPServer):
    """A plain-string query_string matcher serves the configured body."""
    expected_body = "example_response"
    matcher = httpserver.expect_request(
        "/foobar", query_string="foo=bar", method="GET"
    )
    matcher.respond_with_data(expected_body)

    response = requests.get(httpserver.url_for("/foobar?foo=bar"))

    httpserver.check_assertions()
    assert response.text == expected_body
    assert response.status_code == 200
def test_querystring_bytes(httpserver: HTTPServer):
    """A bytes query_string matcher behaves the same as the str form."""
    expected_body = "example_response"
    matcher = httpserver.expect_request(
        "/foobar", query_string=b"foo=bar", method="GET"
    )
    matcher.respond_with_data(expected_body)

    response = requests.get(httpserver.url_for("/foobar?foo=bar"))

    httpserver.check_assertions()
    assert response.text == expected_body
    assert response.status_code == 200
def test_querystring_dict(httpserver: HTTPServer):
    """A dict query_string matcher accepts the parameters in either order."""
    httpserver.expect_request(
        "/foobar", query_string={"k1": "v1", "k2": "v2"}, method="GET"
    ).respond_with_data("example_response")

    # The dict matcher compares parsed parameters, so both orderings match.
    for query in ("/foobar?k1=v1&k2=v2", "/foobar?k2=v2&k1=v1"):
        response = requests.get(httpserver.url_for(query))
        httpserver.check_assertions()
        assert response.text == "example_response"
        assert response.status_code == 200
| 37.236842
| 112
| 0.729329
| 170
| 1,415
| 5.847059
| 0.241176
| 0.112676
| 0.076459
| 0.1167
| 0.866197
| 0.866197
| 0.866197
| 0.82495
| 0.643863
| 0.643863
| 0
| 0.019818
| 0.14417
| 1,415
| 37
| 113
| 38.243243
| 0.800991
| 0
| 0
| 0.548387
| 0
| 0
| 0.163958
| 0
| 0
| 0
| 0
| 0
| 0.387097
| 1
| 0.096774
| false
| 0
| 0.096774
| 0
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1c7e1888a0171272023e97a155919bcb2d2fd51d
| 154
|
py
|
Python
|
makewiki/forms.py
|
franklin-phan/makewikiv2
|
5bb2dce694e2a2fffcab5a30355be5fa57718c55
|
[
"MIT"
] | null | null | null |
makewiki/forms.py
|
franklin-phan/makewikiv2
|
5bb2dce694e2a2fffcab5a30355be5fa57718c55
|
[
"MIT"
] | 5
|
2020-06-06T01:32:33.000Z
|
2022-02-10T09:25:44.000Z
|
makewiki/forms.py
|
franklin-phan/makewikiv2
|
5bb2dce694e2a2fffcab5a30355be5fa57718c55
|
[
"MIT"
] | null | null | null |
from django import forms
class FriendlyForm(forms.Form):
first_name = forms.CharField(max_length=100)
last_name = forms.CharField(max_length=100)
| 30.8
| 48
| 0.779221
| 22
| 154
| 5.272727
| 0.636364
| 0.155172
| 0.310345
| 0.362069
| 0.517241
| 0.517241
| 0
| 0
| 0
| 0
| 0
| 0.044776
| 0.12987
| 154
| 5
| 49
| 30.8
| 0.820896
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
c70e62e8a6bb2ea3aa56d43b8cb7b187c42702a6
| 6,580
|
py
|
Python
|
spyql/nulltype.py
|
alin23/spyql
|
41105b7c536ae21139d0d89cfb6b2a8b6deebf1e
|
[
"MIT"
] | 432
|
2021-08-17T16:52:36.000Z
|
2022-03-30T15:33:26.000Z
|
spyql/nulltype.py
|
alin23/spyql
|
41105b7c536ae21139d0d89cfb6b2a8b6deebf1e
|
[
"MIT"
] | 48
|
2021-07-27T17:18:05.000Z
|
2022-03-15T09:33:01.000Z
|
spyql/nulltype.py
|
alin23/spyql
|
41105b7c536ae21139d0d89cfb6b2a8b6deebf1e
|
[
"MIT"
] | 16
|
2021-11-27T15:42:42.000Z
|
2022-02-16T11:36:37.000Z
|
# Defines the NULL datatype that mimics SQL's NULL
# Unlike None, which throws exceptions, operations with NULLs return NULL
# Allows getting items so that `a.get('b', NULL).get('c', NULL)`` returns NULL
# when `b` does not exist, and `NULL[x]` returns NULL
import spyql.log
class NullType:
    """SQL-style NULL singleton.

    Unlike None (which raises on most operations), NULL absorbs comparisons,
    arithmetic, bitwise operations and item access, yielding NULL again, so
    expressions over missing data evaluate instead of raising.
    """

    # Any attempt to construct a NullType hands back the shared singleton.
    def __new__(cls):
        return NULL

    # Pickle support: unpickling calls NullType(), which returns the singleton.
    def __reduce__(self):
        return (NullType, ())

    # copy()/deepcopy() must never create a second instance.
    def __copy__(self):
        return NULL

    def __deepcopy__(self, a):
        return NULL

    # Calling NULL(...) is a no-op (returns None).
    def __call__(self, default):
        pass

    def __repr__(self):
        return "NULL"

    # NULL renders as an empty string in text output.
    def __str__(self):
        return ""

    # Hashable so NULL can be a dict key or set member.
    def __hash__(self):
        return hash("NULL")

    # NULL is falsy in boolean contexts.
    def __bool__(self):
        return False

    # --- Comparisons: any comparison with NULL yields NULL (even ==/!=). ---
    def __lt__(self, other):
        return self

    def __le__(self, other):
        return self

    def __eq__(self, other):
        return self

    def __ne__(self, other):
        return self

    def __ge__(self, other):
        return self

    def __gt__(self, other):
        return self

    # --- Unary and binary arithmetic/bitwise operators propagate NULL. ---
    def __abs__(self):
        return self

    def __add__(self, other):
        return self

    def __and__(self, other):
        return self

    def __floordiv__(self, other):
        return self

    def __invert__(self):
        return self

    def __lshift__(self, other):
        return self

    def __mod__(self, other):
        return self

    def __mul__(self, other):
        return self

    def __matmul__(self, other):
        return self

    def __neg__(self):
        return self

    def __or__(self, other):
        return self

    def __pos__(self):
        return self

    def __pow__(self, other):
        return self

    def __rshift__(self, other):
        return self

    def __sub__(self, other):
        return self

    def __truediv__(self, other):
        return self

    def __xor__(self, other):
        return self

    # --- Container protocol: nothing is "in" NULL; NULL[x] is NULL;
    #     item assignment/deletion are silently ignored. ---
    def __contains__(self, other):
        return False

    def __delitem__(self, other):
        pass

    def __getitem__(self, other):
        return self

    def __setitem__(self, other, val):
        pass

    # --- Reflected operators (e.g. 1 + NULL) also propagate NULL. ---
    def __radd__(self, other):
        return self

    def __rand__(self, other):
        return self

    def __rfloordiv__(self, other):
        return self

    def __rlshift__(self, other):
        return self

    def __rmod__(self, other):
        return self

    def __rmul__(self, other):
        return self

    def __rmatmul__(self, other):
        return self

    def __ror__(self, other):
        return self

    def __rpow__(self, other):
        return self

    def __rrshift__(self, other):
        return self

    def __rsub__(self, other):
        return self

    def __rtruediv__(self, other):
        return self

    def __rxor__(self, other):
        return self

    # --- Augmented assignment on NULL (NULL += x, ...) keeps it NULL. ---
    def __iadd__(self, other):
        return self

    def __iand__(self, other):
        return self

    def __ifloordiv__(self, other):
        return self

    def __ilshift__(self, other):
        return self

    def __imod__(self, other):
        return self

    def __imul__(self, other):
        return self

    # NOTE(review): high __array_priority__ presumably makes NumPy defer to
    # NullType's reflected operators in mixed ndarray expressions — confirm.
    __array_priority__ = 10000

    def __imatmul__(self, other):
        return self

    def __ior__(self, other):
        return self

    def __ipow__(self, other):
        return self

    def __irshift__(self, other):
        return self

    def __isub__(self, other):
        return self

    def __itruediv__(self, other):
        return self

    def __ixor__(self, other):
        return self

    # --- Sequence-ish views: NULL is empty. ---
    def __len__(self):
        return 0

    def __iter__(self):
        return [].__iter__()

    # --- Rounding/truncation keep returning NULL. ---
    def __round__(self, ndigits=0):
        return self

    def __trunc__(self):
        return self

    def __floor__(self):
        return self

    def __ceil__(self):
        return self

    # dict-like .get() chains safely: NULL.get('b', NULL).get('c', NULL) is NULL.
    def get(self, *args, **kwargs):
        return self
# singleton
try:
    # If NULL already exists (module reloaded), keep the existing instance so
    # identity checks (`x is NULL`) remain valid across the program.
    NULL
    # explanation here:
    # https://stackoverflow.com/questions/41048643/how-to-create-a-second-none-in-python-making-a-singleton-object-where-the-id-is
except NameError:
    # object.__new__ bypasses NullType.__new__, which would try to return the
    # not-yet-existing NULL and fail on first creation.
    NULL = object.__new__(NullType)
Null = NULL  # alias
null = NULL  # alias
# functions that support NULLs (and that need to be replaced in the query)
# Maps a builtin's name to the NULL-safe wrapper defined below in this module.
NULL_SAFE_FUNCS = {
    "int": "int_",
    "float": "float_",
    "str": "str_",
    "complex": "complex_",
}
class NullSafeDict(dict):
    """dict variant where stored Nones and missing keys surface as NULL."""

    __slots__ = ()  # no __dict__

    @staticmethod
    def none2null(adic):
        """Replace None with NULL at the top level and inside top-level lists."""
        def _convert(value):
            if value is None:
                return NULL
            if type(value) is list:
                return [NULL if item is None else item for item in value]
            return value

        return {key: _convert(value) for key, value in adic.items()}

    def __init__(self, adic, **kwargs):
        # converts None -> NULL before handing the data to dict.__init__
        super().__init__(NullSafeDict.none2null(adic), **kwargs)

    def __missing__(self, key):
        # Lookup of an absent key warns and yields NULL instead of raising.
        spyql.log.user_warning4func("key not found", KeyError(key), key)
        return NULL

    def __hash__(self):
        # TODO make dict immutable
        import json

        # TODO check if this is sufficienly efficient...
        # This only needs to guarantee that two equivalent dicts have the same hash
        return hash(json.dumps(self, default=lambda x: str(x), sort_keys=True))
# COALESCE: substitute a default for NULL
def coalesce(val, default):
    """Return *val*, or *default* when *val* is the NULL singleton."""
    return default if val is NULL else val


ifnull = coalesce  # alias
# NULLIF: collapse a chosen value to NULL
def nullif(a, b):
    """Return NULL when *a* equals *b*, otherwise *a*."""
    return NULL if a == b else a
# Short-circuit any call to NULL when an argument is NULL.
def null_safe_call(fun, *args, **kwargs):
    """Invoke fun(*args, **kwargs), or return NULL if any argument is NULL."""
    any_null = NULL in args or NULL in kwargs.values()
    return NULL if any_null else fun(*args, **kwargs)
# NULL-safe functions
def float_(a):
    """NULL-safe float(): propagates NULL and maps conversion errors to NULL."""
    if a is not NULL:
        try:
            return float(a)
        except ValueError as e:
            # Warn about the bad value, then fall through to NULL.
            spyql.log.conversion_warning("float", e, a)
    return NULL
def int_(a, *args, **kwargs):
    """NULL-safe int(): NULL in any argument (or a bad value) yields NULL."""
    any_null = a is NULL or NULL in args or NULL in kwargs.values()
    if not any_null:
        try:
            return int(a, *args, **kwargs)
        except ValueError as e:
            # Warn about the bad value, then fall through to NULL.
            spyql.log.conversion_warning("int", e, a, **kwargs)
    return NULL
def complex_(*args):
    """NULL-safe complex(): NULL in any argument (or a bad value) yields NULL."""
    if NULL not in args:
        try:
            return complex(*args)
        except ValueError as e:
            # Warn about the bad value, then fall through to NULL.
            spyql.log.conversion_warning("complex", e, *args)
    return NULL
def str_(*args, **kwargs):
    """NULL-safe str(): NULL in any argument yields NULL."""
    any_null = NULL in args or NULL in kwargs.values()
    return NULL if any_null else str(*args, **kwargs)
| 20
| 130
| 0.60076
| 827
| 6,580
| 4.383313
| 0.256348
| 0.151724
| 0.190069
| 0.241103
| 0.362483
| 0.079448
| 0.079448
| 0.079448
| 0.043034
| 0.043034
| 0
| 0.004405
| 0.31003
| 6,580
| 328
| 131
| 20.060976
| 0.794053
| 0.132219
| 0
| 0.396226
| 0
| 0
| 0.013361
| 0
| 0
| 0
| 0
| 0.003049
| 0
| 1
| 0.382075
| false
| 0.014151
| 0.009434
| 0.316038
| 0.820755
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c7110728097fd3e8ab3d6085d4b8661da66309bc
| 236,624
|
py
|
Python
|
pyboto3/redshift.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/redshift.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/redshift.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def authorize_cluster_security_group_ingress(ClusterSecurityGroupName=None, CIDRIP=None, EC2SecurityGroupName=None, EC2SecurityGroupOwnerId=None):
    """
    Adds an inbound (ingress) rule to an Amazon Redshift security group. Depending on whether the application accessing your cluster is running on the Internet or an Amazon EC2 instance, you can authorize inbound access to either a Classless Interdomain Routing (CIDR)/Internet Protocol (IP) range or to an Amazon EC2 security group. You can add as many as 20 ingress rules to an Amazon Redshift security group.
    If you authorize access to an Amazon EC2 security group, specify EC2SecurityGroupName and EC2SecurityGroupOwnerId . The Amazon EC2 security group and Amazon Redshift cluster must be in the same AWS region.
    If you authorize access to a CIDR/IP address range, specify CIDRIP . For an overview of CIDR blocks, see the Wikipedia article on Classless Inter-Domain Routing .
    You must also associate the security group with a cluster so that clients running on these IP addresses or the EC2 instance are authorized to connect to the cluster. For information about managing security groups, go to Working with Security Groups in the Amazon Redshift Cluster Management Guide .
    See also: AWS API Documentation

    :example: response = client.authorize_cluster_security_group_ingress(
        ClusterSecurityGroupName='string',
        CIDRIP='string',
        EC2SecurityGroupName='string',
        EC2SecurityGroupOwnerId='string'
    )

    :type ClusterSecurityGroupName: string
    :param ClusterSecurityGroupName: [REQUIRED]
        The name of the security group to which the ingress rule is added.

    :type CIDRIP: string
    :param CIDRIP: The IP range to be added the Amazon Redshift security group.

    :type EC2SecurityGroupName: string
    :param EC2SecurityGroupName: The EC2 security group to be added the Amazon Redshift security group.

    :type EC2SecurityGroupOwnerId: string
    :param EC2SecurityGroupOwnerId: The AWS account number of the owner of the security group specified by the EC2SecurityGroupName parameter. The AWS Access Key ID is not an acceptable value.
        Example: 111122223333

    :rtype: dict
    :return: {
        'ClusterSecurityGroup': {
            'ClusterSecurityGroupName': 'string',
            'Description': 'string',
            'EC2SecurityGroups': [
                {
                    'Status': 'string',
                    'EC2SecurityGroupName': 'string',
                    'EC2SecurityGroupOwnerId': 'string',
                    'Tags': [
                        {
                            'Key': 'string',
                            'Value': 'string'
                        },
                    ]
                },
            ],
            'IPRanges': [
                {
                    'Status': 'string',
                    'CIDRIP': 'string',
                    'Tags': [
                        {
                            'Key': 'string',
                            'Value': 'string'
                        },
                    ]
                },
            ],
            'Tags': [
                {
                    'Key': 'string',
                    'Value': 'string'
                },
            ]
        }
    }
    """
    # NOTE(review): documentation-only stub — the body is intentionally `pass`.
    pass
def authorize_snapshot_access(SnapshotIdentifier=None, SnapshotClusterIdentifier=None, AccountWithRestoreAccess=None):
    """
    Authorizes the specified AWS customer account to restore the specified snapshot.
    For more information about working with snapshots, go to Amazon Redshift Snapshots in the Amazon Redshift Cluster Management Guide .
    See also: AWS API Documentation

    :example: response = client.authorize_snapshot_access(
        SnapshotIdentifier='string',
        SnapshotClusterIdentifier='string',
        AccountWithRestoreAccess='string'
    )

    :type SnapshotIdentifier: string
    :param SnapshotIdentifier: [REQUIRED]
        The identifier of the snapshot the account is authorized to restore.

    :type SnapshotClusterIdentifier: string
    :param SnapshotClusterIdentifier: The identifier of the cluster the snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.

    :type AccountWithRestoreAccess: string
    :param AccountWithRestoreAccess: [REQUIRED]
        The identifier of the AWS customer account authorized to restore the specified snapshot.
        To share a snapshot with AWS support, specify amazon-redshift-support.

    :rtype: dict
    :return: {
        'Snapshot': {
            'SnapshotIdentifier': 'string',
            'ClusterIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Status': 'string',
            'Port': 123,
            'AvailabilityZone': 'string',
            'ClusterCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'ClusterVersion': 'string',
            'SnapshotType': 'string',
            'NodeType': 'string',
            'NumberOfNodes': 123,
            'DBName': 'string',
            'VpcId': 'string',
            'Encrypted': True|False,
            'KmsKeyId': 'string',
            'EncryptedWithHSM': True|False,
            'AccountsWithRestoreAccess': [
                {
                    'AccountId': 'string',
                    'AccountAlias': 'string'
                },
            ],
            'OwnerAccount': 'string',
            'TotalBackupSizeInMegaBytes': 123.0,
            'ActualIncrementalBackupSizeInMegaBytes': 123.0,
            'BackupProgressInMegaBytes': 123.0,
            'CurrentBackupRateInMegaBytesPerSecond': 123.0,
            'EstimatedSecondsToCompletion': 123,
            'ElapsedTimeInSeconds': 123,
            'SourceRegion': 'string',
            'Tags': [
                {
                    'Key': 'string',
                    'Value': 'string'
                },
            ],
            'RestorableNodeTypes': [
                'string',
            ],
            'EnhancedVpcRouting': True|False
        }
    }

    :returns:
        CreateClusterSnapshot and CopyClusterSnapshot returns status as "creating".
        DescribeClusterSnapshots returns status as "creating", "available", "final snapshot", or "failed".
        DeleteClusterSnapshot returns status as "deleted".
    """
    # NOTE(review): documentation-only stub — the body is intentionally `pass`.
    pass
def can_paginate(operation_name=None):
    """
    Check if an operation can be paginated.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name
        as the method name on the client. For example, if the
        method name is create_foo, and you'd normally invoke the
        operation as client.create_foo(**kwargs), if the
        create_foo operation can be paginated, you can use the
        call client.get_paginator('create_foo').
    """
    # NOTE(review): documentation-only stub — the body is intentionally `pass`.
    pass
def copy_cluster_snapshot(SourceSnapshotIdentifier=None, SourceSnapshotClusterIdentifier=None, TargetSnapshotIdentifier=None):
    """
    Copies the specified automated cluster snapshot to a new manual cluster snapshot. The source must be an automated snapshot and it must be in the available state.
    When you delete a cluster, Amazon Redshift deletes any automated snapshots of the cluster. Also, when the retention period of the snapshot expires, Amazon Redshift automatically deletes it. If you want to keep an automated snapshot for a longer period, you can make a manual copy of the snapshot. Manual snapshots are retained until you delete them.
    For more information about working with snapshots, go to Amazon Redshift Snapshots in the Amazon Redshift Cluster Management Guide .
    See also: AWS API Documentation

    :example: response = client.copy_cluster_snapshot(
        SourceSnapshotIdentifier='string',
        SourceSnapshotClusterIdentifier='string',
        TargetSnapshotIdentifier='string'
    )

    :type SourceSnapshotIdentifier: string
    :param SourceSnapshotIdentifier: [REQUIRED]
        The identifier for the source snapshot.
        Constraints:
        Must be the identifier for a valid automated snapshot whose state is available .

    :type SourceSnapshotClusterIdentifier: string
    :param SourceSnapshotClusterIdentifier: The identifier of the cluster the source snapshot was created from. This parameter is required if your IAM user has a policy containing a snapshot resource element that specifies anything other than * for the cluster name.
        Constraints:
        Must be the identifier for a valid cluster.

    :type TargetSnapshotIdentifier: string
    :param TargetSnapshotIdentifier: [REQUIRED]
        The identifier given to the new manual snapshot.
        Constraints:
        Cannot be null, empty, or blank.
        Must contain from 1 to 255 alphanumeric characters or hyphens.
        First character must be a letter.
        Cannot end with a hyphen or contain two consecutive hyphens.
        Must be unique for the AWS account that is making the request.

    :rtype: dict
    :return: {
        'Snapshot': {
            'SnapshotIdentifier': 'string',
            'ClusterIdentifier': 'string',
            'SnapshotCreateTime': datetime(2015, 1, 1),
            'Status': 'string',
            'Port': 123,
            'AvailabilityZone': 'string',
            'ClusterCreateTime': datetime(2015, 1, 1),
            'MasterUsername': 'string',
            'ClusterVersion': 'string',
            'SnapshotType': 'string',
            'NodeType': 'string',
            'NumberOfNodes': 123,
            'DBName': 'string',
            'VpcId': 'string',
            'Encrypted': True|False,
            'KmsKeyId': 'string',
            'EncryptedWithHSM': True|False,
            'AccountsWithRestoreAccess': [
                {
                    'AccountId': 'string',
                    'AccountAlias': 'string'
                },
            ],
            'OwnerAccount': 'string',
            'TotalBackupSizeInMegaBytes': 123.0,
            'ActualIncrementalBackupSizeInMegaBytes': 123.0,
            'BackupProgressInMegaBytes': 123.0,
            'CurrentBackupRateInMegaBytesPerSecond': 123.0,
            'EstimatedSecondsToCompletion': 123,
            'ElapsedTimeInSeconds': 123,
            'SourceRegion': 'string',
            'Tags': [
                {
                    'Key': 'string',
                    'Value': 'string'
                },
            ],
            'RestorableNodeTypes': [
                'string',
            ],
            'EnhancedVpcRouting': True|False
        }
    }

    :returns:
        CreateClusterSnapshot and CopyClusterSnapshot returns status as "creating".
        DescribeClusterSnapshots returns status as "creating", "available", "final snapshot", or "failed".
        DeleteClusterSnapshot returns status as "deleted".
    """
    # NOTE(review): documentation-only stub — the body is intentionally `pass`.
    pass
def create_cluster(DBName=None, ClusterIdentifier=None, ClusterType=None, NodeType=None, MasterUsername=None, MasterUserPassword=None, ClusterSecurityGroups=None, VpcSecurityGroupIds=None, ClusterSubnetGroupName=None, AvailabilityZone=None, PreferredMaintenanceWindow=None, ClusterParameterGroupName=None, AutomatedSnapshotRetentionPeriod=None, Port=None, ClusterVersion=None, AllowVersionUpgrade=None, NumberOfNodes=None, PubliclyAccessible=None, Encrypted=None, HsmClientCertificateIdentifier=None, HsmConfigurationIdentifier=None, ElasticIp=None, Tags=None, KmsKeyId=None, EnhancedVpcRouting=None, AdditionalInfo=None, IamRoles=None):
    """Create a new Amazon Redshift cluster.

    To create the cluster in a Virtual Private Cloud (VPC) you must provide
    ``ClusterSubnetGroupName``; the cluster subnet group identifies the
    subnets of your VPC that Amazon Redshift uses when creating the cluster.
    For more information about managing clusters, see *Amazon Redshift
    Clusters* in the Amazon Redshift Cluster Management Guide.

    See also: AWS API Documentation (Redshift ``CreateCluster``).

    Required parameters:
        ClusterIdentifier (string): Unique identifier for the cluster,
            1-63 alphanumeric characters or hyphens; alphabetic characters
            must be lowercase, the first character must be a letter, it
            cannot end with a hyphen or contain two consecutive hyphens,
            and it must be unique for all clusters within an AWS account.
        NodeType (string): Node type to provision. Valid values:
            ds1.xlarge | ds1.8xlarge | ds2.xlarge | ds2.8xlarge |
            dc1.large | dc1.8xlarge.
        MasterUsername (string): Master user name; 1-128 alphanumeric
            characters, first character a letter, not a reserved word.
        MasterUserPassword (string): Master user password; 8-64 characters,
            containing at least one uppercase letter, one lowercase letter
            and one number. Any printable ASCII character (codes 33-126)
            is allowed except single quote, double quote, backslash,
            forward slash, at sign, or space.

    Optional parameters:
        DBName (string): Name of the first database created with the
            cluster (default ``dev``); 1-64 lowercase alphanumeric
            characters, not a reserved word.
        ClusterType (string): ``multi-node`` (default) or ``single-node``.
            ``NumberOfNodes`` is required for ``multi-node`` and not
            required for ``single-node``.
        ClusterSecurityGroups (list): Security groups to associate with
            the cluster (default: the default cluster security group).
        VpcSecurityGroupIds (list): VPC security groups to associate with
            the cluster (default: the default VPC security group).
        ClusterSubnetGroupName (string): Cluster subnet group; if omitted
            the cluster is deployed outside a VPC.
        AvailabilityZone (string): EC2 Availability Zone in which to
            provision the cluster; must be in the same region as the
            current endpoint (e.g. ``us-east-1d``).
        PreferredMaintenanceWindow (string): Weekly UTC maintenance window
            in ``ddd:hh24:mi-ddd:hh24:mi`` format, minimum 30 minutes.
        ClusterParameterGroupName (string): Parameter group to associate
            (default: the default Amazon Redshift parameter group);
            1-255 alphanumeric characters or hyphens, starting with a
            letter, no trailing hyphen or double hyphens.
        AutomatedSnapshotRetentionPeriod (integer): Days to retain
            automated snapshots, 0-35 (default 1); 0 disables them, but
            manual snapshots via CreateClusterSnapshot remain possible.
        Port (integer): Port for incoming JDBC/ODBC connections,
            1150-65535 (default 5439).
        ClusterVersion (string): Engine version to deploy on all nodes;
            only version ``1.0`` is currently available.
        AllowVersionUpgrade (boolean): If true (default), major engine
            upgrades can be applied during the maintenance window.
        NumberOfNodes (integer): Number of compute nodes, 1-100
            (default 1); required when ClusterType is ``multi-node``.
        PubliclyAccessible (boolean): If true, the cluster can be accessed
            from a public network.
        Encrypted (boolean): If true, data in the cluster is encrypted at
            rest (default false).
        HsmClientCertificateIdentifier (string): HSM client certificate
            used to retrieve data encryption keys stored in an HSM.
        HsmConfigurationIdentifier (string): HSM configuration holding the
            information needed to retrieve and store keys in an HSM.
        ElasticIp (string): Elastic IP (EIP) address; the cluster must be
            provisioned in EC2-VPC and publicly accessible through an
            Internet gateway.
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.
        KmsKeyId (string): AWS KMS key ID used to encrypt cluster data.
        EnhancedVpcRouting (boolean): Enable enhanced VPC routing
            (default false; the cluster must be in a VPC).
        AdditionalInfo (string): Reserved.
        IamRoles (list): Up to 10 IAM role ARNs the cluster can use to
            access other AWS services.

    Returns:
        dict: ``{'Cluster': {...}}`` describing the new cluster. The
        cluster status can be one of: available, creating, deleting,
        final-snapshot, hardware-failure, incompatible-hsm,
        incompatible-network, incompatible-parameters,
        incompatible-restore, modifying, rebooting, renaming, resizing,
        rotating-keys, storage-full, updating-hsm.
    """
    pass
def create_cluster_parameter_group(ParameterGroupName=None, ParameterGroupFamily=None, Description=None, Tags=None):
    """Create an Amazon Redshift parameter group.

    Creating parameter groups is independent of creating clusters: a
    cluster can be associated with a parameter group at creation time or
    afterwards via ``ModifyCluster``. Parameters in the group define
    behavior that applies to the databases created on the cluster. See
    *Amazon Redshift Parameter Groups* in the Amazon Redshift Cluster
    Management Guide.

    See also: AWS API Documentation (Redshift ``CreateClusterParameterGroup``).

    Required parameters:
        ParameterGroupName (string): Name of the parameter group; 1-255
            alphanumeric characters or hyphens, first character a letter,
            no trailing hyphen or two consecutive hyphens, unique within
            your AWS account. Stored as a lowercase string.
        ParameterGroupFamily (string): Engine version the group applies
            to, which determines the parameter set. Valid family names can
            be obtained from ``DescribeClusterParameterGroups``; for
            example ``redshift-1.0``.
        Description (string): A description of the parameter group.

    Optional parameters:
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'ClusterParameterGroup': {'ParameterGroupName': ...,
        'ParameterGroupFamily': ..., 'Description': ..., 'Tags': [...]}}``.
    """
    pass
def create_cluster_security_group(ClusterSecurityGroupName=None, Description=None, Tags=None):
    """Create a new Amazon Redshift security group.

    Security groups control access to non-VPC clusters. For information
    about managing security groups, see *Amazon Redshift Cluster Security
    Groups* in the Amazon Redshift Cluster Management Guide.

    See also: AWS API Documentation (Redshift ``CreateClusterSecurityGroup``).

    Required parameters:
        ClusterSecurityGroupName (string): Name for the security group,
            stored as a lowercase string. No more than 255 alphanumeric
            characters or hyphens, must not be 'Default', and must be
            unique for all security groups created by your AWS account.
            Example: ``examplesecuritygroup``.
        Description (string): A description for the security group.

    Optional parameters:
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'ClusterSecurityGroup': {...}}`` including
        ``EC2SecurityGroups``, ``IPRanges`` and ``Tags``.
    """
    pass
def create_cluster_snapshot(SnapshotIdentifier=None, ClusterIdentifier=None, Tags=None):
    """Create a manual snapshot of the specified cluster.

    The cluster must be in the ``available`` state. For more information
    about working with snapshots, see *Amazon Redshift Snapshots* in the
    Amazon Redshift Cluster Management Guide.

    See also: AWS API Documentation (Redshift ``CreateClusterSnapshot``).

    Required parameters:
        SnapshotIdentifier (string): Unique identifier for the snapshot
            across all snapshots in the AWS account. Cannot be null,
            empty, or blank; 1-255 alphanumeric characters or hyphens;
            first character a letter; no trailing hyphen or two
            consecutive hyphens. Example: ``my-snapshot-id``.
        ClusterIdentifier (string): The cluster to snapshot.

    Optional parameters:
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'Snapshot': {...}}`` describing the snapshot. Status
        values: CreateClusterSnapshot and CopyClusterSnapshot return
        "creating"; DescribeClusterSnapshots returns "creating",
        "available", "final snapshot", or "failed";
        DeleteClusterSnapshot returns "deleted".
    """
    pass
def create_cluster_subnet_group(ClusterSubnetGroupName=None, Description=None, SubnetIds=None, Tags=None):
    """Create a new Amazon Redshift subnet group.

    You must provide a list of one or more subnets in your existing Amazon
    Virtual Private Cloud (Amazon VPC) when creating the subnet group. For
    information about subnet groups, see *Amazon Redshift Cluster Subnet
    Groups* in the Amazon Redshift Cluster Management Guide.

    See also: AWS API Documentation (Redshift ``CreateClusterSubnetGroup``).

    Required parameters:
        ClusterSubnetGroupName (string): Name for the subnet group, stored
            as a lowercase string. No more than 255 alphanumeric
            characters or hyphens, must not be 'Default', and must be
            unique for all subnet groups created by your AWS account.
            Example: ``examplesubnetgroup``.
        Description (string): A description for the subnet group.
        SubnetIds (list): VPC subnet IDs; at most 20 subnets per request.

    Optional parameters:
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'ClusterSubnetGroup': {...}}`` including ``VpcId``,
        ``SubnetGroupStatus``, the ``Subnets`` list and ``Tags``.
    """
    pass
def create_event_subscription(SubscriptionName=None, SnsTopicArn=None, SourceType=None, SourceIds=None, EventCategories=None, Severity=None, Enabled=None, Tags=None):
    """Create an Amazon Redshift event notification subscription.

    Requires the ARN of an Amazon SNS topic created via the Amazon
    Redshift console, the Amazon SNS console, or the Amazon SNS API.
    You can filter by source type, source IDs, event categories and
    severities; notifications are sent for all events matching the
    criteria. If both source type and source IDs are given, events are
    limited to those objects; a source type alone covers all objects of
    that type; with neither, all Amazon Redshift sources in the account
    are covered. A source type is required whenever source IDs are given.

    See also: AWS API Documentation (Redshift ``CreateEventSubscription``).

    Required parameters:
        SubscriptionName (string): Name of the subscription. Cannot be
            null, empty, or blank; 1-255 alphanumeric characters or
            hyphens; first character a letter; no trailing hyphen or two
            consecutive hyphens.
        SnsTopicArn (string): ARN of the SNS topic used to transmit the
            event notifications.

    Optional parameters:
        SourceType (string): Source type generating the events. Valid
            values: cluster, cluster-parameter-group,
            cluster-security-group, cluster-snapshot.
        SourceIds (list): Identifiers of source objects, all of the type
            given in SourceType (e.g. ``my-cluster-1``).
        EventCategories (list): Categories to publish. Values:
            Configuration, Management, Monitoring, Security.
        Severity (string): Severity to publish. Values: ERROR, INFO.
        Enabled (boolean): True to activate the subscription; false to
            create it without activating it.
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'EventSubscription': {...}}``. Subscription status is one
        of active | no-permission | topic-not-exist; "no-permission"
        means Amazon Redshift no longer has permission to post to the SNS
        topic, and "topic-not-exist" means the topic was deleted after
        the subscription was created.
    """
    pass
def create_hsm_client_certificate(HsmClientCertificateIdentifier=None, Tags=None):
    """Create an HSM client certificate for a cluster.

    The certificate is used by an Amazon Redshift cluster to connect to
    the client's HSM in order to store and retrieve the keys used to
    encrypt the cluster databases. The command returns a public key which
    you must store in the HSM; you must also create an Amazon Redshift
    HSM configuration (see *Hardware Security Modules* in the Amazon
    Redshift Cluster Management Guide).

    See also: AWS API Documentation (Redshift ``CreateHsmClientCertificate``).

    Required parameters:
        HsmClientCertificateIdentifier (string): Identifier for the new
            HSM client certificate.

    Optional parameters:
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'HsmClientCertificate': {'HsmClientCertificateIdentifier':
        ..., 'HsmClientCertificatePublicKey': ..., 'Tags': [...]}}``.
    """
    pass
def create_hsm_configuration(HsmConfigurationIdentifier=None, Description=None, HsmIpAddress=None, HsmPartitionName=None, HsmPartitionPassword=None, HsmServerPublicCertificate=None, Tags=None):
    """Create an HSM configuration.

    The configuration contains the information required by an Amazon
    Redshift cluster to store and use database encryption keys in a
    Hardware Security Module (HSM). After creating it, you can specify it
    as a parameter when creating a cluster; the cluster will then store
    its encryption keys in the HSM. You must also create an HSM client
    certificate (see *Hardware Security Modules* in the Amazon Redshift
    Cluster Management Guide).

    See also: AWS API Documentation (Redshift ``CreateHsmConfiguration``).

    Required parameters:
        HsmConfigurationIdentifier (string): Identifier for the new HSM
            configuration.
        Description (string): Text description of the configuration.
        HsmIpAddress (string): IP address the cluster must use to access
            the HSM.
        HsmPartitionName (string): Name of the HSM partition where the
            clusters will store their database encryption keys.
        HsmPartitionPassword (string): Password required to access the
            HSM partition.
        HsmServerPublicCertificate (string): The HSM's public certificate
            file; when using Cloud HSM the file name is ``server.pem``.

    Optional parameters:
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'HsmConfiguration': {...}}`` (note: the partition
        password is not echoed back).
    """
    pass
def create_snapshot_copy_grant(SnapshotCopyGrantName=None, KmsKeyId=None, Tags=None):
    """Create a snapshot copy grant.

    The grant permits Amazon Redshift to use a customer master key (CMK)
    from AWS Key Management Service (AWS KMS) to encrypt copied snapshots
    in a destination region. For more information, see *Amazon Redshift
    Database Encryption* in the Amazon Redshift Cluster Management Guide.

    See also: AWS API Documentation (Redshift ``CreateSnapshotCopyGrant``).

    Required parameters:
        SnapshotCopyGrantName (string): Name of the grant, unique in the
            region for the AWS account; 1-63 alphanumeric characters or
            hyphens, alphabetic characters lowercase, first character a
            letter, no trailing hyphen or two consecutive hyphens.

    Optional parameters:
        KmsKeyId (string): Identifier of the CMK to which permission is
            granted; the default key is used if none is specified.
        Tags (list): Tag dicts, each with ``Key`` and ``Value`` strings.

    Returns:
        dict: ``{'SnapshotCopyGrant': {'SnapshotCopyGrantName': ...,
        'KmsKeyId': ..., 'Tags': [...]}}``.
    """
    pass
def create_tags(ResourceName=None, Tags=None):
    """Add one or more tags to a specified resource.

    A resource can carry up to 10 tags; attempting to create more fails with
    an error. Specifying a key that already exists on the resource updates
    that key's value. See also: AWS API Documentation.

    Args:
        ResourceName (string): [REQUIRED] The Amazon Resource Name (ARN) to
            tag, e.g. ``arn:aws:redshift:us-east-1:123456789:cluster:t1``.
        Tags (list): [REQUIRED] One or more name/value pairs to add as tags.
            Each tag name goes in ``Key`` and the corresponding value in
            ``Value``; the parameters are comma-separated and multiple tags
            are space-separated, e.g. ``--tags
            'Key'='owner','Value'='admin' 'Key'='environment','Value'='test'
            'Key'='version','Value'='1.0'``. Each element is a dict with
            string keys ``Key`` and ``Value``.
    """
    pass
def delete_cluster(ClusterIdentifier=None, SkipFinalClusterSnapshot=None, FinalClusterSnapshotIdentifier=None):
    """Delete a previously provisioned cluster.

    A successful response indicates only that the request was received; use
    DescribeClusters to monitor deletion status. Deletion cannot be canceled
    or reverted once submitted. To retain the cluster data for future use,
    set ``SkipFinalClusterSnapshot`` to false and name a
    ``FinalClusterSnapshotIdentifier``; the cluster status is
    "final-snapshot" while that snapshot is taken, then "deleting". For more
    information see "Amazon Redshift Clusters" in the Amazon Redshift
    Cluster Management Guide. See also: AWS API Documentation.

    Args:
        ClusterIdentifier (string): [REQUIRED] Identifier of the cluster to
            delete. Constraints: lowercase, 1-63 alphanumeric characters or
            hyphens, first character a letter, no trailing hyphen or two
            consecutive hyphens.
        SkipFinalClusterSnapshot (boolean): Whether to skip the final
            snapshot. If true, no final snapshot is created; if false, a
            final snapshot is created before deletion (and
            ``FinalClusterSnapshotIdentifier`` must then be specified).
            Default: false.
        FinalClusterSnapshotIdentifier (string): Identifier of the final
            snapshot created immediately before deletion; requires
            ``SkipFinalClusterSnapshot`` to be false. Constraints: 1-255
            alphanumeric characters, first character a letter, no trailing
            hyphen or two consecutive hyphens.

    Returns:
        dict: ``{'Cluster': {...}}`` describing the deleted cluster,
        including identifiers, node/cluster type and version, endpoint
        address and port, security and parameter group memberships, VPC and
        availability-zone placement, pending modified values, restore and
        HSM status, snapshot-copy status, public key and node IP addresses,
        elastic-IP status, tags, KMS key, enhanced VPC routing flag, and IAM
        roles. Possible ``ClusterStatus`` values: available, creating,
        deleting, final-snapshot, hardware-failure, incompatible-hsm,
        incompatible-network, incompatible-parameters, incompatible-restore,
        modifying, rebooting, renaming, resizing, rotating-keys,
        storage-full, updating-hsm.
    """
    pass
def delete_cluster_parameter_group(ParameterGroupName=None):
    """Delete a specified Amazon Redshift parameter group.

    See also: AWS API Documentation.

    Args:
        ParameterGroupName (string): [REQUIRED] Name of the parameter group
            to delete. Constraints: must name an existing cluster parameter
            group; a default cluster parameter group cannot be deleted.
    """
    pass
def delete_cluster_security_group(ClusterSecurityGroupName=None):
    """Delete an Amazon Redshift security group.

    For information about managing security groups, see "Amazon Redshift
    Cluster Security Groups" in the Amazon Redshift Cluster Management
    Guide. See also: AWS API Documentation.

    Args:
        ClusterSecurityGroupName (string): [REQUIRED] Name of the cluster
            security group to delete.
    """
    pass
def delete_cluster_snapshot(SnapshotIdentifier=None, SnapshotClusterIdentifier=None):
    """Delete the specified manual snapshot.

    The snapshot must be in the available state with no other users
    authorized to access it. Unlike automated snapshots, manual snapshots
    are retained even after the cluster is deleted — Amazon Redshift never
    deletes them for you, so delete them explicitly to avoid charges. If
    other accounts are authorized to access the snapshot, revoke all
    authorizations first. See also: AWS API Documentation.

    Args:
        SnapshotIdentifier (string): [REQUIRED] Unique identifier of the
            manual snapshot to delete. Constraints: must name an existing
            snapshot in the available state.
        SnapshotClusterIdentifier (string): Unique identifier of the cluster
            the snapshot was created from; required if your IAM user policy
            contains a snapshot resource element specifying anything other
            than ``*`` for the cluster name. Constraints: must name a valid
            cluster.

    Returns:
        dict: ``{'Snapshot': {...}}`` describing the deleted snapshot,
        including snapshot/cluster identifiers, creation times, status,
        port, availability zone, master username, cluster version, snapshot
        and node types, node count, database name, VPC, encryption and KMS
        details, accounts with restore access, owner account, backup
        size/progress/rate metrics, elapsed and estimated completion times,
        source region, tags, restorable node types, and enhanced VPC
        routing flag. Status values: CreateClusterSnapshot and
        CopyClusterSnapshot return "creating"; DescribeClusterSnapshots
        returns "creating", "available", "final snapshot", or "failed";
        DeleteClusterSnapshot returns "deleted".
    """
    pass
def delete_cluster_subnet_group(ClusterSubnetGroupName=None):
    """Delete the specified cluster subnet group.

    See also: AWS API Documentation.

    Args:
        ClusterSubnetGroupName (string): [REQUIRED] Name of the cluster
            subnet group to delete.
    """
    pass
def delete_event_subscription(SubscriptionName=None):
    """Delete an Amazon Redshift event notification subscription.

    See also: AWS API Documentation.

    Args:
        SubscriptionName (string): [REQUIRED] Name of the Amazon Redshift
            event notification subscription to delete.
    """
    pass
def delete_hsm_client_certificate(HsmClientCertificateIdentifier=None):
    """Delete the specified HSM client certificate.

    See also: AWS API Documentation.

    Args:
        HsmClientCertificateIdentifier (string): [REQUIRED] Identifier of
            the HSM client certificate to delete.
    """
    pass
def delete_hsm_configuration(HsmConfigurationIdentifier=None):
    """Delete the specified Amazon Redshift HSM configuration.

    See also: AWS API Documentation.

    Args:
        HsmConfigurationIdentifier (string): [REQUIRED] Identifier of the
            Amazon Redshift HSM configuration to delete.
    """
    pass
def delete_snapshot_copy_grant(SnapshotCopyGrantName=None):
    """Delete the specified snapshot copy grant.

    See also: AWS API Documentation.

    Args:
        SnapshotCopyGrantName (string): [REQUIRED] Name of the snapshot copy
            grant to delete.
    """
    pass
def delete_tags(ResourceName=None, TagKeys=None):
    """Delete a tag or tags from a resource.

    You must supply the ARN of the resource from which the tag or tags are
    removed. See also: AWS API Documentation.

    Args:
        ResourceName (string): [REQUIRED] The Amazon Resource Name (ARN)
            from which to remove the tag or tags, e.g.
            ``arn:aws:redshift:us-east-1:123456789:cluster:t1``.
        TagKeys (list of string): [REQUIRED] The tag keys to delete.
    """
    pass
def describe_cluster_parameter_groups(ParameterGroupName=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """Return a list of Amazon Redshift parameter groups.

    Includes groups you created and the default group; each entry carries
    the group name, description, and family name. Optionally name a
    specific group to retrieve just its description. Supplying both tag
    keys and tag values returns all parameter groups matching any
    combination of the specified keys and values; omitting both returns
    groups regardless of their tags. For more information see "Amazon
    Redshift Parameter Groups" in the Amazon Redshift Cluster Management
    Guide. See also: AWS API Documentation.

    Args:
        ParameterGroupName (string): Name of a specific parameter group to
            detail; by default all groups plus the default group are
            returned.
        MaxRecords (integer): Maximum response records per call; when more
            remain, a marker is returned for retrieving the next set.
            Default 100; constraints: minimum 20, maximum 100.
        Marker (string): Optional starting point for the response records,
            as returned by a previous DescribeClusterParameterGroups call
            that exceeded ``MaxRecords``.
        TagKeys (list of string): Tag key or keys to match; groups having
            any of the given keys are returned.
        TagValues (list of string): Tag value or values to match; groups
            having any of the given values are returned.

    Returns:
        dict: ``{'Marker': 'string', 'ParameterGroups': [
        {'ParameterGroupName': 'string', 'ParameterGroupFamily': 'string',
        'Description': 'string',
        'Tags': [{'Key': 'string', 'Value': 'string'}, ]}, ]}``
    """
    pass
def describe_cluster_parameters(ParameterGroupName=None, Source=None, MaxRecords=None, Marker=None):
    """Return a detailed list of parameters in a specified parameter group.

    For each parameter the response includes its name, description, data
    type, value, modifiability, and so on. A source filter limits results
    to one parameter type — e.g. ``user`` for parameters modified by a user
    action such as ModifyClusterParameterGroup. For more information see
    "Amazon Redshift Parameter Groups" in the Amazon Redshift Cluster
    Management Guide. See also: AWS API Documentation.

    Args:
        ParameterGroupName (string): [REQUIRED] Name of the cluster
            parameter group to detail.
        Source (string): Parameter types to return: ``user`` for parameters
            that differ from the default, ``engine-default`` for parameters
            matching the default parameter group. Default: all types.
            Valid values: ``user`` | ``engine-default``.
        MaxRecords (integer): Maximum response records per call; when more
            remain, a marker is returned for retrieving the next set.
            Default 100; constraints: minimum 20, maximum 100.
        Marker (string): Optional starting point for the response records,
            as returned by a previous DescribeClusterParameters call that
            exceeded ``MaxRecords``.

    Returns:
        dict: ``{'Parameters': [{'ParameterName': 'string',
        'ParameterValue': 'string', 'Description': 'string',
        'Source': 'string', 'DataType': 'string', 'AllowedValues': 'string',
        'ApplyType': 'static'|'dynamic', 'IsModifiable': True|False,
        'MinimumEngineVersion': 'string'}, ], 'Marker': 'string'}``
    """
    pass
def describe_cluster_security_groups(ClusterSecurityGroupName=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """Return information about Amazon Redshift security groups.

    Naming a specific security group restricts the response to that group
    only. Supplying both tag keys and tag values returns all security
    groups matching any combination of the specified keys and values;
    omitting both returns groups regardless of their tags. For information
    about managing security groups see "Amazon Redshift Cluster Security
    Groups" in the Amazon Redshift Cluster Management Guide. See also: AWS
    API Documentation.

    Args:
        ClusterSecurityGroupName (string): Name of a cluster security group
            to detail, e.g. ``securitygroup1``. You can specify either this
            parameter or ``Marker``, but not both.
        MaxRecords (integer): Maximum response records per call; when more
            remain, a marker is returned for retrieving the next set.
            Default 100; constraints: minimum 20, maximum 100.
        Marker (string): Optional starting point for the response records,
            as returned by a previous DescribeClusterSecurityGroups call
            that exceeded ``MaxRecords``. Cannot be combined with
            ``ClusterSecurityGroupName``.
        TagKeys (list of string): Tag key or keys to match; groups having
            any of the given keys are returned.
        TagValues (list of string): Tag value or values to match; groups
            having any of the given values are returned.

    Returns:
        dict: ``{'Marker': 'string', 'ClusterSecurityGroups': [
        {'ClusterSecurityGroupName': 'string', 'Description': 'string',
        'EC2SecurityGroups': [{'Status': 'string',
        'EC2SecurityGroupName': 'string',
        'EC2SecurityGroupOwnerId': 'string', 'Tags': [...]}, ],
        'IPRanges': [{'Status': 'string', 'CIDRIP': 'string',
        'Tags': [...]}, ], 'Tags': [{'Key': 'string', 'Value': 'string'},
        ]}, ]}``
    """
    pass
def describe_cluster_snapshots(ClusterIdentifier=None, SnapshotIdentifier=None, SnapshotType=None, StartTime=None, EndTime=None, MaxRecords=None, Marker=None, OwnerAccount=None, TagKeys=None, TagValues=None):
    """Return one or more snapshot objects with metadata about your cluster
    snapshots.

    By default this returns information about all snapshots of all clusters
    owned by your AWS customer account; nothing is returned for snapshots
    owned by inactive accounts. Supplying both tag keys and tag values
    returns all snapshots matching any combination of the specified keys
    and values — only snapshots you own; shared snapshots are excluded from
    tag-filtered requests. Omitting both tag parameters returns snapshots
    regardless of their tags. See also: AWS API Documentation.

    Args:
        ClusterIdentifier (string): Identifier of the cluster whose
            snapshots are requested.
        SnapshotIdentifier (string): Identifier of the snapshot to return
            information about.
        SnapshotType (string): Type of snapshots requested; by default all
            types are returned. Valid values: ``automated`` | ``manual``.
        StartTime (datetime): Only snapshots created at or after this time
            (ISO 8601), e.g. ``2012-07-16T18:00:00Z``.
        EndTime (datetime): Only snapshots created at or before this time
            (ISO 8601), e.g. ``2012-07-16T18:00:00Z``.
        MaxRecords (integer): Maximum response records per call; when more
            remain, a marker is returned for retrieving the next set.
            Default 100; constraints: minimum 20, maximum 100.
        Marker (string): Optional starting point for the response records,
            as returned by a previous DescribeClusterSnapshots call that
            exceeded ``MaxRecords``.
        OwnerAccount (string): AWS customer account that created or copied
            the snapshot; filters results to that account's snapshots. To
            describe your own snapshots, specify your account or omit the
            parameter.
        TagKeys (list of string): Tag key or keys to match; snapshots having
            any of the given keys are returned.
        TagValues (list of string): Tag value or values to match; snapshots
            having any of the given values are returned.

    Returns:
        dict: ``{'Marker': 'string', 'Snapshots': [{...}, ]}`` where each
        snapshot dict carries identifiers, creation times, status, port,
        availability zone, master username, cluster version, snapshot and
        node types, node count, database name, VPC, encryption and KMS
        details, accounts with restore access, owner account, backup
        size/progress/rate metrics, timing estimates, source region, tags,
        restorable node types, and enhanced VPC routing flag. Status
        values: CreateClusterSnapshot and CopyClusterSnapshot return
        "creating"; DescribeClusterSnapshots returns "creating",
        "available", "final snapshot", or "failed"; DeleteClusterSnapshot
        returns "deleted".
    """
    pass
def describe_cluster_subnet_groups(ClusterSubnetGroupName=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """Return one or more cluster subnet group objects with metadata about
    your cluster subnet groups.

    By default this returns information about all cluster subnet groups
    defined in your AWS account. Supplying both tag keys and tag values
    returns all subnet groups matching any combination of the specified
    keys and values; omitting both returns subnet groups regardless of
    their tags. See also: AWS API Documentation.

    Args:
        ClusterSubnetGroupName (string): Name of the cluster subnet group
            for which information is requested.
        MaxRecords (integer): Maximum response records per call; when more
            remain, a marker is returned for retrieving the next set.
            Default 100; constraints: minimum 20, maximum 100.
        Marker (string): Optional starting point for the response records,
            as returned by a previous DescribeClusterSubnetGroups call that
            exceeded ``MaxRecords``.
        TagKeys (list of string): Tag key or keys to match; subnet groups
            having any of the given keys are returned.
        TagValues (list of string): Tag value or values to match; subnet
            groups having any of the given values are returned.

    Returns:
        dict: ``{'Marker': 'string', 'ClusterSubnetGroups': [
        {'ClusterSubnetGroupName': 'string', 'Description': 'string',
        'VpcId': 'string', 'SubnetGroupStatus': 'string',
        'Subnets': [{'SubnetIdentifier': 'string',
        'SubnetAvailabilityZone': {'Name': 'string'},
        'SubnetStatus': 'string'}, ],
        'Tags': [{'Key': 'string', 'Value': 'string'}, ]}, ]}``
    """
    pass
def describe_cluster_versions(ClusterVersion=None, ClusterParameterGroupFamily=None, MaxRecords=None, Marker=None):
    """Return descriptions of the available Amazon Redshift cluster versions.

    May be called before any clusters exist to learn which Amazon Redshift
    versions are available. See Amazon Redshift Clusters in the Amazon
    Redshift Cluster Management Guide and the AWS API Documentation.

    :type ClusterVersion: string
    :param ClusterVersion: The specific cluster version to return
        (example: ``1.0``).
    :type ClusterParameterGroupFamily: string
    :param ClusterParameterGroupFamily: Name of a specific cluster parameter
        group family to return details for. Constraints: 1 to 255
        alphanumeric characters; first character must be a letter; cannot end
        with a hyphen or contain two consecutive hyphens.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100). When more records remain,
        a marker field is returned so the rest can be retrieved.
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeClusterVersions call; supply it to retrieve the next set of
        response records.
    :rtype: dict
    :return: {
        'Marker': 'string',
        'ClusterVersions': [
            {
                'ClusterVersion': 'string',
                'ClusterParameterGroupFamily': 'string',
                'Description': 'string'
            },
        ]
    }
    """
    pass
def describe_clusters(ClusterIdentifier=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """Return properties of provisioned Amazon Redshift clusters.

    Covers general cluster properties, database properties, maintenance and
    backup properties, and security/access properties. Supports pagination.

    If both tag keys and tag values are supplied, all clusters matching any
    combination of the specified keys and values are returned; if both are
    omitted, clusters are returned regardless of their tags. See Amazon
    Redshift Clusters in the Amazon Redshift Cluster Management Guide and
    the AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: Unique identifier of the cluster whose
        properties you are requesting (case sensitive). By default all
        clusters defined for the account are returned.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker from a previous
        DescribeClusters response. Constraints: specify either the
        ClusterIdentifier parameter or the Marker parameter, but not both.
    :type TagKeys: list
    :param TagKeys: Tag key or keys used to filter the returned clusters;
        clusters having either or both of the keys are returned.
        (string) --
    :type TagValues: list
    :param TagValues: Tag value or values used to filter the returned
        clusters; clusters having either or both of the values are returned.
        (string) --
    :rtype: dict
    :return: {
        'Marker': 'string',
        'Clusters': [
            {
                'ClusterIdentifier': 'string',
                'NodeType': 'string',
                'ClusterStatus': 'string',
                'ModifyStatus': 'string',
                'MasterUsername': 'string',
                'DBName': 'string',
                'Endpoint': {'Address': 'string', 'Port': 123},
                'ClusterCreateTime': datetime(2015, 1, 1),
                'AutomatedSnapshotRetentionPeriod': 123,
                'ClusterSecurityGroups': [
                    {'ClusterSecurityGroupName': 'string', 'Status': 'string'},
                ],
                'VpcSecurityGroups': [
                    {'VpcSecurityGroupId': 'string', 'Status': 'string'},
                ],
                'ClusterParameterGroups': [
                    {
                        'ParameterGroupName': 'string',
                        'ParameterApplyStatus': 'string',
                        'ClusterParameterStatusList': [
                            {
                                'ParameterName': 'string',
                                'ParameterApplyStatus': 'string',
                                'ParameterApplyErrorDescription': 'string'
                            },
                        ]
                    },
                ],
                'ClusterSubnetGroupName': 'string',
                'VpcId': 'string',
                'AvailabilityZone': 'string',
                'PreferredMaintenanceWindow': 'string',
                'PendingModifiedValues': {
                    'MasterUserPassword': 'string',
                    'NodeType': 'string',
                    'NumberOfNodes': 123,
                    'ClusterType': 'string',
                    'ClusterVersion': 'string',
                    'AutomatedSnapshotRetentionPeriod': 123,
                    'ClusterIdentifier': 'string',
                    'PubliclyAccessible': True|False,
                    'EnhancedVpcRouting': True|False
                },
                'ClusterVersion': 'string',
                'AllowVersionUpgrade': True|False,
                'NumberOfNodes': 123,
                'PubliclyAccessible': True|False,
                'Encrypted': True|False,
                'RestoreStatus': {
                    'Status': 'string',
                    'CurrentRestoreRateInMegaBytesPerSecond': 123.0,
                    'SnapshotSizeInMegaBytes': 123,
                    'ProgressInMegaBytes': 123,
                    'ElapsedTimeInSeconds': 123,
                    'EstimatedTimeToCompletionInSeconds': 123
                },
                'HsmStatus': {
                    'HsmClientCertificateIdentifier': 'string',
                    'HsmConfigurationIdentifier': 'string',
                    'Status': 'string'
                },
                'ClusterSnapshotCopyStatus': {
                    'DestinationRegion': 'string',
                    'RetentionPeriod': 123,
                    'SnapshotCopyGrantName': 'string'
                },
                'ClusterPublicKey': 'string',
                'ClusterNodes': [
                    {
                        'NodeRole': 'string',
                        'PrivateIPAddress': 'string',
                        'PublicIPAddress': 'string'
                    },
                ],
                'ElasticIpStatus': {'ElasticIp': 'string', 'Status': 'string'},
                'ClusterRevisionNumber': 'string',
                'Tags': [
                    {'Key': 'string', 'Value': 'string'},
                ],
                'KmsKeyId': 'string',
                'EnhancedVpcRouting': True|False,
                'IamRoles': [
                    {'IamRoleArn': 'string', 'ApplyStatus': 'string'},
                ]
            },
        ]
    }
    :returns: ClusterStatus is one of:
        available, creating, deleting, final-snapshot, hardware-failure,
        incompatible-hsm, incompatible-network, incompatible-parameters,
        incompatible-restore, modifying, rebooting, renaming, resizing,
        rotating-keys, storage-full, updating-hsm
    """
    pass
def describe_default_cluster_parameters(ParameterGroupFamily=None, MaxRecords=None, Marker=None):
    """Return the parameter settings for the specified parameter group family.

    For more information about parameters and parameter groups, go to Amazon
    Redshift Parameter Groups in the Amazon Redshift Cluster Management
    Guide. See also the AWS API Documentation.

    :type ParameterGroupFamily: string
    :param ParameterGroupFamily: [REQUIRED]
        The name of the cluster parameter group family.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100). When more records remain,
        a marker field is returned for retrieving the next set.
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeDefaultClusterParameters call.
    :rtype: dict
    :return: {
        'DefaultClusterParameters': {
            'ParameterGroupFamily': 'string',
            'Marker': 'string',
            'Parameters': [
                {
                    'ParameterName': 'string',
                    'ParameterValue': 'string',
                    'Description': 'string',
                    'Source': 'string',
                    'DataType': 'string',
                    'AllowedValues': 'string',
                    'ApplyType': 'static'|'dynamic',
                    'IsModifiable': True|False,
                    'MinimumEngineVersion': 'string'
                },
            ]
        }
    }
    """
    pass
def describe_event_categories(SourceType=None):
    """Display event categories for all, or one, event source type.

    For a list of the event categories and source types, go to Amazon
    Redshift Event Notifications. See also the AWS API Documentation.

    :type SourceType: string
    :param SourceType: The source type, such as cluster or parameter group,
        to which the described event categories apply.
        Valid values: cluster, cluster-snapshot, cluster-parameter-group,
        and cluster-security-group.
    :rtype: dict
    :return: {
        'EventCategoriesMapList': [
            {
                'SourceType': 'string',
                'Events': [
                    {
                        'EventId': 'string',
                        'EventCategories': [
                            'string',
                        ],
                        'EventDescription': 'string',
                        'Severity': 'string'
                    },
                ]
            },
        ]
    }
    """
    pass
def describe_event_subscriptions(SubscriptionName=None, MaxRecords=None, Marker=None):
    """List Amazon Redshift event notification subscriptions for the account.

    If a subscription name is given, only that subscription's description is
    listed. See also the AWS API Documentation.

    :type SubscriptionName: string
    :param SubscriptionName: Name of the Amazon Redshift event notification
        subscription to be described.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeEventSubscriptions call.
    :rtype: dict
    :return: {
        'Marker': 'string',
        'EventSubscriptionsList': [
            {
                'CustomerAwsId': 'string',
                'CustSubscriptionId': 'string',
                'SnsTopicArn': 'string',
                'Status': 'string',
                'SubscriptionCreationTime': datetime(2015, 1, 1),
                'SourceType': 'string',
                'SourceIdsList': [
                    'string',
                ],
                'EventCategoriesList': [
                    'string',
                ],
                'Severity': 'string',
                'Enabled': True|False,
                'Tags': [
                    {'Key': 'string', 'Value': 'string'},
                ]
            },
        ]
    }
    :returns: Status can be one of: active | no-permission | topic-not-exist.
        "no-permission" means Amazon Redshift no longer has permission to
        post to the Amazon SNS topic; "topic-not-exist" means the topic was
        deleted after the subscription was created.
    """
    pass
def describe_events(SourceIdentifier=None, SourceType=None, StartTime=None, EndTime=None, Duration=None, MaxRecords=None, Marker=None):
    """Return events for clusters, security groups, snapshots, and parameter groups.

    Covers the past 14 days; by default the past hour of events is returned.
    Events for a particular source can be obtained by providing its name as
    a parameter. See also the AWS API Documentation.

    :type SourceIdentifier: string
    :param SourceIdentifier: Identifier of the event source to return events
        for; if not specified, all sources are included.
        Constraints: if SourceIdentifier is supplied, SourceType must also
        be provided — a cluster identifier for ``cluster``, a security group
        name for ``cluster-security-group``, a parameter group name for
        ``cluster-parameter-group``, a snapshot identifier for
        ``cluster-snapshot``.
    :type SourceType: string
    :param SourceType: The event source to retrieve events for; if no value
        is specified, all events are returned.
        Constraints: if SourceType is supplied, SourceIdentifier must also
        be provided and must match the type (see above).
    :type StartTime: datetime
    :param StartTime: Beginning of the time interval, ISO 8601 format
        (example: ``2009-07-08T18:00Z``).
    :type EndTime: datetime
    :param EndTime: End of the time interval, ISO 8601 format
        (example: ``2009-07-08T18:00Z``).
    :type Duration: integer
    :param Duration: Minutes prior to the request time for which to retrieve
        events (default 60): e.g. a request at 18:00 with duration 60
        returns only events after 17:00.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeEvents call.
    :rtype: dict
    :return: {
        'Marker': 'string',
        'Events': [
            {
                'SourceIdentifier': 'string',
                'SourceType': 'cluster'|'cluster-parameter-group'|'cluster-security-group'|'cluster-snapshot',
                'Message': 'string',
                'EventCategories': [
                    'string',
                ],
                'Severity': 'string',
                'Date': datetime(2015, 1, 1),
                'EventId': 'string'
            },
        ]
    }
    :returns:
        (string) --
    """
    pass
def describe_hsm_client_certificates(HsmClientCertificateIdentifier=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """Return information about HSM client certificates.

    If no certificate ID is specified, information about all HSM
    certificates owned by the AWS customer account is returned. Supplying
    both tag keys and tag values returns all certificates matching any
    combination of them; omitting both returns certificates regardless of
    tags. See also the AWS API Documentation.

    :type HsmClientCertificateIdentifier: string
    :param HsmClientCertificateIdentifier: Identifier of a specific HSM
        client certificate to describe; when omitted, all HSM client
        certificates owned by the account are returned.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeHsmClientCertificates call.
    :type TagKeys: list
    :param TagKeys: Tag key or keys used to filter the returned HSM client
        certificates; certificates having either or both keys are returned.
        (string) --
    :type TagValues: list
    :param TagValues: Tag value or values used to filter the returned HSM
        client certificates; certificates having either or both values are
        returned.
        (string) --
    :rtype: dict
    :return: {
        'Marker': 'string',
        'HsmClientCertificates': [
            {
                'HsmClientCertificateIdentifier': 'string',
                'HsmClientCertificatePublicKey': 'string',
                'Tags': [
                    {'Key': 'string', 'Value': 'string'},
                ]
            },
        ]
    }
    """
    pass
def describe_hsm_configurations(HsmConfigurationIdentifier=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """Return information about Amazon Redshift HSM configurations.

    If no configuration ID is specified, information about all HSM
    configurations owned by the AWS customer account is returned. Supplying
    both tag keys and tag values returns all HSM connections matching any
    combination of them; omitting both returns HSM connections regardless of
    tags. See also the AWS API Documentation.

    :type HsmConfigurationIdentifier: string
    :param HsmConfigurationIdentifier: Identifier of a specific Amazon
        Redshift HSM configuration to describe; when omitted, all HSM
        configurations owned by the account are returned.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeHsmConfigurations call.
    :type TagKeys: list
    :param TagKeys: Tag key or keys used to filter the returned HSM
        configurations; configurations having either or both keys are
        returned.
        (string) --
    :type TagValues: list
    :param TagValues: Tag value or values used to filter the returned HSM
        configurations; configurations having either or both values are
        returned.
        (string) --
    :rtype: dict
    :return: {
        'Marker': 'string',
        'HsmConfigurations': [
            {
                'HsmConfigurationIdentifier': 'string',
                'Description': 'string',
                'HsmIpAddress': 'string',
                'HsmPartitionName': 'string',
                'Tags': [
                    {'Key': 'string', 'Value': 'string'},
                ]
            },
        ]
    }
    """
    pass
def describe_logging_status(ClusterIdentifier=None):
    """Describe whether logging is enabled for a cluster.

    Reports whether information such as queries and connection attempts is
    being logged for the specified Amazon Redshift cluster. See also the
    AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        The identifier of the cluster from which to get the logging status.
        Example: examplecluster
    :rtype: dict
    :return: {
        'LoggingEnabled': True|False,
        'BucketName': 'string',
        'S3KeyPrefix': 'string',
        'LastSuccessfulDeliveryTime': datetime(2015, 1, 1),
        'LastFailureTime': datetime(2015, 1, 1),
        'LastFailureMessage': 'string'
    }
    """
    pass
def describe_orderable_cluster_options(ClusterVersion=None, NodeType=None, MaxRecords=None, Marker=None):
    """Return a list of orderable cluster options.

    Useful before creating a cluster to find available options such as the
    EC2 Availability Zones in the specific AWS region and the node types you
    can request; node types differ by available storage, memory, CPU and
    price. See Amazon Redshift Clusters in the Amazon Redshift Cluster
    Management Guide and the AWS API Documentation.

    :type ClusterVersion: string
    :param ClusterVersion: Version filter value; show only offerings
        matching this version. Default: all versions. Constraints: must be
        one of the versions returned from DescribeClusterVersions.
    :type NodeType: string
    :param NodeType: Node type filter value; show only offerings matching
        this node type.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeOrderableClusterOptions call.
    :rtype: dict
    :return: {
        'OrderableClusterOptions': [
            {
                'ClusterVersion': 'string',
                'ClusterType': 'string',
                'NodeType': 'string',
                'AvailabilityZones': [
                    {'Name': 'string'},
                ]
            },
        ],
        'Marker': 'string'
    }
    """
    pass
def describe_reserved_node_offerings(ReservedNodeOfferingId=None, MaxRecords=None, Marker=None):
    """Return the available reserved node offerings from Amazon Redshift.

    Each description includes the node type, the fixed and recurring costs,
    and the reservation duration, to help decide which offering to purchase;
    the unique offering ID is then used in PurchaseReservedNodeOffering.
    See Purchasing Reserved Nodes in the Amazon Redshift Cluster Management
    Guide and the AWS API Documentation.

    :type ReservedNodeOfferingId: string
    :param ReservedNodeOfferingId: The unique identifier for the offering.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeReservedNodeOfferings call.
    :rtype: dict
    :return: {
        'Marker': 'string',
        'ReservedNodeOfferings': [
            {
                'ReservedNodeOfferingId': 'string',
                'NodeType': 'string',
                'Duration': 123,
                'FixedPrice': 123.0,
                'UsagePrice': 123.0,
                'CurrencyCode': 'string',
                'OfferingType': 'string',
                'RecurringCharges': [
                    {
                        'RecurringChargeAmount': 123.0,
                        'RecurringChargeFrequency': 'string'
                    },
                ]
            },
        ]
    }
    """
    pass
def describe_reserved_nodes(ReservedNodeId=None, MaxRecords=None, Marker=None):
    """Return the descriptions of the reserved nodes.

    See also the AWS API Documentation.

    :type ReservedNodeId: string
    :param ReservedNodeId: Identifier for the node reservation.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call
        (default 100; minimum 20, maximum 100).
    :type Marker: string
    :param Marker: Optional pagination marker returned by a previous
        DescribeReservedNodes call.
    :rtype: dict
    :return: {
        'Marker': 'string',
        'ReservedNodes': [
            {
                'ReservedNodeId': 'string',
                'ReservedNodeOfferingId': 'string',
                'NodeType': 'string',
                'StartTime': datetime(2015, 1, 1),
                'Duration': 123,
                'FixedPrice': 123.0,
                'UsagePrice': 123.0,
                'CurrencyCode': 'string',
                'NodeCount': 123,
                'State': 'string',
                'OfferingType': 'string',
                'RecurringCharges': [
                    {
                        'RecurringChargeAmount': 123.0,
                        'RecurringChargeFrequency': 'string'
                    },
                ]
            },
        ]
    }
    :returns: State values:
        pending-payment-This reserved node has recently been purchased, and
        the sale has been approved, but payment has not yet been confirmed.
        active-This reserved node is owned by the caller and is available
        for use.
        payment-failed-Payment failed for the purchase attempt.
    """
    pass
def describe_resize(ClusterIdentifier=None):
    """Return information about the last resize operation for a cluster.

    If no resize operation has ever been initiated for the specified
    cluster, an HTTP 404 error is returned. If a resize operation was
    initiated and completed, its status remains SUCCEEDED until the next
    resize. A resize can be requested via ModifyCluster by specifying a
    different number or type of nodes. See also the AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        The unique identifier of a cluster whose resize progress you are
        requesting (case-sensitive). By default, resize operations for all
        clusters defined for an AWS account are returned.
    :rtype: dict
    :return: {
        'TargetNodeType': 'string',
        'TargetNumberOfNodes': 123,
        'TargetClusterType': 'string',
        'Status': 'string',
        'ImportTablesCompleted': [
            'string',
        ],
        'ImportTablesInProgress': [
            'string',
        ],
        'ImportTablesNotStarted': [
            'string',
        ],
        'AvgResizeRateInMegaBytesPerSecond': 123.0,
        'TotalResizeDataInMegaBytes': 123,
        'ProgressInMegaBytes': 123,
        'ElapsedTimeInSeconds': 123,
        'EstimatedTimeToCompletionInSeconds': 123
    }
    :returns:
        (string) --
    """
    pass
def describe_snapshot_copy_grants(SnapshotCopyGrantName=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """
    List the snapshot copy grants owned by the AWS account in the destination
    region (Amazon Redshift ``DescribeSnapshotCopyGrants`` API).

    For more information about managing snapshot copy grants, see Amazon
    Redshift Database Encryption in the Amazon Redshift Cluster Management
    Guide.

    See also: AWS API Documentation

    :example: response = client.describe_snapshot_copy_grants(
        SnapshotCopyGrantName='string',
        MaxRecords=123,
        Marker='string',
        TagKeys=['string'],
        TagValues=['string']
    )
    :type SnapshotCopyGrantName: string
    :param SnapshotCopyGrantName: The name of the snapshot copy grant.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call. If more
        records remain, a value is returned in the marker field so the next
        set can be retrieved by retrying with that marker.
        Default: 100. Constraints: minimum 20, maximum 100.
    :type Marker: string
    :param Marker: Optional pagination starting point returned by a previous
        DescribeSnapshotCopyGrant request whose results exceeded MaxRecords.
        Constraints: specify either SnapshotCopyGrantName or Marker, not both.
    :type TagKeys: list
    :param TagKeys: Tag key or keys (strings) to match; resources associated
        with any of the specified keys are returned (e.g. keys 'owner' and
        'environment' return resources carrying either or both).
    :type TagValues: list
    :param TagValues: Tag value or values (strings) to match; resources
        associated with any of the specified values are returned.
    :rtype: dict
    :return: {'Marker': string, 'SnapshotCopyGrants': [
        {'SnapshotCopyGrantName': string, 'KmsKeyId': string,
         'Tags': [{'Key': string, 'Value': string}, ...]}, ...]}
    """
    pass
def describe_table_restore_status(ClusterIdentifier=None, TableRestoreRequestId=None, MaxRecords=None, Marker=None):
    """
    List the status of table restore requests made with the
    ``RestoreTableFromClusterSnapshot`` API action.

    When TableRestoreRequestId is omitted, the status of all table restore
    requests is returned, ordered by request date and time ascending;
    otherwise only the status of the specified request is returned.

    See also: AWS API Documentation

    :example: response = client.describe_table_restore_status(
        ClusterIdentifier='string',
        TableRestoreRequestId='string',
        MaxRecords=123,
        Marker='string'
    )
    :type ClusterIdentifier: string
    :param ClusterIdentifier: The Amazon Redshift cluster that the table is
        being restored to.
    :type TableRestoreRequestId: string
    :param TableRestoreRequestId: Identifier of the table restore request to
        return status for. If omitted, the status of all in-progress table
        restore requests is returned.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of records in the response. If more
        records exist, a pagination marker is included in the response.
    :type Marker: string
    :param Marker: Optional pagination token from a previous
        DescribeTableRestoreStatus request; only records beyond the marker,
        up to MaxRecords, are returned.
    :rtype: dict
    :return: {'TableRestoreStatusDetails': [
        {'TableRestoreRequestId': string,
         'Status': 'PENDING'|'IN_PROGRESS'|'SUCCEEDED'|'FAILED'|'CANCELED',
         'Message': string, 'RequestTime': datetime,
         'ProgressInMegaBytes': integer, 'TotalDataInMegaBytes': integer,
         'ClusterIdentifier': string, 'SnapshotIdentifier': string,
         'SourceDatabaseName': string, 'SourceSchemaName': string,
         'SourceTableName': string, 'TargetDatabaseName': string,
         'TargetSchemaName': string, 'NewTableName': string}, ...],
        'Marker': string}
    """
    pass
def describe_tags(ResourceName=None, ResourceType=None, MaxRecords=None, Marker=None, TagKeys=None, TagValues=None):
    """
    Return a list of tags, either for one resource (by ARN) or for all
    resources of a given type, such as clusters or snapshots
    (Amazon Redshift ``DescribeTags`` API).

    Limitations:
    - Specifying both tag keys and tag values returns all resources matching
      any combination of the specified keys and values (e.g. keys 'owner'
      and 'environment' with values 'admin' and 'test' return every resource
      with any combination of those).
    - Omitting both keys and values returns resources regardless of whether
      they have tags associated with them.

    See also: AWS API Documentation

    :example: response = client.describe_tags(
        ResourceName='string',
        ResourceType='string',
        MaxRecords=123,
        Marker='string',
        TagKeys=['string'],
        TagValues=['string']
    )
    :type ResourceName: string
    :param ResourceName: The Amazon Resource Name (ARN) for which to describe
        tags, e.g. arn:aws:redshift:us-east-1:123456789:cluster:t1 .
    :type ResourceType: string
    :param ResourceType: Type of resource to view tags for. Valid types:
        Cluster, CIDR/IP, EC2 security group, Snapshot, Cluster security
        group, Subnet group, HSM connection, HSM certificate, Parameter
        group, Snapshot copy grant. For constructing ARNs see Constructing
        an Amazon Redshift Amazon Resource Name (ARN) in the Amazon Redshift
        Cluster Management Guide.
    :type MaxRecords: integer
    :param MaxRecords: Maximum number of response records per call. If more
        records remain, a value is returned in the marker field so the next
        set can be retrieved by retrying with that marker.
    :type Marker: string
    :param Marker: Starting point for the next set of response records; pass
        a marker value returned by a previous call to continue. An empty
        marker field means all records have been retrieved.
    :type TagKeys: list
    :param TagKeys: Tag key or keys (strings) to match; resources associated
        with any of the specified keys are returned.
    :type TagValues: list
    :param TagValues: Tag value or values (strings) to match; resources
        associated with any of the specified values are returned.
    :rtype: dict
    :return: {'TaggedResources': [
        {'Tag': {'Key': string, 'Value': string},
         'ResourceName': string, 'ResourceType': string}, ...],
        'Marker': string}
    """
    pass
def disable_logging(ClusterIdentifier=None):
    """
    Stop logging information, such as queries and connection attempts, for
    the specified Amazon Redshift cluster.

    See also: AWS API Documentation

    :example: response = client.disable_logging(
        ClusterIdentifier='string'
    )
    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        Identifier of the cluster on which logging is to be stopped.
        Example: examplecluster
    :rtype: dict
    :return: {'LoggingEnabled': True|False, 'BucketName': string,
        'S3KeyPrefix': string, 'LastSuccessfulDeliveryTime': datetime,
        'LastFailureTime': datetime, 'LastFailureMessage': string}
    """
    pass
def disable_snapshot_copy(ClusterIdentifier=None):
    """
    Disable the automatic copying of snapshots from one region to another
    for the specified cluster.

    If the cluster and its snapshots are encrypted with a customer master
    key (CMK) from AWS KMS, use ``DeleteSnapshotCopyGrant`` to delete the
    grant that permits Amazon Redshift to use the CMK in the destination
    region.

    See also: AWS API Documentation

    :example: response = client.disable_snapshot_copy(
        ClusterIdentifier='string'
    )
    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        Unique identifier of the source cluster on which to disable snapshot
        copying. Constraints: must be the valid name of an existing cluster
        that has cross-region snapshot copy enabled.
    :rtype: dict
    :return: {'Cluster': {...}} — the full cluster description, including
        identity and state ('ClusterIdentifier', 'NodeType', 'ClusterStatus',
        'ModifyStatus', 'MasterUsername', 'DBName', 'Endpoint',
        'ClusterCreateTime', 'AutomatedSnapshotRetentionPeriod'), security
        and parameter groups ('ClusterSecurityGroups', 'VpcSecurityGroups',
        'ClusterParameterGroups' with per-parameter apply status), network
        placement ('ClusterSubnetGroupName', 'VpcId', 'AvailabilityZone',
        'PreferredMaintenanceWindow'), 'PendingModifiedValues', version and
        size flags ('ClusterVersion', 'AllowVersionUpgrade', 'NumberOfNodes',
        'PubliclyAccessible', 'Encrypted', 'EnhancedVpcRouting'),
        'RestoreStatus', 'HsmStatus', 'ClusterSnapshotCopyStatus'
        ('DestinationRegion', 'RetentionPeriod', 'SnapshotCopyGrantName'),
        'ClusterPublicKey', 'ClusterNodes', 'ElasticIpStatus',
        'ClusterRevisionNumber', 'Tags', 'KmsKeyId' and 'IamRoles'.
    :returns:
        Parameter apply statuses:
        in-sync : The parameter value is in sync with the database.
        pending-reboot : The parameter value will be applied after the cluster reboots.
        applying : The parameter value is being applied to the database.
        invalid-parameter : Cannot apply the parameter value because it has an invalid value or syntax.
        apply-deferred : The parameter contains static property changes. The changes are deferred until the cluster reboots.
        apply-error : Cannot connect to the cluster. The parameter change will be applied after the cluster reboots.
        unknown-error : Cannot apply the parameter change right now. The change will be applied after the cluster reboots.
    """
    pass
def enable_logging(ClusterIdentifier=None, BucketName=None, S3KeyPrefix=None):
    """
    Start logging information, such as queries and connection attempts, for
    the specified Amazon Redshift cluster.

    See also: AWS API Documentation

    :example: response = client.enable_logging(
        ClusterIdentifier='string',
        BucketName='string',
        S3KeyPrefix='string'
    )
    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        Identifier of the cluster on which logging is to be started.
        Example: examplecluster
    :type BucketName: string
    :param BucketName: [REQUIRED]
        Name of an existing S3 bucket where the log files are stored.
        Constraints: must be in the same region as the cluster, and the
        cluster must have read-bucket and put-object permissions.
    :type S3KeyPrefix: string
    :param S3KeyPrefix: Prefix applied to the log file names.
        Constraints: cannot exceed 512 characters; cannot contain spaces( ),
        double quotes ('), single quotes ('), a backslash (), or control
        characters. The hexadecimal codes for invalid characters are
        x00 to x20, x22, x27, x5c, and x7f or larger.
    :rtype: dict
    :return: {'LoggingEnabled': True|False, 'BucketName': string,
        'S3KeyPrefix': string, 'LastSuccessfulDeliveryTime': datetime,
        'LastFailureTime': datetime, 'LastFailureMessage': string}
    """
    pass
def enable_snapshot_copy(ClusterIdentifier=None, DestinationRegion=None, RetentionPeriod=None, SnapshotCopyGrantName=None):
    """
    Enable the automatic copy of snapshots from one region to another for
    the specified cluster.

    See also: AWS API Documentation

    :example: response = client.enable_snapshot_copy(
        ClusterIdentifier='string',
        DestinationRegion='string',
        RetentionPeriod=123,
        SnapshotCopyGrantName='string'
    )
    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        Unique identifier of the source cluster to copy snapshots from.
        Constraints: must be the valid name of an existing cluster that does
        not already have cross-region snapshot copy enabled.
    :type DestinationRegion: string
    :param DestinationRegion: [REQUIRED]
        Destination region to copy snapshots to. Constraints: must be the
        name of a valid region; see Regions and Endpoints in the Amazon Web
        Services General Reference.
    :type RetentionPeriod: integer
    :param RetentionPeriod: Number of days to retain automated snapshots in
        the destination region after they are copied from the source region.
        Default: 7. Constraints: at least 1 and no more than 35.
    :type SnapshotCopyGrantName: string
    :param SnapshotCopyGrantName: Name of the snapshot copy grant to use when
        snapshots of an AWS KMS-encrypted cluster are copied to the
        destination region.
    :rtype: dict
    :return: {'Cluster': {...}} — the full cluster description, including
        identity and state ('ClusterIdentifier', 'NodeType', 'ClusterStatus',
        'ModifyStatus', 'MasterUsername', 'DBName', 'Endpoint',
        'ClusterCreateTime', 'AutomatedSnapshotRetentionPeriod'), security
        and parameter groups ('ClusterSecurityGroups', 'VpcSecurityGroups',
        'ClusterParameterGroups' with per-parameter apply status), network
        placement ('ClusterSubnetGroupName', 'VpcId', 'AvailabilityZone',
        'PreferredMaintenanceWindow'), 'PendingModifiedValues', version and
        size flags ('ClusterVersion', 'AllowVersionUpgrade', 'NumberOfNodes',
        'PubliclyAccessible', 'Encrypted', 'EnhancedVpcRouting'),
        'RestoreStatus', 'HsmStatus', 'ClusterSnapshotCopyStatus'
        ('DestinationRegion', 'RetentionPeriod', 'SnapshotCopyGrantName'),
        'ClusterPublicKey', 'ClusterNodes', 'ElasticIpStatus',
        'ClusterRevisionNumber', 'Tags', 'KmsKeyId' and 'IamRoles'.
    :returns:
        Possible cluster statuses:
        available, creating, deleting, final-snapshot, hardware-failure,
        incompatible-hsm, incompatible-network, incompatible-parameters,
        incompatible-restore, modifying, rebooting, renaming, resizing,
        rotating-keys, storage-full, updating-hsm
    """
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """
    Generate a presigned URL given a client, its method, and arguments.

    :type ClientMethod: string
    :param ClientMethod: The client method to presign for.
    :type Params: dict
    :param Params: The parameters normally passed to ClientMethod.
    :type ExpiresIn: int
    :param ExpiresIn: Number of seconds the presigned URL is valid for; by
        default it expires in an hour (3600 seconds).
    :type HttpMethod: string
    :param HttpMethod: HTTP method to use on the generated URL; by default,
        whatever method is used in the method's model.
    """
    pass
def get_cluster_credentials(DbUser=None, DbName=None, ClusterIdentifier=None, DurationSeconds=None, AutoCreate=None, DbGroups=None):
    """
    Return a database user name and a temporary password with temporary
    authorization to log in to an Amazon Redshift database.

    The returned user name is prefixed with ``IAM:`` when AutoCreate is
    False, or ``IAMA:`` when AutoCreate is True. One or more database user
    groups that the user will join at login may be specified. Credentials
    expire after 900 seconds by default; a duration between 900 (15 min) and
    3600 (60 min) seconds may be requested. See Generating IAM Database User
    Credentials in the Amazon Redshift Cluster Management Guide.

    Required IAM permissions: the caller must be allowed the
    ``redshift:GetClusterCredentials`` action with access to the ``dbuser``
    resource on the cluster, and the policy's user name must match DbUser.
    If DbGroups is given, the policy must allow ``redshift:JoinGroup`` for
    the listed ``dbgroups``; if AutoCreate is True it must include
    ``redshift:CreateClusterUser``; if DbName is given it must allow access
    to the ``dbname`` resource for that database.

    See also: AWS API Documentation

    :example: response = client.get_cluster_credentials(
        DbUser='string',
        DbName='string',
        ClusterIdentifier='string',
        DurationSeconds=123,
        AutoCreate=True|False,
        DbGroups=['string']
    )
    :type DbUser: string
    :param DbUser: [REQUIRED]
        Name of a database user. If a matching user exists, the temporary
        credentials carry that user's permissions. If it doesn't exist and
        Autocreate is True, a new user with PUBLIC permissions is created;
        if Autocreate is False the command succeeds but the connection
        attempt will fail. See CREATE USER in the Amazon Redshift Database
        Developer Guide. Constraints: 1 to 128 alphanumeric characters or
        hyphens; lowercase letters only; first character must be a letter;
        must not contain a colon ( : ) or slash ( / ); cannot be a reserved
        word (see Reserved Words in the Amazon Redshift Database Developer
        Guide).
    :type DbName: string
    :param DbName: Name of a database that DbUser is authorized to log on
        to; if omitted, DbUser can log in to any existing database.
        Constraints: 1 to 64 alphanumeric characters or hyphens; lowercase
        letters only; cannot be a reserved word.
    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED]
        Case-sensitive unique identifier of the cluster that contains the
        database for which credentials are requested.
    :type DurationSeconds: integer
    :param DurationSeconds: Seconds until the returned temporary password
        expires. Constraint: minimum 900, maximum 3600. Default: 900.
    :type AutoCreate: boolean
    :param AutoCreate: Create a database user named DbUser if one does not
        exist.
    :type DbGroups: list
    :param DbGroups: Names (strings) of existing database groups that DbUser
        joins for the current session; if omitted, the new user is added
        only to PUBLIC.
    :rtype: dict
    :return: {'DbUser': string, 'DbPassword': string,
        'Expiration': datetime}
    """
    pass
def get_paginator(operation_name=None):
    """
    Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name, identical to the method name
        on the client. For example, if the method name is create_foo and you
        would normally invoke the operation as client.create_foo(**kwargs),
        then — provided the create_foo operation can be paginated — you can
        use client.get_paginator('create_foo').
    :rtype: L{botocore.paginate.Paginator}
    """
    pass
def get_waiter():
    """
    Return a waiter object for the client.

    NOTE(review): this generated stub takes no arguments and its original
    docstring was empty; the corresponding botocore client method accepts a
    waiter name — confirm against the botocore documentation before relying
    on this signature.
    """
    pass
def modify_cluster(ClusterIdentifier=None, ClusterType=None, NodeType=None, NumberOfNodes=None, ClusterSecurityGroups=None, VpcSecurityGroupIds=None, MasterUserPassword=None, ClusterParameterGroupName=None, AutomatedSnapshotRetentionPeriod=None, PreferredMaintenanceWindow=None, ClusterVersion=None, AllowVersionUpgrade=None, HsmClientCertificateIdentifier=None, HsmConfigurationIdentifier=None, NewClusterIdentifier=None, PubliclyAccessible=None, ElasticIp=None, EnhancedVpcRouting=None):
    """Modify the settings for an Amazon Redshift cluster.

    Use this to change security or parameter groups, the preferred
    maintenance window, or the master user password, and to resize the
    cluster. When resizing, both ``NumberOfNodes`` and ``NodeType`` must
    be specified even if only one of them changes. Resetting the password
    or changing security groups does not require a reboot; applying a new
    parameter group does.

    See also: AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED] The unique identifier of the
        cluster to be modified. Example: ``examplecluster``.
    :type ClusterType: string
    :param ClusterType: The new cluster type. Valid values:
        ``multi-node`` | ``single-node``. During a resize the existing
        cluster goes read-only and there is an outage while the
        connection is switched to the new cluster; track progress with
        DescribeResize.
    :type NodeType: string
    :param NodeType: The new node type. Valid values: ds1.xlarge |
        ds1.8xlarge | ds2.xlarge | ds2.8xlarge | dc1.large |
        dc1.8xlarge. Must be accompanied by ``NumberOfNodes``.
    :type NumberOfNodes: integer
    :param NumberOfNodes: The new number of nodes (integer greater
        than 0). Must be accompanied by ``NodeType``.
    :type ClusterSecurityGroups: list
    :param ClusterSecurityGroups: Cluster security groups to authorize on
        this cluster (applied asynchronously); groups currently associated
        but not listed are revoked. Names: 1-255 alphanumerics or hyphens,
        first character a letter, no trailing hyphen or double hyphen.
    :type VpcSecurityGroupIds: list
    :param VpcSecurityGroupIds: VPC security groups to associate with the
        cluster.
    :type MasterUserPassword: string
    :param MasterUserPassword: The new master user password (applied
        asynchronously; visible in ``PendingModifiedValues`` until
        applied). 8-64 printable ASCII characters (codes 33-126) with at
        least one uppercase letter, one lowercase letter and one number;
        may not contain single quote, double quote, backslash, slash, @
        or space. Operations never return the password.
    :type ClusterParameterGroupName: string
    :param ClusterParameterGroupName: Parameter group to apply; takes
        effect only after the cluster is rebooted (see RebootCluster).
        Must be in the parameter group family matching the cluster
        version.
    :type AutomatedSnapshotRetentionPeriod: integer
    :param AutomatedSnapshotRetentionPeriod: Days to retain automated
        snapshots, 0-35 (0 disables them). Decreasing the period
        immediately deletes snapshots outside the new window.
    :type PreferredMaintenanceWindow: string
    :param PreferredMaintenanceWindow: Weekly maintenance window in UTC,
        format ``ddd:hh24:mi-ddd:hh24:mi`` (e.g. ``wed:07:30-wed:08:00``),
        at least 30 minutes long. The change is made immediately.
    :type ClusterVersion: string
    :param ClusterVersion: Engine version to upgrade to (e.g. ``1.0``).
        Major upgrades with a non-default parameter group require a
        parameter group in the new version's family.
    :type AllowVersionUpgrade: boolean
    :param AllowVersionUpgrade: If true, major version upgrades are
        applied automatically during the maintenance window.
        Default: false.
    :type HsmClientCertificateIdentifier: string
    :param HsmClientCertificateIdentifier: Name of the HSM client
        certificate the cluster uses to retrieve data encryption keys
        from an HSM.
    :type HsmConfigurationIdentifier: string
    :param HsmConfigurationIdentifier: Name of the HSM configuration the
        cluster uses to retrieve and store keys in an HSM.
    :type NewClusterIdentifier: string
    :param NewClusterIdentifier: The new identifier for the cluster:
        1-63 alphanumerics or hyphens, lowercase, first character a
        letter, no trailing hyphen or two consecutive hyphens, unique
        within the AWS account.
    :type PubliclyAccessible: boolean
    :param PubliclyAccessible: If true, the cluster can be accessed from
        a public network (VPC clusters only).
    :type ElasticIp: string
    :param ElasticIp: Elastic IP (EIP) address for the cluster. The
        cluster must be provisioned in EC2-VPC and publicly accessible
        through an Internet gateway.
    :type EnhancedVpcRouting: boolean
    :param EnhancedVpcRouting: If true, enable enhanced VPC routing
        (the cluster must be in a VPC). Default: false.
    :rtype: dict
    :return: ``{'Cluster': {...}}`` -- the full description of the
        modified cluster: identifier, node type, status (one of
        available | creating | deleting | final-snapshot |
        hardware-failure | incompatible-hsm | incompatible-network |
        incompatible-parameters | incompatible-restore | modifying |
        rebooting | renaming | resizing | rotating-keys | storage-full |
        updating-hsm), endpoint, security and parameter groups, pending
        modified values, snapshot/HSM/restore status, tags and IAM roles.
    """
    pass
def modify_cluster_iam_roles(ClusterIdentifier=None, AddIamRoles=None, RemoveIamRoles=None):
    """Modify the list of IAM roles the cluster can use to access other
    AWS services.

    A cluster can have up to 10 IAM roles associated at any time, and up
    to 10 roles may be added or removed in a single request.

    See also: AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED] The unique identifier of the
        cluster whose IAM roles are being associated or disassociated.
    :type AddIamRoles: list
    :param AddIamRoles: Zero or more IAM roles, in Amazon Resource Name
        (ARN) format, to associate with the cluster.
    :type RemoveIamRoles: list
    :param RemoveIamRoles: Zero or more IAM roles, in ARN format, to
        disassociate from the cluster.
    :rtype: dict
    :return: ``{'Cluster': {...}}`` -- the full description of the
        cluster: identifier, node type, status (one of available |
        creating | deleting | final-snapshot | hardware-failure |
        incompatible-hsm | incompatible-network |
        incompatible-parameters | incompatible-restore | modifying |
        rebooting | renaming | resizing | rotating-keys | storage-full |
        updating-hsm), endpoint, security and parameter groups, pending
        modified values, snapshot/HSM/restore status, tags and the
        ``IamRoles`` list with each role's ARN and apply status.
    """
    pass
def modify_cluster_parameter_group(ParameterGroupName=None, Parameters=None):
    """Modify the parameters of a cluster parameter group.

    For more information about parameters and parameter groups, see
    Amazon Redshift Parameter Groups in the Amazon Redshift Cluster
    Management Guide.

    See also: AWS API Documentation.

    :type ParameterGroupName: string
    :param ParameterGroupName: [REQUIRED] The name of the parameter
        group to be modified.
    :type Parameters: list
    :param Parameters: [REQUIRED] An array of parameters to modify; at
        most 20 per request. Each entry is a dict describing one
        parameter and must supply at least ``ParameterName`` and
        ``ParameterValue``; the remaining keys are optional:

        - ``ParameterName`` (string) -- the parameter's name.
        - ``ParameterValue`` (string) -- the parameter's value. For the
          workload management (WLM) configuration, all name-value pairs
          must be supplied in the ``wlm_json_configuration`` parameter.
        - ``Description`` (string) -- a description of the parameter.
        - ``Source`` (string) -- where the value came from, such as
          ``engine-default`` or ``user``.
        - ``DataType`` (string) -- the parameter's data type.
        - ``AllowedValues`` (string) -- the valid range of values.
        - ``ApplyType`` (string) -- ``static`` | ``dynamic``; static
          properties require a cluster reboot to take effect.
        - ``IsModifiable`` (boolean) -- whether the parameter may be
          changed at all.
        - ``MinimumEngineVersion`` (string) -- earliest engine version
          the parameter applies to.
    :rtype: dict
    :return: ``{'ParameterGroupName': 'string',
        'ParameterGroupStatus': 'string'}``
    """
    pass
def modify_cluster_subnet_group(ClusterSubnetGroupName=None, Description=None, SubnetIds=None):
    """Modify a cluster subnet group to include the specified VPC
    subnets. The operation replaces the existing subnet list with the
    new one.

    See also: AWS API Documentation.

    :type ClusterSubnetGroupName: string
    :param ClusterSubnetGroupName: [REQUIRED] The name of the subnet
        group to be modified.
    :type Description: string
    :param Description: A text description of the subnet group.
    :type SubnetIds: list
    :param SubnetIds: [REQUIRED] An array of VPC subnet IDs; at most 20
        subnets per request.
    :rtype: dict
    :return: ``{'ClusterSubnetGroup': {...}}`` -- the modified subnet
        group: name, description, VPC ID, group status, the list of
        subnets (each with identifier, availability zone and status)
        and tags.
    """
    pass
def modify_event_subscription(SubscriptionName=None, SnsTopicArn=None, SourceType=None, SourceIds=None, EventCategories=None, Severity=None, Enabled=None):
    """Modify an existing Amazon Redshift event notification
    subscription.

    See also: AWS API Documentation.

    :type SubscriptionName: string
    :param SubscriptionName: [REQUIRED] The name of the subscription to
        modify.
    :type SnsTopicArn: string
    :param SnsTopicArn: The Amazon Resource Name (ARN) of the SNS topic
        used by the subscription.
    :type SourceType: string
    :param SourceType: The type of source generating the events. Valid
        values: cluster, cluster-parameter-group, cluster-security-group,
        cluster-snapshot. If omitted, events for all Amazon Redshift
        objects in the account are returned. A source type is required in
        order to specify source IDs.
    :type SourceIds: list
    :param SourceIds: Identifiers of Redshift source objects, all of the
        type given in ``SourceType``; only events from these objects are
        returned. Examples: ``my-cluster-1``, ``my-snapshot-20131010``.
    :type EventCategories: list
    :param EventCategories: Event categories to publish. Values:
        Configuration, Management, Monitoring, Security.
    :type Severity: string
    :param Severity: Event severity to publish. Values: ERROR, INFO.
    :type Enabled: boolean
    :param Enabled: Whether the subscription is enabled; ``True`` means
        enabled.
    :rtype: dict
    :return: ``{'EventSubscription': {...}}`` -- the subscription:
        customer AWS ID, subscription ID, SNS topic ARN, status (one of
        active | no-permission | topic-not-exist, where "no-permission"
        means Redshift can no longer post to the SNS topic and
        "topic-not-exist" means the topic was deleted after the
        subscription was created), creation time, source type and IDs,
        event categories, severity, enabled flag and tags.
    """
    pass
def modify_snapshot_copy_retention_period(ClusterIdentifier=None, RetentionPeriod=None):
    """Modify the number of days automated snapshots are retained in the
    destination region after being copied from the source region.

    See also: AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED] The unique identifier of the
        cluster whose copied-snapshot retention period is being changed.
        Must be the valid name of an existing cluster with cross-region
        snapshot copy enabled.
    :type RetentionPeriod: integer
    :param RetentionPeriod: [REQUIRED] Days to retain copied automated
        snapshots in the destination region; at least 1 and no more
        than 35. Decreasing the period deletes existing copied snapshots
        that fall outside the new retention window.
    :rtype: dict
    :return: ``{'Cluster': {...}}`` -- the full description of the
        cluster: identifier, node type, status (one of available |
        creating | deleting | final-snapshot | hardware-failure |
        incompatible-hsm | incompatible-network |
        incompatible-parameters | incompatible-restore | modifying |
        rebooting | renaming | resizing | rotating-keys | storage-full |
        updating-hsm), endpoint, security and parameter groups, pending
        modified values, the ``ClusterSnapshotCopyStatus`` (destination
        region, retention period, snapshot copy grant name), tags and
        IAM roles.
    """
    pass
def purchase_reserved_node_offering(ReservedNodeOfferingId=None, NodeCount=None):
    """Purchase reserved nodes from one of Amazon Redshift's predefined
    reserved node offerings.

    Call DescribeReservedNodeOfferings to obtain the available offerings,
    then call this API with a specific offering and the number of nodes
    to reserve. For more information, see Purchasing Reserved Nodes in
    the Amazon Redshift Cluster Management Guide.

    See also: AWS API Documentation.

    :type ReservedNodeOfferingId: string
    :param ReservedNodeOfferingId: [REQUIRED] The unique identifier of
        the reserved node offering to purchase.
    :type NodeCount: integer
    :param NodeCount: The number of reserved nodes to purchase.
        Default: 1.
    :rtype: dict
    :return: ``{'ReservedNode': {...}}`` -- the purchased reservation:
        reserved node ID, offering ID, node type, start time, duration,
        fixed and usage prices, currency code, node count, state (one of
        pending-payment, meaning the sale was approved but payment is not
        yet confirmed; active, meaning the node is owned by the caller
        and available for use; or payment-failed), offering type and
        recurring charges.
    """
    pass
def reboot_cluster(ClusterIdentifier=None):
    """Reboot a cluster.

    The reboot happens as soon as possible and causes a momentary outage
    during which the cluster status is set to ``rebooting``. A cluster
    event is created when the reboot completes, and any pending cluster
    modifications (see ModifyCluster) are applied at this reboot.

    See also: AWS API Documentation.

    :type ClusterIdentifier: string
    :param ClusterIdentifier: [REQUIRED] The cluster identifier.
    :rtype: dict
    :return: ``{'Cluster': {...}}`` -- the full description of the
        cluster: identifier, node type, status, endpoint, security and
        parameter groups (each parameter's apply status is one of
        in-sync, meaning the value is in sync with the database;
        pending-reboot, applied after the cluster reboots; applying,
        being applied now; invalid-parameter, value or syntax invalid;
        apply-deferred, static changes deferred until reboot;
        apply-error or unknown-error, change could not be applied and
        will be retried after reboot), pending modified values,
        snapshot/HSM/restore status, tags and IAM roles.
    """
    pass
def reset_cluster_parameter_group(ParameterGroupName=None, ResetAllParameters=None, Parameters=None):
    """Reset parameters of a cluster parameter group to their defaults.

    Sets the chosen parameters back to their default values and their
    source to ``engine-default``. To reset the entire group, use
    ``ResetAllParameters``. Associated clusters must be rebooted for the
    changes to take effect.

    See also: AWS API Documentation.

    :type ParameterGroupName: string
    :param ParameterGroupName: [REQUIRED] The name of the cluster
        parameter group to reset.
    :type ResetAllParameters: boolean
    :param ResetAllParameters: If true, reset every parameter in the
        group to its default value. Default: true.
    :type Parameters: list
    :param Parameters: Names of the parameters to reset; required (at
        least one) when ``ResetAllParameters`` is not used, and limited
        to 20 per request. Each entry is a dict describing one parameter:

        - ``ParameterName`` (string) -- the parameter's name.
        - ``ParameterValue`` (string) -- the parameter's value.
        - ``Description`` (string) -- a description of the parameter.
        - ``Source`` (string) -- where the value came from, such as
          ``engine-default`` or ``user``.
        - ``DataType`` (string) -- the parameter's data type.
        - ``AllowedValues`` (string) -- the valid range of values.
        - ``ApplyType`` (string) -- ``static`` | ``dynamic``; static
          properties require a cluster reboot to take effect.
        - ``IsModifiable`` (boolean) -- whether the parameter may be
          changed at all.
        - ``MinimumEngineVersion`` (string) -- earliest engine version
          the parameter applies to.
    :rtype: dict
    :return: ``{'ParameterGroupName': 'string',
        'ParameterGroupStatus': 'string'}``
    """
    pass
def restore_from_cluster_snapshot(ClusterIdentifier=None, SnapshotIdentifier=None, SnapshotClusterIdentifier=None, Port=None, AvailabilityZone=None, AllowVersionUpgrade=None, ClusterSubnetGroupName=None, PubliclyAccessible=None, OwnerAccount=None, HsmClientCertificateIdentifier=None, HsmConfigurationIdentifier=None, ElasticIp=None, ClusterParameterGroupName=None, ClusterSecurityGroups=None, VpcSecurityGroupIds=None, PreferredMaintenanceWindow=None, AutomatedSnapshotRetentionPeriod=None, KmsKeyId=None, NodeType=None, EnhancedVpcRouting=None, AdditionalInfo=None, IamRoles=None):
    """Create a new Amazon Redshift cluster from an existing snapshot.

    The restored cluster keeps the configuration of the original cluster,
    except that it is created with the default cluster security and parameter
    groups (use ModifyCluster afterwards to change them). When restoring into
    a VPC, a ClusterSubnetGroupName must be supplied. DS node types may be
    restored into another DS node type of the same size via NodeType.

    Required parameters:
        ClusterIdentifier (string): identifier of the cluster to create
            (1-63 lowercase alphanumeric characters or hyphens, first
            character a letter, no trailing or consecutive hyphens, unique
            within the AWS account).
        SnapshotIdentifier (string): name of the snapshot to restore from
            (not case sensitive), e.g. my-snapshot-id.

    Optional parameters (defaults generally come from the snapshot/original
    cluster):
        SnapshotClusterIdentifier (string): cluster the source snapshot was
            created from; required when the caller's IAM policy restricts
            snapshot resources by cluster name.
        Port (integer): port for connections; must be between 1115 and 65535.
        AvailabilityZone (string): EC2 Availability Zone, e.g. us-east-1a.
        AllowVersionUpgrade (boolean): allow major engine upgrades during the
            maintenance window (default true).
        ClusterSubnetGroupName (string): subnet group for a VPC restore.
        PubliclyAccessible (boolean): allow access from a public network.
        OwnerAccount (string): account that created/copied the snapshot;
            required when restoring a snapshot you do not own.
        HsmClientCertificateIdentifier, HsmConfigurationIdentifier (string):
            HSM settings used to retrieve/store data encryption keys.
        ElasticIp (string): elastic IP (EIP) address for the cluster.
        ClusterParameterGroupName (string): parameter group to associate
            (1-255 alphanumeric characters or hyphens, starts with a letter).
        ClusterSecurityGroups (list of string): non-VPC security groups.
        VpcSecurityGroupIds (list of string): VPC security groups.
        PreferredMaintenanceWindow (string): weekly UTC window in
            ddd:hh24:mi-ddd:hh24:mi format, minimum 30 minutes.
        AutomatedSnapshotRetentionPeriod (integer): 0-35 days; 0 disables
            automated snapshots.
        KmsKeyId (string): KMS key for encrypting a shared-snapshot restore.
        NodeType (string): node type to provision (DS types may change size
            class; DC types must restore into the same type and size).
        EnhancedVpcRouting (boolean): enable enhanced VPC routing (requires
            a VPC; default false).
        AdditionalInfo (string): reserved.
        IamRoles (list of string): up to 10 IAM role ARNs the cluster may use
            to access other AWS services.

    Returns:
        dict: {'Cluster': {...}} describing the restored cluster (identifier,
        node type, status, endpoint, security/parameter groups, pending
        modified values, restore status, HSM/snapshot-copy status, tags,
        KMS key and IAM roles).

    See also: AWS API Documentation.
    """
    pass
def restore_table_from_cluster_snapshot(ClusterIdentifier=None, SnapshotIdentifier=None, SourceDatabaseName=None, SourceSchemaName=None, SourceTableName=None, TargetDatabaseName=None, TargetSchemaName=None, NewTableName=None):
    """Create a new table from a table in an Amazon Redshift cluster snapshot.

    The new table must be created within the same cluster the snapshot was
    taken from, and NewTableName must not collide with an existing table --
    rename or drop the original table first if you intend to replace it with
    the restored copy.

    Required parameters:
        ClusterIdentifier (string): cluster to restore the table to.
        SnapshotIdentifier (string): snapshot to restore from; must have been
            created from the cluster named by ClusterIdentifier.
        SourceDatabaseName (string): database containing the source table.
        SourceTableName (string): name of the source table to restore.
        NewTableName (string): name of the table to create.

    Optional parameters:
        SourceSchemaName (string): source schema (default 'public').
        TargetDatabaseName (string): database to restore the table into.
        TargetSchemaName (string): schema to restore the table into.

    Returns:
        dict: {'TableRestoreStatus': {...}} containing the request id, a
        Status of 'PENDING' | 'IN_PROGRESS' | 'SUCCEEDED' | 'FAILED' |
        'CANCELED', a message, request time, progress counters and the
        echoed source/target identifiers.

    See also: AWS API Documentation.
    """
    pass
def revoke_cluster_security_group_ingress(ClusterSecurityGroupName=None, CIDRIP=None, EC2SecurityGroupName=None, EC2SecurityGroupOwnerId=None):
    """Revoke an ingress rule from an Amazon Redshift cluster security group.

    Reverses a rule previously added with AuthorizeClusterSecurityGroupIngress
    for either an IP (CIDR) range or an Amazon EC2 security group.

    Required parameters:
        ClusterSecurityGroupName (string): security group to revoke the
            ingress rule from.

    Optional parameters (the two rule selectors are mutually exclusive):
        CIDRIP (string): CIDR block whose access is revoked; cannot be
            combined with the EC2SecurityGroup* parameters.
        EC2SecurityGroupName (string): EC2 security group whose access is
            revoked; requires EC2SecurityGroupOwnerId and excludes CIDRIP.
        EC2SecurityGroupOwnerId (string): AWS account number that owns the
            EC2 security group (an access key id is not acceptable),
            e.g. 111122223333.

    Returns:
        dict: {'ClusterSecurityGroup': {...}} with the group name,
        description, remaining EC2 security groups, IP ranges and tags.

    See also: AWS API Documentation.
    """
    pass
def revoke_snapshot_access(SnapshotIdentifier=None, SnapshotClusterIdentifier=None, AccountWithRestoreAccess=None):
    """Remove an AWS account's permission to restore the specified snapshot.

    If the account is currently restoring the snapshot, that restore still
    runs to completion.

    Required parameters:
        SnapshotIdentifier (string): snapshot the account can no longer
            access.
        AccountWithRestoreAccess (string): AWS customer account that loses
            the ability to restore the snapshot.

    Optional parameters:
        SnapshotClusterIdentifier (string): cluster the snapshot was created
            from; required when the caller's IAM policy restricts snapshot
            resources by cluster name.

    Returns:
        dict: {'Snapshot': {...}} describing the snapshot: identifiers,
        creation times, status, port, availability zone, node type/count,
        encryption settings, remaining AccountsWithRestoreAccess, backup
        size/progress counters, restorable node types and tags.

    See also: AWS API Documentation.
    """
    pass
def rotate_encryption_key(ClusterIdentifier=None):
    """Rotate the encryption keys for an Amazon Redshift cluster.

    Required parameters:
        ClusterIdentifier (string): unique identifier of the cluster whose
            encryption keys should be rotated; must name a valid cluster
            that has encryption enabled.

    Returns:
        dict: {'Cluster': {...}} describing the cluster: identifier, node
        type, status, endpoint, security/parameter groups (whose
        ParameterApplyStatus values include in-sync, pending-reboot,
        applying, invalid-parameter, apply-deferred, apply-error and
        unknown-error), pending modified values, restore/HSM/snapshot-copy
        status, tags, KMS key and IAM roles.

    See also: AWS API Documentation.
    """
    pass
| 43.449137
| 649
| 0.614168
| 23,779
| 236,624
| 6.098154
| 0.050423
| 0.020951
| 0.009613
| 0.005648
| 0.771647
| 0.724477
| 0.696424
| 0.681135
| 0.666363
| 0.644289
| 0
| 0.010229
| 0.318687
| 236,624
| 5,445
| 650
| 43.457117
| 0.889241
| 0.877324
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.522388
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c71593ad01c2038fcf56371da542a88a2e3c94c4
| 124
|
py
|
Python
|
totalupdate.py
|
manhof/test_isp_curl
|
eb5197f30cc72f0527ee66d988b49b5dba9131da
|
[
"MIT"
] | 1
|
2016-05-05T15:32:24.000Z
|
2016-05-05T15:32:24.000Z
|
totalupdate.py
|
manhof/test_isp_curl
|
eb5197f30cc72f0527ee66d988b49b5dba9131da
|
[
"MIT"
] | null | null | null |
totalupdate.py
|
manhof/test_isp_curl
|
eb5197f30cc72f0527ee66d988b49b5dba9131da
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# Sequentially runs the two maintenance scripts: copy CSV data, then upload
# it to the database.  The exit status of each command is ignored.
import os
# Runs under sudo; the invoking user must have the necessary privileges.
os.system("sudo /usr/local/bin/csv_copy.py")
# NOTE(review): "datbase" looks like a typo for "database" -- confirm the
# installed script really has this filename before renaming anything.
os.system("/usr/local/bin/datbase_upload.py")
# NOTE(review): bare `exit` only evaluates the builtin and does nothing;
# it was probably meant to be `exit()` (or can simply be removed).
exit
| 20.666667
| 45
| 0.741935
| 23
| 124
| 3.913043
| 0.608696
| 0.177778
| 0.244444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056452
| 124
| 5
| 46
| 24.8
| 0.769231
| 0.129032
| 0
| 0
| 0
| 0
| 0.588785
| 0.542056
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c7328419e4affcbeb6d69f9b337e3f95f4c4f828
| 53
|
py
|
Python
|
WEEKS/wk17/CodeSignal-Solutions/Core_017_-_killKthBit.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/wk17/CodeSignal-Solutions/Core_017_-_killKthBit.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/wk17/CodeSignal-Solutions/Core_017_-_killKthBit.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
def killKthBit(n, k):
    """Return *n* with its k-th least-significant bit (1-indexed) cleared."""
    kth_bit = 2 ** (k - 1)
    # AND with the complement of the single-bit mask zeroes exactly that bit.
    return n & ~kth_bit
| 17.666667
| 30
| 0.471698
| 9
| 53
| 2.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0.283019
| 53
| 2
| 31
| 26.5
| 0.605263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
c735c8e67ca6b93112def52e1d0952ffd43b82dc
| 13,746
|
py
|
Python
|
BNGBS_DEMO/DEMO/classification.py
|
LiangjunFeng/BNGBS
|
c7e99263b87c3d739d48f8964b26213f8a0d96bf
|
[
"Apache-2.0"
] | 1
|
2022-03-16T12:00:41.000Z
|
2022-03-16T12:00:41.000Z
|
BNGBS_DEMO/DEMO/classification.py
|
LiangjunFeng/BNGBS
|
c7e99263b87c3d739d48f8964b26213f8a0d96bf
|
[
"Apache-2.0"
] | null | null | null |
BNGBS_DEMO/DEMO/classification.py
|
LiangjunFeng/BNGBS
|
c7e99263b87c3d739d48f8964b26213f8a0d96bf
|
[
"Apache-2.0"
] | 1
|
2020-03-27T15:49:16.000Z
|
2020-03-27T15:49:16.000Z
|
import numpy as np
from BNGBS_DEMO.Algorithm.BNGBS import GradientBoostingNet
import os
path = os.path.dirname(os.getcwd())
def show_accuracy(predictLabel, Label):
    """Return the fraction of predictions that match the ground truth.

    Both arguments are numpy array-likes (anything with ``.tolist()``);
    ``Label`` is flattened first.  The ratio is rounded to 5 decimals.
    Raises ZeroDivisionError for an empty label set (same as before).
    """
    Label = np.ravel(Label).tolist()
    predictLabel = predictLabel.tolist()
    # Count positions where prediction equals truth.
    # NOTE(review): assumes both sequences have equal length — the original
    # indexed predictLabel by len(Label), which implies the same assumption.
    hits = sum(1 for pred, true in zip(predictLabel, Label) if pred == true)
    return round(hits / len(Label), 5)
# Benchmark configuration: one entry per dataset, preserving the exact
# per-dataset hyper-parameters and output strings of the original script.
# Fields: (dataset code, info line, maptimes, enhencetimes, batchsize, reg,
#          enhence activation, accuracy-message template, trailing blank line?)
EXPERIMENTS = [
    ("BRE", "basic information of BRE dataset:", 20, 20, 50, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', False),
    ("DIG", "basic information of DIG dataset:", 20, 20, 50, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', False),
    ("YAL", "basic information of YAL dataset (Processed by PCA):", 10, 10, 100, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', False),
    # NOTE: the stray space before ':' below reproduces the original output.
    ("MPF", "basic information of MPF dataset :", 20, 20, 50, 1e-3, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', False),
    ("DIA", "basic information of DIA dataset (after removing seven text dims):", 10, 10, 100, 1e-3, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', False),
    ("SPF", "basic information of SPF dataset:", 50, 50, 20, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', False),
    ("USP", "basic information of USP dataset:", 20, 20, 50, 1, 'sigmoid',
     'the accuracy of BNGBS on testdata is {0}', False),
    ("CAE", "basic information of CAE dataset:", 20, 20, 50, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', True),
    ("HTR", "basic information of HTR dataset:", 20, 20, 50, 1, 'tanh',
     'the accuracy of BNGBS is {0}', True),
    ("MUS", "basic information of MUS dataset:", 10, 10, 100, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', True),
    ("CRS", "basic information of CRS dataset:", 20, 20, 50, 1, 'tanh',
     'the accuracy of BNGBS on testdata is {0}', True),
    ("BAS", "basic information of BAS dataset:", 50, 50, 20, 2, 'tanh',
     'the accuracy of BNGBS is {0}', True),
]

# Run each benchmark in order: load the four .npy splits, print the dataset
# summary, train a GradientBoostingNet with the dataset-specific settings,
# and report test accuracy — identical behavior to the original copy-pasted
# sections, deduplicated into one loop.
for (code, info, maptimes, enhencetimes, batchsize, reg,
     enhence_function, acc_template, trailing_blank) in EXPERIMENTS:
    print('/*===================' + code + ' dataset=========================*/')
    traindata = np.load(path + '/Data/' + code + '/' + code + '_traindata.npy')
    trainlabel = np.load(path + '/Data/' + code + '/' + code + '_trainlabel.npy')
    testdata = np.load(path + '/Data/' + code + '/' + code + '_testdata.npy')
    testlabel = np.load(path + '/Data/' + code + '/' + code + '_testlabel.npy')
    print(info)
    print("the traindata shape : ", traindata.shape)
    print("the testdata shape : ", testdata.shape)
    print()
    print("BNGBS begins training...")
    print()
    gbn = GradientBoostingNet(column_sampling=1,
                              row_sampling=1,
                              learning_rate=0.1,
                              n_estimators=10,
                              maptimes=maptimes,
                              enhencetimes=enhencetimes,
                              map_function='tanh',
                              enhence_function=enhence_function,
                              batchsize=batchsize,
                              reg=reg)
    gbn.fit(traindata, trainlabel)
    predictlabel = gbn.predict(testdata)
    print(acc_template.format(show_accuracy(predictlabel, testlabel)))
    # Some sections of the original emitted a blank line after the accuracy.
    if trailing_blank:
        print()
| 40.075802
| 95
| 0.552233
| 1,432
| 13,746
| 5.207402
| 0.067737
| 0.038621
| 0.064369
| 0.090117
| 0.946225
| 0.946225
| 0.803004
| 0.634035
| 0.634035
| 0.633365
| 0
| 0.018087
| 0.292085
| 13,746
| 342
| 96
| 40.192982
| 0.748227
| 0
| 0
| 0.718033
| 0
| 0
| 0.27821
| 0.144925
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003279
| false
| 0
| 0.009836
| 0
| 0.016393
| 0.331148
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c74bc97690bdc6f4d7fe7ddf4c2528330a95cb5c
| 27
|
py
|
Python
|
propertyappraisal/appraisal/controllers.py
|
SriramSrinivas/Real-Estate-WebScraping
|
d91ca896d2c69fd6b03aed59f34a241b51cedfce
|
[
"MIT"
] | null | null | null |
propertyappraisal/appraisal/controllers.py
|
SriramSrinivas/Real-Estate-WebScraping
|
d91ca896d2c69fd6b03aed59f34a241b51cedfce
|
[
"MIT"
] | null | null | null |
propertyappraisal/appraisal/controllers.py
|
SriramSrinivas/Real-Estate-WebScraping
|
d91ca896d2c69fd6b03aed59f34a241b51cedfce
|
[
"MIT"
] | 1
|
2018-12-06T16:31:26.000Z
|
2018-12-06T16:31:26.000Z
|
def test(request):
    """Placeholder controller; accepts the request object and does nothing."""
    return None
| 13.5
| 18
| 0.666667
| 4
| 27
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.222222
| 27
| 2
| 19
| 13.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
c75819373d911e0848b81d5627011e7ac3ccdd37
| 38
|
py
|
Python
|
maml_trpo/meta_learner/__init__.py
|
adityabingi/maml-trpo-metaworld
|
9247018e72563a5cbf3df9ce7c384aef9812d18b
|
[
"MIT"
] | null | null | null |
maml_trpo/meta_learner/__init__.py
|
adityabingi/maml-trpo-metaworld
|
9247018e72563a5cbf3df9ce7c384aef9812d18b
|
[
"MIT"
] | null | null | null |
maml_trpo/meta_learner/__init__.py
|
adityabingi/maml-trpo-metaworld
|
9247018e72563a5cbf3df9ce7c384aef9812d18b
|
[
"MIT"
] | null | null | null |
from .meta_learner import MetaLearner
| 19
| 37
| 0.868421
| 5
| 38
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c7ad7620218ba89f6ecb031c249ce0f46f03a6e4
| 31
|
py
|
Python
|
loo/__main__.py
|
LooMan/looman-cli
|
7f8d0498b2e070a8d226de421744f247e249334c
|
[
"MIT"
] | null | null | null |
loo/__main__.py
|
LooMan/looman-cli
|
7f8d0498b2e070a8d226de421744f247e249334c
|
[
"MIT"
] | null | null | null |
loo/__main__.py
|
LooMan/looman-cli
|
7f8d0498b2e070a8d226de421744f247e249334c
|
[
"MIT"
] | null | null | null |
from loo.cli import main

# Entry-point guard: `python -m loo` still runs the CLI (the module executes
# as "__main__"), but merely importing loo.__main__ no longer triggers it.
if __name__ == "__main__":
    main()
| 15.5
| 24
| 0.774194
| 6
| 31
| 4
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 2
| 25
| 15.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
c7ef303f44c6eea95784e590390c569419d0dc6c
| 207
|
py
|
Python
|
src/schemas/member.py
|
jabuckle26/library-api
|
f52970585959be202b85a26e419fc994c273f21e
|
[
"MIT"
] | null | null | null |
src/schemas/member.py
|
jabuckle26/library-api
|
f52970585959be202b85a26e419fc994c273f21e
|
[
"MIT"
] | null | null | null |
src/schemas/member.py
|
jabuckle26/library-api
|
f52970585959be202b85a26e419fc994c273f21e
|
[
"MIT"
] | null | null | null |
from pydantic import BaseModel
class Member(BaseModel):
    """Library member as exposed by the API, including its database id."""
    # Record identifier — presumably the primary key; confirm against the ORM model.
    id: int
    first_name: str
    last_name: str
    email: str
class MemberIn(BaseModel):
    """Inbound payload for creating a member: Member's fields minus the id."""
    first_name: str
    last_name: str
    email: str
| 13.8
| 30
| 0.676329
| 28
| 207
| 4.857143
| 0.5
| 0.205882
| 0.176471
| 0.235294
| 0.455882
| 0.455882
| 0.455882
| 0.455882
| 0
| 0
| 0
| 0
| 0.26087
| 207
| 14
| 31
| 14.785714
| 0.888889
| 0
| 0
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
1befe08a00e6dadaad31f624b08afa21913dd20b
| 4,889
|
py
|
Python
|
tests/test_routes.py
|
Stock-Stalker/robinhood
|
3861b00faad165230b68e72f9712f82714c779de
|
[
"MIT"
] | null | null | null |
tests/test_routes.py
|
Stock-Stalker/robinhood
|
3861b00faad165230b68e72f9712f82714c779de
|
[
"MIT"
] | null | null | null |
tests/test_routes.py
|
Stock-Stalker/robinhood
|
3861b00faad165230b68e72f9712f82714c779de
|
[
"MIT"
] | null | null | null |
"""Test robinhood/routes.py functions."""
import json
import unittest
from app import app
class TestRoutes(unittest.TestCase):
    """Routes test class.

    Exercises the routes in robinhood/routes.py through the Flask test
    client.  Private helpers factor out the repeated request/assert steps
    that were copy-pasted across the original test methods.
    """

    def _get(self, path):
        """Issue a GET against a fresh test client and return the response."""
        return app.test_client().get(path)

    def _get_historical(self, symbol, span):
        """GET the historical-data route for *symbol* over *span*."""
        return self._get("/robinhood/{0}/historical/{1}".format(symbol, span))

    def _assert_json_dict_ok(self, res):
        """Assert *res* decodes to a JSON object and has status 200."""
        result_json = json.loads(res.get_data().decode("utf-8"))
        self.assertIsInstance(result_json, dict)
        self.assertEqual(res.status_code, 200)

    def test_search_stocks(self):
        """Test search stocks route."""
        res = self._get("/robinhood/search/{0}".format("AAPL"))
        self._assert_json_dict_ok(res)

    def test_get_current_price(self):
        """Test get current price route."""
        res = self._get("/robinhood/{0}/price".format("AAPL"))
        self._assert_json_dict_ok(res)

    def test_get_current_price_exceeded_symbol(self):
        """Test get current price route with exceeded length symbol."""
        res = self._get("/robinhood/{0}/price".format("AAPL-----"))
        self.assertEqual(res.status_code, 400)

    def test_get_company_name(self):
        """Test get company name route."""
        res = self._get("/robinhood/{0}/name".format("AAPL"))
        expected_json = {"companyName": "Apple"}
        result_json = json.loads(res.get_data().decode("utf-8"))
        self.assertEqual(expected_json, result_json)
        self.assertEqual(res.status_code, 200)

    def test_get_company_name_exceeded_symbol(self):
        """Test get company name route with exceeded length symbol."""
        res = self._get("/robinhood/{0}/name".format("AAPL-----"))
        self.assertEqual(res.status_code, 400)

    def test_get_historical_day(self):
        """Test get historical route for day."""
        self._assert_json_dict_ok(self._get_historical("AAPL", "day"))

    def test_get_historical_week(self):
        """Test get historical route for week."""
        self._assert_json_dict_ok(self._get_historical("AAPL", "week"))

    def test_get_historical_3month(self):
        """Test get historical route for 3month."""
        self._assert_json_dict_ok(self._get_historical("AAPL", "3month"))

    def test_get_historical_year(self):
        """Test get historical route for year."""
        self._assert_json_dict_ok(self._get_historical("AAPL", "year"))

    def test_get_historical_5year(self):
        """Test get historical route for 5year."""
        self._assert_json_dict_ok(self._get_historical("AAPL", "5year"))

    def test_get_historical_invalid_span(self):
        """Test get historical route for invalid span."""
        res = self._get_historical("AAPL", "decade")
        self.assertEqual(res.status_code, 400)

    def test_get_historical_exceeded_symbol(self):
        """Test get historical route with exceeded length symbol."""
        res = self._get_historical("AAPL-----", "day")
        self.assertEqual(res.status_code, 400)

    def test_get_historical_exceeded_span(self):
        """Test get historical route with exceeded length span."""
        res = self._get_historical("AAPL", "----------")
        self.assertEqual(res.status_code, 400)
| 31.541935
| 74
| 0.609736
| 585
| 4,889
| 4.923077
| 0.102564
| 0.058333
| 0.094444
| 0.072222
| 0.837847
| 0.818403
| 0.720486
| 0.707292
| 0.676736
| 0.6625
| 0
| 0.020098
| 0.246881
| 4,889
| 154
| 75
| 31.746753
| 0.762086
| 0.12027
| 0
| 0.632653
| 0
| 0
| 0.11716
| 0.059882
| 0
| 0
| 0
| 0
| 0.214286
| 1
| 0.132653
| false
| 0
| 0.030612
| 0
| 0.173469
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
40023ba518c2613852f02235802f4a8e87b38715
| 1,565
|
py
|
Python
|
cogs/other.py
|
hyarsan/mewtwo-bot
|
26bd66d524c004e26e228e013e51092e1c0b10d3
|
[
"MIT"
] | null | null | null |
cogs/other.py
|
hyarsan/mewtwo-bot
|
26bd66d524c004e26e228e013e51092e1c0b10d3
|
[
"MIT"
] | null | null | null |
cogs/other.py
|
hyarsan/mewtwo-bot
|
26bd66d524c004e26e228e013e51092e1c0b10d3
|
[
"MIT"
] | null | null | null |
import discord
from discord.ext import commands
import mewtwo_config as config
# Bot version string, shown in the footer of report embeds below.
botver = "Mewtwo v2.0"
class Other(commands.Cog):
    """Miscellaneous commands: forward bug reports and suggestions to the owner."""

    def __init__(self, bot):
        self.bot = bot

    async def _forward_to_owner(self, ctx, arg, title, color, kind):
        """DM an embed built from *arg* to the configured bot owner, then
        confirm to the invoking user.  *kind* names the submission type
        ("bug report" / "suggestion") inside the confirmation message,
        reproducing the original wording exactly."""
        dev = self.bot.get_user(config.owner)
        embed = discord.Embed(title=title, description=arg, color=color)
        embed.set_footer(text="Submitted by " + ctx.author.name + "#" + ctx.author.discriminator + " - " + botver + " by sks316#2523", icon_url=ctx.author.avatar_url)
        await dev.send(embed=embed)
        await ctx.send("✅ Successfully sent in your " + kind + "! Thank you for helping to make Mewtwo better. \nIf you want to provide further details, send a DM to **" + dev.name + "#" + dev.discriminator + "**.")

    # No docstrings on the command callbacks: discord.py would surface them
    # as help text, which the original commands did not have.
    @commands.command()
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def bug(self, ctx, *, arg):
        await self._forward_to_owner(ctx, arg, "⚠ Bug Report", 0xff0000, "bug report")

    @commands.command()
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def suggest(self, ctx, *, arg):
        await self._forward_to_owner(ctx, arg, "💭 Suggestion", 0x7289da, "suggestion")
def setup(bot):
    """Extension entry point: register the Other cog on *bot*."""
    cog = Other(bot)
    bot.add_cog(cog)
| 52.166667
| 213
| 0.66901
| 219
| 1,565
| 4.73516
| 0.356164
| 0.052073
| 0.044359
| 0.059788
| 0.750241
| 0.750241
| 0.750241
| 0.750241
| 0.750241
| 0.750241
| 0
| 0.025357
| 0.19361
| 1,565
| 30
| 214
| 52.166667
| 0.793185
| 0
| 0
| 0.4
| 0
| 0.08
| 0.249681
| 0
| 0
| 0
| 0.010217
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.12
| 0
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
402b90d998cab9172bd8bb62c82f53d972809363
| 37
|
py
|
Python
|
tests/__init__.py
|
ninadpage/contact-book-python
|
5247a7d13df0c952da59fb3c0875ab4002a3fc14
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
ninadpage/contact-book-python
|
5247a7d13df0c952da59fb3c0875ab4002a3fc14
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
ninadpage/contact-book-python
|
5247a7d13df0c952da59fb3c0875ab4002a3fc14
|
[
"MIT"
] | null | null | null |
# encoding=utf-8
# Author: ninadpage
| 12.333333
| 19
| 0.72973
| 5
| 37
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.135135
| 37
| 2
| 20
| 18.5
| 0.8125
| 0.864865
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4038b696d9b65e9e0838cbf5a7bf7b8728f0bb62
| 166
|
py
|
Python
|
pype/modules/standalonepublish/__init__.py
|
Yowza-Animation/pype
|
0212fa8357e6ffd490230193e69e101aaf262587
|
[
"MIT"
] | null | null | null |
pype/modules/standalonepublish/__init__.py
|
Yowza-Animation/pype
|
0212fa8357e6ffd490230193e69e101aaf262587
|
[
"MIT"
] | null | null | null |
pype/modules/standalonepublish/__init__.py
|
Yowza-Animation/pype
|
0212fa8357e6ffd490230193e69e101aaf262587
|
[
"MIT"
] | null | null | null |
from .standalonepublish_module import StandAlonePublishModule
def tray_init(tray_widget, main_widget):
    """Tray entry point: build and return the standalone publish module.

    NOTE(review): the module is constructed with (main_widget, tray_widget),
    the reverse of this function's parameter order — confirm that is intended.
    """
    module = StandAlonePublishModule(main_widget, tray_widget)
    return module
| 27.666667
| 61
| 0.855422
| 18
| 166
| 7.555556
| 0.611111
| 0.147059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096386
| 166
| 5
| 62
| 33.2
| 0.906667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
405b39ddf3f3e45acbd329709f2d7bcf1d564af2
| 99
|
py
|
Python
|
Scripts/commons.py
|
Ninja91/DnCensus
|
98f45d63ef739fd0d61428ed1c4581593443c6db
|
[
"MIT"
] | 1
|
2020-08-19T00:13:45.000Z
|
2020-08-19T00:13:45.000Z
|
Scripts/commons.py
|
Ninja91/DnCensus
|
98f45d63ef739fd0d61428ed1c4581593443c6db
|
[
"MIT"
] | 1
|
2015-10-14T02:25:37.000Z
|
2015-10-14T02:25:37.000Z
|
Scripts/commons.py
|
Ninja91/DnCensus
|
98f45d63ef739fd0d61428ed1c4581593443c6db
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
__author__ = 'Nitin'
def count_iterable(i):
    """Return how many items iterable *i* yields.

    Works on any iterable; one-shot iterators/generators are consumed.
    """
    return sum(1 for _ in i)
| 14.142857
| 28
| 0.626263
| 16
| 99
| 3.5625
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 0.242424
| 99
| 7
| 28
| 14.142857
| 0.746667
| 0.161616
| 0
| 0
| 0
| 0
| 0.064935
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
4065dbfb8854cf8dfcfe92b95e9644c996956434
| 2,220
|
py
|
Python
|
continuum/datasets/__init__.py
|
arthurdouillard/continuum
|
83e7437944486cabd5e4c149d41816dbef49b7bf
|
[
"MIT"
] | 4
|
2020-04-15T14:31:42.000Z
|
2020-04-24T17:07:34.000Z
|
continuum/datasets/__init__.py
|
arthurdouillard/continuum
|
83e7437944486cabd5e4c149d41816dbef49b7bf
|
[
"MIT"
] | 18
|
2020-04-15T14:57:27.000Z
|
2020-05-02T14:05:36.000Z
|
continuum/datasets/__init__.py
|
arthurdouillard/continuum
|
83e7437944486cabd5e4c149d41816dbef49b7bf
|
[
"MIT"
] | 1
|
2020-04-15T15:50:28.000Z
|
2020-04-15T15:50:28.000Z
|
# pylint: disable=C0401
# flake8: noqa
from continuum.datasets.base import (
ImageFolderDataset, InMemoryDataset, PyTorchDataset, _ContinuumDataset, H5Dataset, _AudioDataset
)
from continuum.datasets.cifar100 import CIFAR100
from continuum.datasets.core50 import (Core50, Core50v2_79, Core50v2_196, Core50v2_391)
from continuum.datasets.fellowship import (CIFARFellowship, Fellowship, MNISTFellowship)
from continuum.datasets.imagenet import ImageNet100, ImageNet1000, TinyImageNet200
from continuum.datasets.synbols import Synbols
from continuum.datasets.nlp import MultiNLI
from continuum.datasets.pytorch import (
CIFAR10, EMNIST, KMNIST, MNIST, QMNIST, FashionMNIST
)
from continuum.datasets.svhn import SVHN
from continuum.datasets.colored_mnist import ColoredMNIST
from continuum.datasets.rainbow_mnist import RainbowMNIST
from continuum.datasets.cub200 import CUB200
from continuum.datasets.awa2 import AwA2
from continuum.datasets.pascalvoc import PascalVOC2012
from continuum.datasets.stream51 import Stream51
from continuum.datasets.dtd import DTD
from continuum.datasets.vlcs import VLCS
from continuum.datasets.pacs import PACS
from continuum.datasets.domain_net import DomainNet
from continuum.datasets.office_home import OfficeHome
from continuum.datasets.terra_incognita import TerraIncognita
from continuum.datasets.domain_net import DomainNet
from continuum.datasets.rainbow_mnist import RainbowMNIST
from continuum.datasets.car196 import Car196
from continuum.datasets.caltech import Caltech101, Caltech256
from continuum.datasets.fgvc_aircraft import FGVCAircraft
from continuum.datasets.stl10 import STL10
from continuum.datasets.food101 import Food101
from continuum.datasets.omniglot import Omniglot
from continuum.datasets.birdsnap import Birdsnap
from continuum.datasets.ctrl import CTRL, CTRLplus, CTRLminus, CTRLin, CTRLout, CTRLplastic
from continuum.datasets.flowers102 import OxfordFlower102
from continuum.datasets.oxford_pet import OxfordPet
from continuum.datasets.gtsrb import GTSRB
from continuum.datasets.sun397 import SUN397
from continuum.datasets.fer2013 import FER2013
from continuum.datasets.eurosat import EuroSAT
from continuum.datasets.fluentspeech import FluentSpeech
| 49.333333
| 100
| 0.862613
| 268
| 2,220
| 7.093284
| 0.328358
| 0.259863
| 0.419779
| 0.029458
| 0.13414
| 0.13414
| 0.13414
| 0.13414
| 0.13414
| 0.13414
| 0
| 0.050814
| 0.086937
| 2,220
| 44
| 101
| 50.454545
| 0.887025
| 0.015315
| 0
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.904762
| 0
| 0.904762
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
dc129d0a326c32f583c5667f40185b5afb60f6b2
| 153
|
py
|
Python
|
SQLiteWaterUsage/cython-setup.py
|
Ivanov1ch/Building-Energy-Bootcamp-2019
|
3b438b9fa4595fe437086db56987660850799916
|
[
"MIT"
] | null | null | null |
SQLiteWaterUsage/cython-setup.py
|
Ivanov1ch/Building-Energy-Bootcamp-2019
|
3b438b9fa4595fe437086db56987660850799916
|
[
"MIT"
] | null | null | null |
SQLiteWaterUsage/cython-setup.py
|
Ivanov1ch/Building-Energy-Bootcamp-2019
|
3b438b9fa4595fe437086db56987660850799916
|
[
"MIT"
] | null | null | null |
import os
from distutils.core import setup
from Cython.Build import cythonize
# Compile 'SQLiteWaterUsage/main.py' into a C extension module via Cython.
setup(ext_modules=cythonize(os.path.join('SQLiteWaterUsage', 'main.py')))
| 25.5
| 73
| 0.803922
| 22
| 153
| 5.545455
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084967
| 153
| 5
| 74
| 30.6
| 0.871429
| 0
| 0
| 0
| 0
| 0
| 0.150327
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
dc18c0c97918e212e271787e5e8ae6858b4f5ef9
| 44
|
py
|
Python
|
pydomo/utilities/__init__.py
|
bradleyhurley/domo-python-sdk
|
14a1b5367ad4ae8f2a4f05e46211ae1a665e31ef
|
[
"MIT"
] | 81
|
2017-04-21T20:49:01.000Z
|
2022-03-29T20:38:36.000Z
|
pydomo/utilities/__init__.py
|
bradleyhurley/domo-python-sdk
|
14a1b5367ad4ae8f2a4f05e46211ae1a665e31ef
|
[
"MIT"
] | 57
|
2017-05-11T15:55:00.000Z
|
2022-02-18T00:20:45.000Z
|
pydomo/utilities/__init__.py
|
bradleyhurley/domo-python-sdk
|
14a1b5367ad4ae8f2a4f05e46211ae1a665e31ef
|
[
"MIT"
] | 66
|
2017-05-31T14:39:48.000Z
|
2022-03-25T22:06:18.000Z
|
from .UtilitiesClient import UtilitiesClient
| 44
| 44
| 0.909091
| 4
| 44
| 10
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 44
| 1
| 44
| 44
| 0.97561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
905656b816627a1c8f038cdf39f9d0a5cba5a720
| 23
|
py
|
Python
|
hshsh.py
|
wang130520/Git_student
|
db3c4fb990a7a7e648aadfcc6e53b0a3464d54b6
|
[
"MIT"
] | null | null | null |
hshsh.py
|
wang130520/Git_student
|
db3c4fb990a7a7e648aadfcc6e53b0a3464d54b6
|
[
"MIT"
] | null | null | null |
hshsh.py
|
wang130520/Git_student
|
db3c4fb990a7a7e648aadfcc6e53b0a3464d54b6
|
[
"MIT"
] | null | null | null |
# Minimal smoke-test script: emit a fixed greeting to stdout.
greeting = "hello world1!"
print(greeting)
| 11.5
| 22
| 0.695652
| 3
| 23
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.086957
| 23
| 1
| 23
| 23
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0.565217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
907b6128771f58d50460f21f137fd871c5d2eb30
| 164
|
py
|
Python
|
src/nodeconductor_paas_oracle/__init__.py
|
opennode/nodeconductor-paas-oracle
|
799c05438265da0b328bcaa425af01e9576f57fe
|
[
"MIT"
] | null | null | null |
src/nodeconductor_paas_oracle/__init__.py
|
opennode/nodeconductor-paas-oracle
|
799c05438265da0b328bcaa425af01e9576f57fe
|
[
"MIT"
] | null | null | null |
src/nodeconductor_paas_oracle/__init__.py
|
opennode/nodeconductor-paas-oracle
|
799c05438265da0b328bcaa425af01e9576f57fe
|
[
"MIT"
] | null | null | null |
from nodeconductor import _get_version
__version__ = _get_version('nodeconductor_paas_oracle')
default_app_config = 'nodeconductor_paas_oracle.apps.OracleConfig'
| 27.333333
| 66
| 0.865854
| 19
| 164
| 6.736842
| 0.631579
| 0.15625
| 0.359375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 164
| 5
| 67
| 32.8
| 0.842105
| 0
| 0
| 0
| 0
| 0
| 0.414634
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
907dad99b045ee4bf40b3703e0aa4ce567f31bb2
| 95
|
py
|
Python
|
discord_ritoman/bot/bot.py
|
stephend017/Discord-RitoMan
|
18bea140af8a552e1cc12557977f73b2434218e4
|
[
"MIT"
] | null | null | null |
discord_ritoman/bot/bot.py
|
stephend017/Discord-RitoMan
|
18bea140af8a552e1cc12557977f73b2434218e4
|
[
"MIT"
] | 26
|
2020-11-18T05:09:34.000Z
|
2022-01-05T19:09:30.000Z
|
discord_ritoman/bot/bot.py
|
stephend017/Discord-RitoMan
|
18bea140af8a552e1cc12557977f73b2434218e4
|
[
"MIT"
] | 1
|
2020-11-28T02:47:29.000Z
|
2020-11-28T02:47:29.000Z
|
from discord.ext import commands
bot = commands.Bot(command_prefix="<@!779328785043554334> ")
| 23.75
| 60
| 0.778947
| 11
| 95
| 6.636364
| 0.818182
| 0.30137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209302
| 0.094737
| 95
| 3
| 61
| 31.666667
| 0.639535
| 0
| 0
| 0
| 0
| 0
| 0.242105
| 0.231579
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
9085d3940389251325ff39608d5c0fd48bf227ed
| 4,200
|
py
|
Python
|
tests/test_account.py
|
dmitriyVasilievich1986/sudoku-server
|
767059998ccc2493424a7da39dfb9ac4284ae8a4
|
[
"MIT"
] | null | null | null |
tests/test_account.py
|
dmitriyVasilievich1986/sudoku-server
|
767059998ccc2493424a7da39dfb9ac4284ae8a4
|
[
"MIT"
] | null | null | null |
tests/test_account.py
|
dmitriyVasilievich1986/sudoku-server
|
767059998ccc2493424a7da39dfb9ac4284ae8a4
|
[
"MIT"
] | null | null | null |
from rest_framework.test import APIClient
from account.models import Account
from django.test import TestCase
import json
API_URL = "http://localhost/api/account/"
TEST_CREATE_USERNAME = "root"
TEST_USER_USERNAME = "test"
class AccountTestModel(TestCase):
def setUp(self):
self.client = APIClient()
obj: Account = Account.create(
username=TEST_USER_USERNAME, password=TEST_USER_USERNAME)
obj.save()
self.token = f"token {obj.token}"
self._id = obj.id
def test_api_create(self):
data = {"username": TEST_CREATE_USERNAME,
"password": TEST_CREATE_USERNAME}
response = self.client.post(API_URL, data=data)
self.assertEqual(response.status_code, 201)
def test_api_create_fail(self):
data = {"username": TEST_USER_USERNAME, "password": TEST_USER_USERNAME}
response = self.client.post(API_URL, data=data)
self.assertEqual(response.status_code, 400)
response = self.client.post(
API_URL, data={"username": TEST_CREATE_USERNAME})
self.assertEqual(response.status_code, 400)
response = self.client.post(
API_URL, data={"password": TEST_CREATE_USERNAME})
self.assertEqual(response.status_code, 400)
response = self.client.post(API_URL)
self.assertEqual(response.status_code, 400)
def test_api_get(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.get(API_URL)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json().get("username"), TEST_USER_USERNAME)
def test_api_get_fail(self):
self.client.credentials(HTTP_AUTHORIZATION="token wrong token")
response = self.client.get(API_URL)
self.assertEqual(response.status_code, 403)
response = self.client.get(f"{API_URL}{self._id}/")
self.assertEqual(response.status_code, 405)
def test_api_update(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.patch(
f"{API_URL}{self._id}/", data=json.dumps({"name": "name"}), content_type="application/json")
self.assertEqual(response.status_code, 201)
self.assertEqual(response.json()["name"], "name")
response = self.client.patch(
f"{API_URL}{self._id}/", data=json.dumps({"surname": "surname"}), content_type="application/json")
self.assertEqual(response.status_code, 201)
self.assertEqual(response.json()["surname"], "surname")
response = self.client.patch(
f"{API_URL}{self._id}/", data=json.dumps({"dificulty": 10}), content_type="application/json")
self.assertEqual(response.status_code, 201)
self.assertEqual(response.json().get("dificulty"), 10)
response = self.client.patch(
f"{API_URL}{self._id}/", data=json.dumps({"help": False}), content_type="application/json")
self.assertEqual(response.status_code, 201)
self.assertEqual(response.json().get("help"), False)
def test_api_login(self):
data = json.dumps({"username": TEST_USER_USERNAME,
"password": TEST_USER_USERNAME})
response = self.client.post(
f"{API_URL}login/", data=data, content_type="application/json")
self.assertEqual(response.status_code, 200)
self.token = f"token {response.json().get('token')}"
def test_api_login_fail(self):
data = json.dumps({"username": TEST_CREATE_USERNAME,
"password": TEST_CREATE_USERNAME})
response = self.client.post(
f"{API_URL}login/", data=data, content_type="application/json")
self.assertEqual(response.status_code, 404)
def test_api_logout(self):
self.client.credentials(HTTP_AUTHORIZATION=self.token)
response = self.client.get(f"{API_URL}logout/")
self.assertEqual(response.status_code, 204)
def test_api_logout_fail(self):
self.client.credentials(HTTP_AUTHORIZATION="token wrong token")
response = self.client.get(f"{API_URL}logout/")
self.assertEqual(response.status_code, 403)
| 43.75
| 110
| 0.664286
| 511
| 4,200
| 5.258317
| 0.123288
| 0.081876
| 0.179754
| 0.172683
| 0.772609
| 0.743952
| 0.726833
| 0.698921
| 0.681057
| 0.681057
| 0
| 0.015588
| 0.205714
| 4,200
| 95
| 111
| 44.210526
| 0.789868
| 0
| 0
| 0.414634
| 0
| 0
| 0.128571
| 0.007143
| 0
| 0
| 0
| 0
| 0.256098
| 1
| 0.121951
| false
| 0.073171
| 0.04878
| 0
| 0.182927
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.