hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
87a90edb077ff81f869992bfed058d4bd67673d1
| 27
|
py
|
Python
|
utils/__init__.py
|
leoHeidel/dog-face-dataset
|
a70013b43abb7060cccdc0e07883e7dc7052ceeb
|
[
"MIT"
] | 1
|
2020-12-09T16:38:09.000Z
|
2020-12-09T16:38:09.000Z
|
utils/__init__.py
|
leoHeidel/dog-face-dataset
|
a70013b43abb7060cccdc0e07883e7dc7052ceeb
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
leoHeidel/dog-face-dataset
|
a70013b43abb7060cccdc0e07883e7dc7052ceeb
|
[
"MIT"
] | null | null | null |
from .image_utils import *
| 13.5
| 26
| 0.777778
| 4
| 27
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87c0b3a4dd28be2c7e1b934d6b635e62ffdf59ae
| 70
|
py
|
Python
|
axelerate/__init__.py
|
chuangzhu/aXeleRate
|
af3d45b6d44ac5815ae9e022fc49b28db855de4b
|
[
"MIT"
] | null | null | null |
axelerate/__init__.py
|
chuangzhu/aXeleRate
|
af3d45b6d44ac5815ae9e022fc49b28db855de4b
|
[
"MIT"
] | null | null | null |
axelerate/__init__.py
|
chuangzhu/aXeleRate
|
af3d45b6d44ac5815ae9e022fc49b28db855de4b
|
[
"MIT"
] | null | null | null |
from .train import setup_training
from .infer import setup_inference
| 17.5
| 34
| 0.842857
| 10
| 70
| 5.7
| 0.7
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128571
| 70
| 3
| 35
| 23.333333
| 0.934426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
87e9cf919057a238e5363ad03d013584c1530152
| 20,867
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_network.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Dell EMC OpenManage Ansible Modules
# Version 3.0.0
# Copyright (C) 2018-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_network
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
from io import StringIO
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from pytest import importorskip
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
class TestConfigNetwork(FakeAnsibleModule):
module = idrac_network
@pytest.fixture
def idrac_configure_network_mock(self):
omsdk_mock = MagicMock()
idrac_obj = MagicMock()
omsdk_mock.file_share_manager = idrac_obj
omsdk_mock.config_mgr = idrac_obj
type(idrac_obj).create_share_obj = Mock(return_value="networkstatus")
type(idrac_obj).set_liason_share = Mock(return_value="networkstatus")
return idrac_obj
@pytest.fixture
def idrac_file_manager_config_networking_mock(self, mocker):
try:
file_manager_obj = mocker.patch(
MODULE_PATH + 'idrac_network.file_share_manager')
except AttributeError:
file_manager_obj = MagicMock()
obj = MagicMock()
file_manager_obj.create_share_obj.return_value = obj
return file_manager_obj
@pytest.fixture
def idrac_connection_configure_network_mock(self, mocker, idrac_configure_network_mock):
idrac_conn_class_mock = mocker.patch(MODULE_PATH +
'idrac_network.iDRACConnection',
return_value=idrac_configure_network_mock)
idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_network_mock
return idrac_configure_network_mock
def test_main_idrac_configure_network_success_case(self, idrac_connection_configure_network_mock, mocker,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename"})
message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
mocker.patch(MODULE_PATH +
'idrac_network.run_idrac_network_config', return_value=message)
result = self._run_module(idrac_default_args)
assert result == {'msg': 'Successfully configured the idrac network settings.',
'network_status': {
'changed': False,
'msg': {'Status': 'Success', 'message': 'No changes found to commit!'}},
'changed': False, 'failed': False}
def test_run_idrac_network_config_success_case01(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": True, "message": "changes are applicable"}
idrac_connection_configure_network_mock.config_mgr.is_change_applicable.return_value = message
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'changes_applicable': True, 'message': 'changes are applicable'}
def test_run_idrac_network_config_success_case02(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Status': 'Success',
'changed': True,
'changes_applicable': True,
'message': 'changes found to commit!'}
def test_run_idrac_network_config_success_case03(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes found to commit!',
'Status': 'Success',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_success_case04(self, idrac_connection_configure_network_mock,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes were applied',
'Status': 'Success',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_success_case05(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": None,
"dns_idrac_name": None, "auto_config": None, "static_dns": None,
"setup_idrac_nic_vlan": None, "vlan_id": None, "vlan_priority": None,
"enable_nic": None, "nic_selection": None,
"failover_network": None, "auto_detect": None, "auto_negotiation": None,
"network_speed": None, "duplex_mode": None, "nic_mtu": None,
"enable_dhcp": None, "ip_address": None, "enable_ipv4": None,
"dns_from_dhcp": None, "static_dns_1": None, "static_dns_2": None,
"static_gateway": None, "static_net_mask": None})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.configure_dns.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_nic_vlan.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_network_settings.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_ipv4.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_static_ipv4.return_value = message
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes were applied',
'Status': 'Success',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_failed_case01(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
idrac_connection_configure_network_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
idrac_connection_configure_network_mock.config_mgr.set_liason_share.return_value = message
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
result = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert result == idrac_connection_configure_network_mock.config_mgr.is_change_applicable()
def test_run_idrac_network_config_failed_case02(self, idrac_connection_configure_network_mock,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "failed"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes were applied',
'Status': 'failed',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_failed_case03(self, idrac_connection_configure_network_mock,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "sharepassword", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "100.100.102.114", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {'Status': 'Failed', "Data": {'Message': "Failed to found changes"}}
idrac_connection_configure_network_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
idrac_connection_configure_network_mock.config_mgr.set_liason_share.return_value = message
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == idrac_connection_configure_network_mock.config_mgr.is_change_applicable()
@pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
ImportError, ValueError, TypeError, HTTPError, URLError])
def test_main_idrac_configure_network_exception_handling_case(self, exc_type, mocker, idrac_default_args,
idrac_connection_configure_network_mock,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename"})
json_str = to_text(json.dumps({"data": "out"}))
if exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(
MODULE_PATH + 'idrac_network.run_idrac_network_config',
side_effect=exc_type('test'))
else:
mocker.patch(
MODULE_PATH + 'idrac_network.run_idrac_network_config',
side_effect=exc_type('http://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
| 73.996454
| 119
| 0.600853
| 2,098
| 20,867
| 5.555291
| 0.110582
| 0.060403
| 0.070356
| 0.095753
| 0.817417
| 0.802746
| 0.768597
| 0.746203
| 0.720206
| 0.720206
| 0
| 0.012211
| 0.293574
| 20,867
| 281
| 120
| 74.259786
| 0.778441
| 0.011406
| 0
| 0.603175
| 0
| 0
| 0.277387
| 0.019301
| 0
| 0
| 0
| 0
| 0.043651
| 1
| 0.051587
| false
| 0.031746
| 0.055556
| 0
| 0.126984
| 0.003968
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
3564142adb9c9709547799f4912aee51b2096e6d
| 236
|
py
|
Python
|
app/views.py
|
ykatzir/qapp-gsuite-react
|
a2935b27049372d65f7c995fef14642685306a0e
|
[
"Apache-2.0"
] | null | null | null |
app/views.py
|
ykatzir/qapp-gsuite-react
|
a2935b27049372d65f7c995fef14642685306a0e
|
[
"Apache-2.0"
] | null | null | null |
app/views.py
|
ykatzir/qapp-gsuite-react
|
a2935b27049372d65f7c995fef14642685306a0e
|
[
"Apache-2.0"
] | null | null | null |
from flask import render_template
from app import app
@app.route('/')
@app.route('/index')
def index():
return render_template('config.html')
@app.route('/dashboard')
def dashboard():
return render_template('dashboard.html')
| 18.153846
| 44
| 0.716102
| 31
| 236
| 5.354839
| 0.419355
| 0.253012
| 0.240964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127119
| 236
| 12
| 45
| 19.666667
| 0.805825
| 0
| 0
| 0
| 0
| 0
| 0.177966
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| true
| 0
| 0.222222
| 0.222222
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
3567222aaafd72769e8a3611b5534ddaea1529ea
| 39
|
py
|
Python
|
main/online/permissions.py
|
MahanBi/Back-End
|
5074ac1d341ad2addd1750e4aea2e6800be2bfef
|
[
"MIT"
] | 1
|
2017-04-02T17:14:04.000Z
|
2017-04-02T17:14:04.000Z
|
ifttt/permissions.py
|
Verbozeteam/web
|
2aecd67ec823e9d6ac243d6f8a71849dd0f9ed9d
|
[
"MIT"
] | 26
|
2020-06-12T14:36:59.000Z
|
2020-07-10T08:39:53.000Z
|
ifttt/permissions.py
|
Verbozeteam/web
|
2aecd67ec823e9d6ac243d6f8a71849dd0f9ed9d
|
[
"MIT"
] | 2
|
2015-08-14T13:31:30.000Z
|
2015-09-13T10:07:49.000Z
|
from rest_framework import permissions
| 19.5
| 38
| 0.897436
| 5
| 39
| 6.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3574dcbb9d3462310499245056c754c10db0ca16
| 27
|
py
|
Python
|
ipodcopier/__init__.py
|
kristjano/ipodcopier
|
e63665aa28704abbec00d0ec83fbd7279d1afabb
|
[
"MIT"
] | null | null | null |
ipodcopier/__init__.py
|
kristjano/ipodcopier
|
e63665aa28704abbec00d0ec83fbd7279d1afabb
|
[
"MIT"
] | null | null | null |
ipodcopier/__init__.py
|
kristjano/ipodcopier
|
e63665aa28704abbec00d0ec83fbd7279d1afabb
|
[
"MIT"
] | null | null | null |
from .musiccopier import *
| 13.5
| 26
| 0.777778
| 3
| 27
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3588a0560589f32183d0e5c192a311660ac0da70
| 86
|
py
|
Python
|
TimeWrapper_JE/venv/Lib/site-packages/je_time/modules/__init__.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
TimeWrapper_JE/venv/Lib/site-packages/je_time/modules/__init__.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
TimeWrapper_JE/venv/Lib/site-packages/je_time/modules/__init__.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
from je_time.modules import get_calendar
from je_time.modules import time_calculate
| 28.666667
| 43
| 0.860465
| 14
| 86
| 5
| 0.571429
| 0.171429
| 0.285714
| 0.485714
| 0.657143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 86
| 2
| 44
| 43
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
358fa5b291248e329be68f7bfbf94f06fb8aedea
| 29
|
py
|
Python
|
corehq/apps/ota/tests/__init__.py
|
dslowikowski/commcare-hq
|
ad8885cf8dab69dc85cb64f37aeaf06106124797
|
[
"BSD-3-Clause"
] | 1
|
2015-02-10T23:26:39.000Z
|
2015-02-10T23:26:39.000Z
|
corehq/apps/ota/tests/__init__.py
|
SEL-Columbia/commcare-hq
|
992ee34a679c37f063f86200e6df5a197d5e3ff6
|
[
"BSD-3-Clause"
] | 1
|
2022-03-12T01:03:25.000Z
|
2022-03-12T01:03:25.000Z
|
corehq/apps/ota/tests/__init__.py
|
johan--/commcare-hq
|
86ee99c54f55ee94e4c8f2f6f30fc44e10e69ebd
|
[
"BSD-3-Clause"
] | null | null | null |
from .digest_restore import *
| 29
| 29
| 0.827586
| 4
| 29
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103448
| 29
| 1
| 29
| 29
| 0.884615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ea3b35ba8d176391b3eb639bd03e104807b07c74
| 175
|
py
|
Python
|
Chapter1/sumoftwodice.py
|
galinadychko/IntroToPython
|
14b804f0d7f40e9e8cb515c885d01bdc5f70c627
|
[
"MIT"
] | null | null | null |
Chapter1/sumoftwodice.py
|
galinadychko/IntroToPython
|
14b804f0d7f40e9e8cb515c885d01bdc5f70c627
|
[
"MIT"
] | null | null | null |
Chapter1/sumoftwodice.py
|
galinadychko/IntroToPython
|
14b804f0d7f40e9e8cb515c885d01bdc5f70c627
|
[
"MIT"
] | null | null | null |
import random
import stdio
a = random.randrange(1, 7)
b = random.randrange(1, 7)
stdio.writeln("Sum after rolling 2 dices: " + str(a) + " + " + str(b) + " = " + str(a + b))
| 21.875
| 91
| 0.6
| 28
| 175
| 3.75
| 0.535714
| 0.285714
| 0.304762
| 0.32381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035971
| 0.205714
| 175
| 7
| 92
| 25
| 0.719424
| 0
| 0
| 0
| 0
| 0
| 0.188571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
ea4bc0609e9147ed8fcd19098387bd9ce696d0ae
| 568
|
py
|
Python
|
sklearntools/sym/sym_transform_parts.py
|
modusdatascience/sklearntools
|
6cb87edcb501440266622fe4c738be3f9015a859
|
[
"BSD-3-Clause"
] | 2
|
2018-06-13T21:13:07.000Z
|
2019-11-19T20:37:47.000Z
|
sklearntools/sym/sym_transform_parts.py
|
jcrudy/sklearntools
|
6cb87edcb501440266622fe4c738be3f9015a859
|
[
"BSD-3-Clause"
] | null | null | null |
sklearntools/sym/sym_transform_parts.py
|
jcrudy/sklearntools
|
6cb87edcb501440266622fe4c738be3f9015a859
|
[
"BSD-3-Clause"
] | 1
|
2019-12-06T00:28:58.000Z
|
2019-12-06T00:28:58.000Z
|
from .base import call_method_or_dispatch, fallback, create_registerer
from .parts import double_check
from .syms import syms
from .sym_transform import sym_transform
def sym_transform_parts_base(obj, target=None):
    """Fallback implementation: bundle (syms, sym_transform, target) for obj."""
    return (syms(obj), sym_transform(obj), target)


# Registry of type-specific implementations; populated via the registerer below.
sym_transform_parts_dispatcher = {}

# Dispatch on type (or a method on obj), fall back to the base implementation,
# and double-check the result.
sym_transform_parts = double_check(
    fallback(
        call_method_or_dispatch('sym_transform_parts', sym_transform_parts_dispatcher),
        sym_transform_parts_base,
    )
)

register_sym_transform_parts = create_registerer(
    sym_transform_parts_dispatcher, 'register_sym_transform_parts'
)
| 47.333333
| 150
| 0.855634
| 79
| 568
| 5.670886
| 0.291139
| 0.321429
| 0.341518
| 0.180804
| 0.196429
| 0.196429
| 0.196429
| 0
| 0
| 0
| 0
| 0
| 0.075704
| 568
| 11
| 151
| 51.636364
| 0.853333
| 0
| 0
| 0
| 0
| 0
| 0.082746
| 0.049296
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.444444
| 0.111111
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
ea6497e13f62c88b58db2bdbd7c366c42860f050
| 124
|
py
|
Python
|
tests/input.py
|
ExObsSim/Rapoc-public
|
dd5a4af1e7ab531e8d3176026354719585752d58
|
[
"BSD-3-Clause"
] | null | null | null |
tests/input.py
|
ExObsSim/Rapoc-public
|
dd5a4af1e7ab531e8d3176026354719585752d58
|
[
"BSD-3-Clause"
] | null | null | null |
tests/input.py
|
ExObsSim/Rapoc-public
|
dd5a4af1e7ab531e8d3176026354719585752d58
|
[
"BSD-3-Clause"
] | null | null | null |
# Test fixture paths: opacity data for the same line list (1H2-16O POKAZATEL,
# presumably water — confirm against the test data) in two source formats.
dace_file = 'test_data/1H2-16O__POKAZATEL_e2b'  # DACE database entry
exomol_file = 'test_data/1H2-16O__POKAZATEL__R15000_0.3-50mu.xsec.TauREx.h5'  # ExoMol/TauREx HDF5 cross-sections
| 41.333333
| 76
| 0.830645
| 22
| 124
| 4.136364
| 0.727273
| 0.175824
| 0.263736
| 0.32967
| 0.593407
| 0.593407
| 0
| 0
| 0
| 0
| 0
| 0.161017
| 0.048387
| 124
| 2
| 77
| 62
| 0.610169
| 0
| 0
| 0
| 0
| 0
| 0.741935
| 0.741935
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
57a05e3dde8bbac098574805d972b5207e2dd0b7
| 1,719
|
py
|
Python
|
share/extended-cpt/analysis/vis/run.py
|
ucla-pbpl/pbpl-compton
|
a5afcdffc778f61a4726d7c5a231af2bca466900
|
[
"MIT"
] | 2
|
2019-09-24T23:52:58.000Z
|
2020-06-03T20:59:33.000Z
|
share/extended-cpt/analysis/vis/run.py
|
ucla-pbpl/pbpl-compton
|
a5afcdffc778f61a4726d7c5a231af2bca466900
|
[
"MIT"
] | null | null | null |
share/extended-cpt/analysis/vis/run.py
|
ucla-pbpl/pbpl-compton
|
a5afcdffc778f61a4726d7c5a231af2bca466900
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Visualization driver: patch the GEANT4 config (sfqed.toml), run the
# pbpl-compton Monte Carlo, and extrude the VRML track output for rendering.
# Alternative shot configurations are kept below as commented-out variants.
import os
import toml
import sys

# start clean: remove VRML/HDF5 leftovers from previous runs
os.system('rm -f *wrl *h5')
# print('### RUNNING GEANT4 (design.wrl) ###')
# conf = toml.load('sfqed.toml')
# A = conf['PrimaryGenerator']
# A['PythoGenerator'] = 'sfqed.pattern_spray'
# A['NumEvents'] = 100
# with open('temp.toml', 'w') as fout:
# toml.dump(conf, fout)
# os.system('pbpl-compton-mc temp.toml vis.mac > /dev/null 2>&1')
# os.system('pbpl-compton-extrude-vrml g4_00.wrl --radius=0.2 --num-points=8 --output=design.wrl')
# os.system('rm -f temp.toml g4*wrl')
print('### RUNNING GEANT4 (gamma-10MeV.wrl) ###')
# patch the primary-generator section, then run with the patched copy
conf = toml.load('sfqed.toml')
A = conf['PrimaryGenerator']
A['PythonGenerator'] = 'pbpl.compton.generators.repeater'
# repeated identical primaries: gamma, 10 MeV, from [0,0,-100mm] along +z
A['PythonGeneratorArgs'] = ['gamma', '10*MeV', '[0,0,-100*mm]', '[0,0,1]']
A['NumEvents'] = 20000
with open('temp.toml', 'w') as fout:
    toml.dump(conf, fout)
os.system('pbpl-compton-mc temp.toml vis.mac')
#os.system('pbpl-compton-mc temp.toml vis.mac > /dev/null 2>&1')
# thicken the GEANT4 line tracks into extruded tubes for rendering
os.system('pbpl-compton-extrude-vrml g4_00.wrl --radius=0.8 --num-points=8 --output=gamma-10MeV.wrl')
# os.system('rm -f temp.toml g4*wrl')
# print('### RUNNING GEANT4 (gamma-2GeV.wrl) ###')
# conf = toml.load('sfqed.toml')
# A = conf['PrimaryGenerator']
# A['PythonGenerator'] = 'pbpl.compton.generators.repeater'
# A['PythonGeneratorArgs'] = ['gamma', '2*GeV', '[0,0,-100*mm]', '[0,0,1]']
# A['NumEvents'] = 20000
# with open('temp.toml', 'w') as fout:
# toml.dump(conf, fout)
# os.system('pbpl-compton-mc temp.toml vis.mac')
# #os.system('pbpl-compton-mc temp.toml vis.mac > /dev/null 2>&1')
# os.system('pbpl-compton-extrude-vrml g4_00.wrl --radius=0.8 --num-points=8 --output=gamma-2GeV.wrl')
# os.system('rm -f temp.toml g4*wrl')
| 38.2
| 102
| 0.656195
| 277
| 1,719
| 4.057762
| 0.234657
| 0.085409
| 0.085409
| 0.135231
| 0.843416
| 0.843416
| 0.843416
| 0.843416
| 0.843416
| 0.819395
| 0
| 0.045039
| 0.108784
| 1,719
| 44
| 103
| 39.068182
| 0.688642
| 0.646888
| 0
| 0
| 0
| 0.071429
| 0.557118
| 0.142355
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.214286
| 0
| 0.214286
| 0.071429
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
57c61e1fa99ee81dd07e84545b3887c1d51d383c
| 2,584
|
py
|
Python
|
app/closeLoop/trainModels-prcp.py
|
fkwai/geolearn
|
30cb4353d22af5020a48100d07ab04f465a315b0
|
[
"MIT"
] | null | null | null |
app/closeLoop/trainModels-prcp.py
|
fkwai/geolearn
|
30cb4353d22af5020a48100d07ab04f465a315b0
|
[
"MIT"
] | null | null | null |
app/closeLoop/trainModels-prcp.py
|
fkwai/geolearn
|
30cb4353d22af5020a48100d07ab04f465a315b0
|
[
"MIT"
] | 2
|
2021-04-04T02:45:59.000Z
|
2022-03-19T09:41:39.000Z
|
from hydroDL import pathSMAP, master, utils
from hydroDL.master import default
from hydroDL.post import plot, stat
import os
import matplotlib.pyplot as plt
import numpy as np
import torch
# training
# For each time range, train two SMAP soil-moisture models with precipitation
# forcing (APCP_FORA): a closed-loop/data-assimilation LSTM and a plain LSTM.
tLst = [[20150501, 20151001], [20150402, 20160401]]  # [start, end] as YYYYMMDD
tagLst = ['2015RK', '2015']  # experiment name suffixes, parallel to tLst
for k in range(len(tLst)):
    # closed-loop run; daObs=1 presumably assimilates 1-day-lagged
    # observations — confirm against hydroDL's default.update docs
    optData = default.update(
        default.optDataSMAP,
        varT=['APCP_FORA'],
        rootDB=pathSMAP['DB_L3_NA'],
        subset='CONUSv2f1',
        tRange=tLst[k],
        daObs=1)
    optModel = default.optLstmClose
    optLoss = default.optLossRMSE
    optTrain = default.update(default.optTrainSMAP, nEpoch=500)
    out = os.path.join(pathSMAP['Out_L3_NA'], 'DA', 'CONUSv2f1_DA_Prcp_' + tagLst[k])
    masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
    # cudaID cycles over GPUs 1,2,0; one screen session per experiment
    master.runTrain(masterDict, cudaID=(k+1) % 3, screen='DA' + tagLst[k])
    # baseline LSTM (no data assimilation) on the same data and range
    optData = default.update(
        default.optDataSMAP,
        varT=['APCP_FORA'],
        rootDB=pathSMAP['DB_L3_NA'],
        subset='CONUSv2f1',
        tRange=tLst[k])
    optModel = default.optLstm
    optLoss = default.optLossRMSE
    optTrain = default.update(default.optTrainSMAP, nEpoch=500)
    out = os.path.join(pathSMAP['Out_L3_NA'], 'DA',
                       'CONUSv2f1_LSTM_Prcp_'+tagLst[k])
    masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
    master.runTrain(masterDict, cudaID=(k+1) % 3, screen='LSTM' + tagLst[k])
# training
# Second experiment: closed-loop model only, default forcing set (no explicit
# varT), on the 2015RK range.
tLst = [[20150501, 20151001]]
yrLst = ['2015RK']
for k in range(len(tLst)):
    optData = default.update(
        default.optDataSMAP,
        rootDB=pathSMAP['DB_L3_NA'],
        subset='CONUSv2f1',
        tRange=tLst[k],
        daObs=1)
    optModel = default.optLstmClose
    optLoss = default.optLossRMSE
    optTrain = default.update(default.optTrainSMAP, nEpoch=500)
    out = os.path.join(pathSMAP['Out_L3_NA'], 'DA', 'CONUSv2f1_DA_' + yrLst[k])
    masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
    # pinned to GPU 2 (unlike the first experiment's round-robin)
    master.runTrain(masterDict, cudaID=2, screen='DA' + yrLst[k])
# optData = default.update(
# default.optDataSMAP,
# rootDB=pathSMAP['DB_L3_NA'],
# subset='CONUSv2f1',
# tRange=tLst[k])
# optModel = default.optLstm
# optLoss = default.optLossRMSE
# optTrain = default.update(default.optTrainSMAP, nEpoch=300)
# out = os.path.join(pathSMAP['Out_L3_NA'], 'DA', 'CONUSv2f1_LSTM'+yrLst[k])
# masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
# master.runTrain(masterDict, cudaID=k % 3, screen='LSTM' + yrLst[k])
| 37.449275
| 85
| 0.662926
| 306
| 2,584
| 5.509804
| 0.232026
| 0.061684
| 0.094899
| 0.064057
| 0.817319
| 0.817319
| 0.816133
| 0.816133
| 0.816133
| 0.816133
| 0
| 0.05029
| 0.19969
| 2,584
| 68
| 86
| 38
| 0.76499
| 0.185372
| 0
| 0.607843
| 0
| 0
| 0.084648
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.137255
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
17b915dc6e6f90a242e36ddfb4d72d3c90de4d48
| 12,978
|
py
|
Python
|
optixrap/cu/boolean_solid.py
|
hanswenzel/opticks
|
b75b5929b6cf36a5eedeffb3031af2920f75f9f0
|
[
"Apache-2.0"
] | 11
|
2020-07-05T02:39:32.000Z
|
2022-03-20T18:52:44.000Z
|
optixrap/cu/boolean_solid.py
|
hanswenzel/opticks
|
b75b5929b6cf36a5eedeffb3031af2920f75f9f0
|
[
"Apache-2.0"
] | null | null | null |
optixrap/cu/boolean_solid.py
|
hanswenzel/opticks
|
b75b5929b6cf36a5eedeffb3031af2920f75f9f0
|
[
"Apache-2.0"
] | 4
|
2020-09-03T20:36:32.000Z
|
2022-01-19T07:42:21.000Z
|
#
# Copyright (c) 2019 Opticks Team. All Rights Reserved.
#
# This file is part of Opticks
# (see https://bitbucket.org/simoncblyth/opticks).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# generated Tue Mar 14 18:57:46 2017
# from /Users/blyth/opticks/optixrap/cu
# base boolean-solid.h stem boolean-solid
# with command : /Users/blyth/opticks/bin/c_enums_to_python.py boolean-solid.h
import sys
#0
class Act_(object):
    """Action bitmask flags for the CSG boolean-solid state tables.

    Each flag is a distinct bit so several actions can be OR-ed together.
    """
    ReturnMiss = 0x1 << 0
    ReturnAIfCloser = 0x1 << 1
    ReturnAIfFarther = 0x1 << 2
    ReturnA = 0x1 << 3
    ReturnBIfCloser = 0x1 << 4
    ReturnBIfFarther = 0x1 << 5
    ReturnB = 0x1 << 6
    ReturnFlipBIfCloser = 0x1 << 7
    AdvanceAAndLoop = 0x1 << 8
    AdvanceBAndLoop = 0x1 << 9
    AdvanceAAndLoopIfCloser = 0x1 << 10
    AdvanceBAndLoopIfCloser = 0x1 << 11

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#1
class CTRL_(object):
    """Control codes returned by the boolean-solid decision tables."""
    RETURN_MISS = 0
    RETURN_A = 1
    RETURN_B = 2
    RETURN_FLIP_B = 3
    LOOP_A = 4
    LOOP_B = 5

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#2
class State_(object):
    """Intersection states for a ray against one CSG sub-solid."""
    Enter = 0
    Exit = 1
    Miss = 2

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#3
class ERROR_(object):
    """Error bitmask flags raised by the boolean-solid evaluation loop."""
    LHS_POP_EMPTY = 0x1 << 0
    RHS_POP_EMPTY = 0x1 << 1
    LHS_END_NONEMPTY = 0x1 << 2
    RHS_END_EMPTY = 0x1 << 3
    BAD_CTRL = 0x1 << 4
    LHS_OVERFLOW = 0x1 << 5
    RHS_OVERFLOW = 0x1 << 6
    LHS_TRANCHE_OVERFLOW = 0x1 << 7
    RHS_TRANCHE_OVERFLOW = 0x1 << 8
    RESULT_OVERFLOW = 0x1 << 9
    OVERFLOW = 0x1 << 10
    TRANCHE_OVERFLOW = 0x1 << 11
    POP_EMPTY = 0x1 << 12
    XOR_SIDE = 0x1 << 13
    END_EMPTY = 0x1 << 14
    ROOT_STATE = 0x1 << 15

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#4
class Union_(object):
    """Act_ bitmask table for CSG union, keyed by the (A-state, B-state) pair."""
    EnterA_EnterB = Act_.ReturnAIfCloser | Act_.ReturnBIfCloser
    EnterA_ExitB = Act_.ReturnBIfCloser | Act_.AdvanceAAndLoop
    EnterA_MissB = Act_.ReturnA
    ExitA_EnterB = Act_.ReturnAIfCloser | Act_.AdvanceBAndLoop
    ExitA_ExitB = Act_.ReturnAIfFarther | Act_.ReturnBIfFarther
    ExitA_MissB = Act_.ReturnA
    MissA_EnterB = Act_.ReturnB
    MissA_ExitB = Act_.ReturnB
    MissA_MissB = Act_.ReturnMiss

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#5
class ACloser_Union_(object):
    """CTRL_ decision table for CSG union when the A intersection is closer."""
    EnterA_EnterB = CTRL_.RETURN_A
    EnterA_ExitB = CTRL_.LOOP_A
    EnterA_MissB = CTRL_.RETURN_A
    ExitA_EnterB = CTRL_.RETURN_A
    ExitA_ExitB = CTRL_.RETURN_B
    ExitA_MissB = CTRL_.RETURN_A
    MissA_EnterB = CTRL_.RETURN_B
    MissA_ExitB = CTRL_.RETURN_B
    MissA_MissB = CTRL_.RETURN_MISS

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#6
class BCloser_Union_(object):
    """CTRL_ decision table for CSG union when the B intersection is closer."""
    EnterA_EnterB = CTRL_.RETURN_B
    EnterA_ExitB = CTRL_.RETURN_B
    EnterA_MissB = CTRL_.RETURN_A
    ExitA_EnterB = CTRL_.LOOP_B
    ExitA_ExitB = CTRL_.RETURN_A
    ExitA_MissB = CTRL_.RETURN_A
    MissA_EnterB = CTRL_.RETURN_B
    MissA_ExitB = CTRL_.RETURN_B
    MissA_MissB = CTRL_.RETURN_MISS

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#7
class Difference_(object):
    """Act_ bitmask table for CSG difference (A - B), keyed by state pair."""
    EnterA_EnterB = Act_.ReturnAIfCloser | Act_.AdvanceBAndLoop
    EnterA_ExitB = Act_.AdvanceAAndLoopIfCloser | Act_.AdvanceBAndLoopIfCloser
    EnterA_MissB = Act_.ReturnA
    ExitA_EnterB = Act_.ReturnAIfCloser | Act_.ReturnFlipBIfCloser
    ExitA_ExitB = Act_.ReturnFlipBIfCloser | Act_.AdvanceAAndLoop
    ExitA_MissB = Act_.ReturnA
    MissA_EnterB = Act_.ReturnMiss
    MissA_ExitB = Act_.ReturnMiss
    MissA_MissB = Act_.ReturnMiss

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#8
class ACloser_Difference_(object):
    """CTRL_ decision table for CSG difference when A is closer."""
    EnterA_EnterB = CTRL_.RETURN_A
    EnterA_ExitB = CTRL_.LOOP_A
    EnterA_MissB = CTRL_.RETURN_A
    ExitA_EnterB = CTRL_.RETURN_A
    ExitA_ExitB = CTRL_.LOOP_A
    ExitA_MissB = CTRL_.RETURN_A
    MissA_EnterB = CTRL_.RETURN_MISS
    MissA_ExitB = CTRL_.RETURN_MISS
    MissA_MissB = CTRL_.RETURN_MISS

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#9
class BCloser_Difference_(object):
    """CTRL_ decision table for CSG difference when B is closer."""
    EnterA_EnterB = CTRL_.LOOP_B
    EnterA_ExitB = CTRL_.LOOP_B
    EnterA_MissB = CTRL_.RETURN_A
    ExitA_EnterB = CTRL_.RETURN_FLIP_B
    ExitA_ExitB = CTRL_.RETURN_FLIP_B
    ExitA_MissB = CTRL_.RETURN_A
    MissA_EnterB = CTRL_.RETURN_MISS
    MissA_ExitB = CTRL_.RETURN_MISS
    MissA_MissB = CTRL_.RETURN_MISS

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#10
class Intersection_(object):
    """Act_ bitmask table for CSG intersection, keyed by the state pair."""
    EnterA_EnterB = Act_.AdvanceAAndLoopIfCloser | Act_.AdvanceBAndLoopIfCloser
    EnterA_ExitB = Act_.ReturnAIfCloser | Act_.AdvanceBAndLoop
    EnterA_MissB = Act_.ReturnMiss
    ExitA_EnterB = Act_.ReturnBIfCloser | Act_.AdvanceAAndLoop
    ExitA_ExitB = Act_.ReturnAIfCloser | Act_.ReturnBIfCloser
    ExitA_MissB = Act_.ReturnMiss
    MissA_EnterB = Act_.ReturnMiss
    MissA_ExitB = Act_.ReturnMiss
    MissA_MissB = Act_.ReturnMiss

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#11
class ACloser_Intersection_(object):
    """CTRL_ decision table for CSG intersection when A is closer."""
    EnterA_EnterB = CTRL_.LOOP_A
    EnterA_ExitB = CTRL_.RETURN_A
    EnterA_MissB = CTRL_.RETURN_MISS
    ExitA_EnterB = CTRL_.LOOP_A
    ExitA_ExitB = CTRL_.RETURN_A
    ExitA_MissB = CTRL_.RETURN_MISS
    MissA_EnterB = CTRL_.RETURN_MISS
    MissA_ExitB = CTRL_.RETURN_MISS
    MissA_MissB = CTRL_.RETURN_MISS

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
#12
class BCloser_Intersection_(object):
    """CTRL_ decision table for CSG intersection when B is closer."""
    EnterA_EnterB = CTRL_.LOOP_B
    EnterA_ExitB = CTRL_.LOOP_B
    EnterA_MissB = CTRL_.RETURN_MISS
    ExitA_EnterB = CTRL_.RETURN_B
    ExitA_ExitB = CTRL_.RETURN_B
    ExitA_MissB = CTRL_.RETURN_MISS
    MissA_EnterB = CTRL_.RETURN_MISS
    MissA_ExitB = CTRL_.RETURN_MISS
    MissA_MissB = CTRL_.RETURN_MISS

    @classmethod
    def enum(cls):
        """Return a list of (name, value) pairs for all int-valued attributes."""
        # Materialize a list: on Python 3, filter() is a lazy iterator, so the
        # len()/indexing done by desc()/fromdesc() would raise TypeError.
        return [kv for kv in cls.__dict__.items() if type(kv[1]) is int]

    @classmethod
    def desc(cls, typ):
        """Return the attribute name whose value equals typ, else 'UNKNOWN'."""
        kvs = [kv for kv in cls.enum() if kv[1] == typ]
        return kvs[0][0] if len(kvs) == 1 else "UNKNOWN"

    @classmethod
    def descmask(cls, typ):
        """Return comma-joined names of attributes whose bits overlap mask typ."""
        kvs = [kv for kv in cls.enum() if kv[1] & typ]
        return ",".join([kv[0] for kv in kvs])

    @classmethod
    def fromdesc(cls, label):
        """Return the value of the attribute named label, else -1."""
        kvs = [kv for kv in cls.enum() if kv[0] == label]
        return kvs[0][1] if len(kvs) == 1 else -1
| 29.295711
| 80
| 0.63153
| 1,853
| 12,978
| 4.243929
| 0.098219
| 0.066124
| 0.092574
| 0.084308
| 0.764878
| 0.734232
| 0.713123
| 0.706256
| 0.698754
| 0.668998
| 0
| 0.029698
| 0.23979
| 12,978
| 442
| 81
| 29.361991
| 0.767383
| 0.067037
| 0
| 0.764526
| 0
| 0
| 0.008617
| 0
| 0
| 0
| 0.00696
| 0
| 0
| 1
| 0.159021
| false
| 0
| 0.003058
| 0.039755
| 0.721713
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
17dd7143678dea9fa12650112a9e8401d5bbfae0
| 22
|
py
|
Python
|
auxCallsHandler.py
|
storage4grid/s4g-electric-vehicle-analytics
|
0f353800fa6dcb60d2b6bcacd760e77ae60c9ef3
|
[
"MIT"
] | null | null | null |
auxCallsHandler.py
|
storage4grid/s4g-electric-vehicle-analytics
|
0f353800fa6dcb60d2b6bcacd760e77ae60c9ef3
|
[
"MIT"
] | 5
|
2021-03-19T08:15:07.000Z
|
2022-02-10T13:46:53.000Z
|
auxCallsHandler.py
|
storage4grid/s4g-electric-vehicle-analytics
|
0f353800fa6dcb60d2b6bcacd760e77ae60c9ef3
|
[
"MIT"
] | null | null | null |
import os, json, time
| 11
| 21
| 0.727273
| 4
| 22
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
aa094a8838949baf51c1b22cc1d0c99dd1c94374
| 4,073
|
py
|
Python
|
tests/unit_tests/test_nn/test_converters/test_tensorflow/test_Reshape.py
|
samysweb/dnnv
|
58fb95b7300914d9da28eed86c39eca473b1aaef
|
[
"MIT"
] | 5
|
2022-01-28T20:30:34.000Z
|
2022-03-17T09:26:52.000Z
|
tests/unit_tests/test_nn/test_converters/test_tensorflow/test_Reshape.py
|
samysweb/dnnv
|
58fb95b7300914d9da28eed86c39eca473b1aaef
|
[
"MIT"
] | 9
|
2022-01-27T03:50:28.000Z
|
2022-02-08T18:42:17.000Z
|
tests/unit_tests/test_nn/test_converters/test_tensorflow/test_Reshape.py
|
samysweb/dnnv
|
58fb95b7300914d9da28eed86c39eca473b1aaef
|
[
"MIT"
] | 2
|
2022-02-03T17:32:43.000Z
|
2022-03-24T16:38:49.000Z
|
import numpy as np
import pytest
from dnnv.nn.converters.tensorflow import *
from dnnv.nn.operations import *
def test_Reshape():
    """Reshape with allowzero=True keeps literal zeros in the target shape."""
    x = np.random.random_sample([0, 3, 4]).astype(np.float32)
    target = np.array([3, 4, 0], dtype=np.int64)
    expected = np.reshape(x, target)

    # constant-input form
    tf_fn = TensorflowConverter().visit(Reshape(x, target, allowzero=True))
    assert np.allclose(tf_fn().numpy(), expected)

    # placeholder-input form
    tf_fn = TensorflowConverter().visit(
        Reshape(
            Input((0, 3, 4), np.dtype(np.float32)),
            Input((3,), np.dtype(np.int64)),
            allowzero=True,
        )
    )
    assert np.allclose(tf_fn(x, target).numpy(), expected)
def test_Reshape_reordered_all_dims():
    """Reshaping to a permutation of all dims matches np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([4, 2, 3], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_reordered_last_dims():
    """Reshaping with the trailing dims swapped matches np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([2, 4, 3], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_reduced_dims():
    """Collapsing two dims into one matches np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([2, 12], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_extended_dims():
    """Splitting one dim into two matches np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([2, 3, 2, 2], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_one_dim():
    """Flattening to a single dim matches np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([24], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_negative_dim():
    """A -1 target dim is inferred, matching np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([2, -1, 2], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_negative_extended_dims():
    """A leading -1 with added dims is inferred, matching np.reshape."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([-1, 2, 3, 4], dtype=np.int64)
    expected = np.reshape(x, target)
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_zero_dim():
    """Without allowzero, a 0 target dim copies the corresponding input dim."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([2, 0, 4, 1], dtype=np.int64)
    # the 0 at position 1 resolves to the input's dim 1 (i.e. 3)
    expected = np.reshape(x, [2, 3, 4, 1])
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
def test_Reshape_zero_and_negative_dim():
    """A copied 0 dim and an inferred -1 dim combine correctly."""
    x = np.random.random_sample([2, 3, 4]).astype(np.float32)
    target = np.array([2, 0, 1, -1], dtype=np.int64)
    # 0 copies input dim 1 (3); -1 is then inferred
    expected = np.reshape(x, [2, 3, 1, -1])
    tf_fn = TensorflowConverter().visit(Reshape(x, target))
    assert np.allclose(tf_fn().numpy(), expected)
| 30.17037
| 69
| 0.66413
| 612
| 4,073
| 4.23366
| 0.088235
| 0.089541
| 0.087997
| 0.131995
| 0.913933
| 0.913933
| 0.913933
| 0.913933
| 0.913933
| 0.913933
| 0
| 0.036017
| 0.188804
| 4,073
| 134
| 70
| 30.395522
| 0.748184
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107843
| 1
| 0.098039
| false
| 0
| 0.039216
| 0
| 0.137255
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a4bd91610c066b32e2e37a695bd14a74212e369f
| 51
|
py
|
Python
|
TwitterScraperAPI/packages/__init__.py
|
vaskrneup/TwitterScraperAPI
|
c4bfc860ec012a8233d5d325c25ba58642a843e2
|
[
"MIT"
] | 1
|
2020-09-19T14:39:50.000Z
|
2020-09-19T14:39:50.000Z
|
TwitterScraperAPI/packages/__init__.py
|
vaskrneup/TwitterScraperAPI
|
c4bfc860ec012a8233d5d325c25ba58642a843e2
|
[
"MIT"
] | null | null | null |
TwitterScraperAPI/packages/__init__.py
|
vaskrneup/TwitterScraperAPI
|
c4bfc860ec012a8233d5d325c25ba58642a843e2
|
[
"MIT"
] | null | null | null |
from . import default_data
from . import extractor
| 17
| 26
| 0.803922
| 7
| 51
| 5.714286
| 0.714286
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 51
| 2
| 27
| 25.5
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a4ce4b433335850dbaf45b173f75c428d8dabc53
| 30
|
py
|
Python
|
expertise/setup/__init__.py
|
mspector/expertise
|
6c50b794bfaeaf3a3a9ae271397f177a326d11f2
|
[
"MIT"
] | 12
|
2020-02-25T18:45:32.000Z
|
2022-02-07T22:24:48.000Z
|
expertise/setup/__init__.py
|
mspector/expertise
|
6c50b794bfaeaf3a3a9ae271397f177a326d11f2
|
[
"MIT"
] | 52
|
2019-12-19T08:14:43.000Z
|
2022-03-23T19:40:02.000Z
|
expertise/setup/__init__.py
|
mspector/expertise
|
6c50b794bfaeaf3a3a9ae271397f177a326d11f2
|
[
"MIT"
] | 3
|
2019-10-24T16:52:34.000Z
|
2021-09-27T21:40:39.000Z
|
from .core import setup_model
| 15
| 29
| 0.833333
| 5
| 30
| 4.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 30
| 1
| 30
| 30
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a4e3eb84e005678adb66ed404806c88f7c6efc7e
| 22,991
|
py
|
Python
|
Lib/test/test_compiler/test_static/final.py
|
isabella232/cinder-1
|
428669a9a925287f192ab361226e5a8ca3fb74d9
|
[
"CNRI-Python-GPL-Compatible"
] | 1,886
|
2021-05-03T23:58:43.000Z
|
2022-03-31T19:15:58.000Z
|
Lib/test/test_compiler/test_static/final.py
|
isabella232/cinder-1
|
428669a9a925287f192ab361226e5a8ca3fb74d9
|
[
"CNRI-Python-GPL-Compatible"
] | 70
|
2021-05-04T23:25:35.000Z
|
2022-03-31T18:42:08.000Z
|
Lib/test/test_compiler/test_static/final.py
|
isabella232/cinder-1
|
428669a9a925287f192ab361226e5a8ca3fb74d9
|
[
"CNRI-Python-GPL-Compatible"
] | 52
|
2021-05-04T21:26:03.000Z
|
2022-03-08T18:02:56.000Z
|
from compiler.errors import TypedSyntaxError
from typing import ClassVar
from .common import StaticTestBase
class FinalTests(StaticTestBase):
    def test_final_multiple_typeargs(self):
        """Final with more than one type argument is a TypedSyntaxError."""
        codestr = """
        from typing import Final
        from something import hello
        x: Final[int, str] = hello()
        """
        with self.assertRaisesRegex(
            TypedSyntaxError,
            r"incorrect number of generic arguments for Final\[T\], expected 1, got 2",
        ):
            self.compile(codestr, modname="foo")
    def test_final_annotation_nesting(self):
        """Final may not be nested inside another annotation (subscript or union)."""
        with self.assertRaisesRegex(
            TypedSyntaxError, "Final annotation is only valid in initial declaration"
        ):
            self.compile(
                """
                from typing import Final, List
                x: List[Final[str]] = []
                """,
                modname="foo",
            )
        with self.assertRaisesRegex(
            TypedSyntaxError, "Final annotation is only valid in initial declaration"
        ):
            self.compile(
                """
                from typing import Final, List
                x: List[int | Final] = []
                """,
                modname="foo",
            )
    def test_final(self):
        """A module-level Final[int] with an initializer compiles cleanly."""
        codestr = """
        from typing import Final
        x: Final[int] = 0xdeadbeef
        """
        self.compile(codestr, modname="foo")
    def test_final_generic(self):
        """Final used with a generic parameter (Final[int]) compiles cleanly."""
        codestr = """
        from typing import Final
        x: Final[int] = 0xdeadbeef
        """
        self.compile(codestr, modname="foo")
    def test_final_generic_types(self):
        """A Final[int] local can be passed where a plain int is expected."""
        codestr = """
        from typing import Final
        def g(i: int) -> int:
            return i
        def f() -> int:
            x: Final[int] = 0xdeadbeef
            return g(x)
        """
        self.compile(codestr, modname="foo")
    def test_final_uninitialized(self):
        """Declaring a Final without assigning a value is rejected."""
        codestr = """
        from typing import Final
        x: Final[int]
        """
        with self.assertRaisesRegex(
            TypedSyntaxError, "Must assign a value when declaring a Final"
        ):
            self.compile(codestr, modname="foo")
    def test_final_reassign(self):
        """Reassigning a Final[Any] is rejected."""
        codestr = """
        from typing import Any, Final
        x: Final[Any] = 0xdeadbeef
        x = "something"
        """
        with self.assertRaisesRegex(
            TypedSyntaxError, "Cannot assign to a Final variable"
        ):
            self.compile(codestr, modname="foo")
    def test_final_reassign_explicit_global(self):
        """Assigning a Final global through a `global` declaration is rejected."""
        codestr = """
        from typing import Final
        a: Final[int] = 1337
        def fn():
            def fn2():
                global a
                a = 0
        """
        with self.assertRaisesRegex(
            TypedSyntaxError, "Cannot assign to a Final variable"
        ):
            self.compile(codestr, modname="foo")
    def test_final_reassign_explicit_global_shadowed(self):
        """`global` still targets the Final even when an enclosing local shadows it."""
        codestr = """
        from typing import Final
        a: Final[int] = 1337
        def fn():
            a = 2
            def fn2():
                global a
                a = 0
        """
        with self.assertRaisesRegex(
            TypedSyntaxError, "Cannot assign to a Final variable"
        ):
            self.compile(codestr, modname="foo")
    def test_final_reassign_nonlocal(self):
        """Assigning a Final through a `nonlocal` declaration is rejected."""
        codestr = """
        from typing import Final
        a: Final[int] = 1337
        def fn():
            def fn2():
                nonlocal a
                a = 0
        """
        with self.assertRaisesRegex(
            TypedSyntaxError, "Cannot assign to a Final variable"
        ):
            self.compile(codestr, modname="foo")
    def test_final_reassign_nonlocal_shadowed(self):
        """`nonlocal` binding to a non-Final shadowing local is allowed."""
        codestr = """
        from typing import Final
        a: Final[int] = 1337
        def fn():
            a = 3
            def fn2():
                nonlocal a
                # should be allowed, we're assigning to the shadowed
                # value
                a = 0
        """
        self.compile(codestr, modname="foo")
    def test_final_reassigned_in_tuple(self):
        """A Final as a tuple-unpacking target is rejected."""
        codestr = """
        from typing import Final
        x: Final[int] = 0xdeadbeef
        y = 3
        x, y = 4, 5
        """
        with self.assertRaisesRegex(
            TypedSyntaxError, "Cannot assign to a Final variable"
        ):
            self.compile(codestr, modname="foo")
def test_final_reassigned_in_loop(self):
codestr = """
from typing import Final
x: Final[int] = 0xdeadbeef
for x in [1, 3, 5]:
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final variable"
):
self.compile(codestr, modname="foo")
def test_final_reassigned_in_except(self):
codestr = """
from typing import Final
def f():
e: Final[int] = 3
try:
x = 1 + "2"
except Exception as e:
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final variable"
):
self.compile(codestr, modname="foo")
def test_final_reassigned_in_loop_target_tuple(self):
codestr = """
from typing import Final
x: Final[int] = 0xdeadbeef
for x, y in [(1, 2)]:
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final variable"
):
self.compile(codestr, modname="foo")
def test_final_reassigned_in_ctxmgr(self):
codestr = """
from typing import Final
x: Final[int] = 0xdeadbeef
with open("lol") as x:
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final variable"
):
self.compile(codestr, modname="foo")
def test_final_generic_reassign(self):
codestr = """
from typing import Final
x: Final[int] = 0xdeadbeef
x = 0x5ca1ab1e
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final variable"
):
self.compile(codestr, modname="foo")
def test_final_callable_protocol_retains_inferred_type(self):
codestr = """
from typing import Final, Protocol
def foo(x: int) -> str:
return "A"
class CallableProtocol(Protocol):
def __call__(self, x: int) -> str:
pass
f: Final[CallableProtocol] = foo
def bar(x: int) -> str:
return f(x)
"""
with self.in_module(codestr) as mod:
f = mod.bar
self.assertInBytecode(f, "INVOKE_FUNCTION")
def test_final_in_args(self):
codestr = """
from typing import Final
def f(a: Final) -> None:
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError,
"Final annotation is only valid in initial declaration",
):
self.compile(codestr, modname="foo")
def test_final_returns(self):
codestr = """
from typing import Final
def f() -> Final[int]:
return 1
"""
with self.assertRaisesRegex(
TypedSyntaxError,
"Final annotation is only valid in initial declaration",
):
self.compile(codestr, modname="foo")
def test_final_decorator(self):
codestr = """
from typing import final
class C:
@final
def f():
pass
"""
self.compile(codestr, modname="foo")
def test_final_decorator_override(self):
codestr = """
from typing import final
class C:
@final
def f():
pass
class D(C):
def f():
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final attribute of foo.D:f"
):
self.compile(codestr, modname="foo")
def test_final_decorator_override_with_assignment(self):
codestr = """
from typing import final
class C:
@final
def f():
pass
class D(C):
f = print
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final attribute of foo.D:f"
):
self.compile(codestr, modname="foo")
def test_final_decorator_override_transitivity(self):
codestr = """
from typing import final
class C:
@final
def f():
pass
class D(C):
pass
class E(D):
def f():
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Cannot assign to a Final attribute of foo.E:f"
):
self.compile(codestr, modname="foo")
def test_final_decorator_class(self):
codestr = """
from typing import final
@final
class C:
def f(self):
pass
def f():
return C().f()
"""
c = self.compile(codestr, modname="foo")
f = self.find_code(c, "f")
self.assertInBytecode(f, "INVOKE_FUNCTION")
def test_final_decorator_class_inheritance(self):
codestr = """
from typing import final
@final
class C:
pass
class D(C):
pass
"""
with self.assertRaisesRegex(
TypedSyntaxError, "Class `foo.D` cannot subclass a Final class: `foo.C`"
):
self.compile(codestr, modname="foo")
def test_final_decorator_class_nonstatic_subclass(self):
codestr = """
from typing import final
@final
class C:
pass
"""
with self.in_module(codestr) as mod:
with self.assertRaisesRegex(
TypeError, "type 'C' is not an acceptable base type"
):
class D(mod.C):
pass
def test_final_decorator_class_dynamic(self):
"""We should never mark DYNAMIC_TYPE as final."""
codestr = """
from typing import final, Generic, NamedTuple
@final
class NT(NamedTuple):
x: int
class C(Generic):
pass
"""
# No TypedSyntaxError "cannot inherit from Final class 'dynamic'"
self.compile(codestr)
def test_final_constant_folding_int(self):
codestr = """
from typing import Final
X: Final[int] = 1337
def plus_1337(i: int) -> int:
return i + X
"""
with self.in_module(codestr) as mod:
plus_1337 = mod.plus_1337
self.assertInBytecode(plus_1337, "LOAD_CONST", 1337)
self.assertNotInBytecode(plus_1337, "LOAD_GLOBAL")
self.assertEqual(plus_1337(3), 1340)
def test_final_constant_folding_bool(self):
codestr = """
from typing import Final
X: Final[bool] = True
def f() -> bool:
return not X
"""
with self.in_module(codestr) as mod:
f = mod.f
self.assertInBytecode(f, "LOAD_CONST", True)
self.assertNotInBytecode(f, "LOAD_GLOBAL")
self.assertFalse(f())
def test_final_constant_folding_str(self):
codestr = """
from typing import Final
X: Final[str] = "omg"
def f() -> str:
return X[1]
"""
with self.in_module(codestr) as mod:
f = mod.f
self.assertInBytecode(f, "LOAD_CONST", "omg")
self.assertNotInBytecode(f, "LOAD_GLOBAL")
self.assertEqual(f(), "m")
def test_final_constant_folding_disabled_on_nonfinals(self):
codestr = """
from typing import Final
X: str = "omg"
def f() -> str:
return X[1]
"""
with self.in_module(codestr) as mod:
f = mod.f
self.assertNotInBytecode(f, "LOAD_CONST", "omg")
self.assertInBytecode(f, "LOAD_GLOBAL", "X")
self.assertEqual(f(), "m")
def test_final_constant_folding_disabled_on_nonconstant_finals(self):
codestr = """
from typing import Final
def p() -> str:
return "omg"
X: Final[str] = p()
def f() -> str:
return X[1]
"""
with self.in_module(codestr) as mod:
f = mod.f
self.assertNotInBytecode(f, "LOAD_CONST", "omg")
self.assertInBytecode(f, "LOAD_GLOBAL", "X")
self.assertEqual(f(), "m")
def test_final_constant_folding_shadowing(self):
codestr = """
from typing import Final
X: Final[str] = "omg"
def f() -> str:
X = "lol"
return X[1]
"""
with self.in_module(codestr) as mod:
f = mod.f
self.assertInBytecode(f, "LOAD_CONST", "lol")
self.assertNotInBytecode(f, "LOAD_GLOBAL", "omg")
self.assertEqual(f(), "o")
def test_final_constant_folding_in_module_scope(self):
codestr = """
from typing import Final
X: Final[int] = 21
y = X + 3
"""
c = self.compile(codestr, modname="foo.py")
self.assertNotInBytecode(c, "LOAD_NAME", "X")
with self.in_module(codestr) as mod:
self.assertEqual(mod.y, 24)
def test_final_constant_in_module_scope(self):
codestr = """
from typing import Final
X: Final[int] = 21
"""
with self.in_module(codestr) as mod:
self.assertEqual(mod.__final_constants__, ("X",))
def test_final_nonconstant_in_module_scope(self):
codestr = """
from typing import Final
def p() -> str:
return "omg"
X: Final[str] = p()
"""
with self.in_module(codestr) as mod:
self.assertEqual(mod.__final_constants__, ())
def test_final_method_in_class_slots(self):
codestr = """
from typing import final
class C:
@final
def foo(self):
return self
def bar(self):
return self
"""
with self.in_module(codestr) as mod:
self.assertEqual(mod.C.__final_method_names__, ("foo",))
def test_final_method_in_class_slots_with_inheritance(self):
codestr = """
from typing import final
class C:
@final
def foo(self):
return self
def bar(self):
return self
class D(C):
@final
def bar(self):
return self
def baz(self):
return self
class E(D):
@final
def baz(self):
return self
class F(D):
def baz(self):
return self
"""
with self.in_module(codestr) as mod:
self.assertEqual(mod.C.__final_method_names__, ("foo",))
self.assertEqual(mod.D.__final_method_names__, ("bar", "foo"))
self.assertEqual(mod.E.__final_method_names__, ("bar", "baz", "foo"))
self.assertEqual(mod.F.__final_method_names__, ("bar", "foo"))
def test_final_method_in_class_nonstatic_subclass_slots(self):
codestr = """
from typing import final
class C:
@final
def foo(self):
return self
def bar(self):
return self
"""
with self.in_module(codestr) as mod:
class D(mod.C):
pass
self.assertEqual(D.__final_method_names__, ("foo",))
def test_final_method_nonstatic_override_throws_runtime_type_error(self):
codestr = """
from typing import final
class C:
@final
def foo(self):
return self
def bar(self):
return self
"""
with self.in_module(codestr) as mod:
with self.assertRaisesRegex(
TypeError, r"'foo' overrides a final method in the static base class"
):
class D(mod.C):
def foo(self):
return self
def test_final_method_nonstatic_override_of_static_subclass_throws_runtime_type_error(
self,
):
codestr = """
from typing import final
class C:
@final
def foo(self):
return self
def bar(self):
return self
class D(C):
pass
"""
with self.in_module(codestr) as mod:
with self.assertRaisesRegex(
TypeError, r"'foo' overrides a final method in the static base class"
):
class E(mod.D):
def foo(self):
return self
def test_final_method_nonstatic_subclass_of_static_class_throws_runtime_type_error(
self,
):
codestr = """
from typing import final
class C:
@final
def foo(self):
return self
def bar(self):
return self
"""
with self.in_module(codestr) as mod:
with self.assertRaisesRegex(
TypeError, r"'foo' overrides a final method in the static base class"
):
class D(mod.C):
pass
class E(D):
def foo(self):
return self
def test_final_method_with_other_decorator_throws_type_error(
self,
):
codestr = """
from typing import final
class C:
@final
@staticmethod
def foo():
return self
@staticmethod
@final
def bar():
return self
"""
with self.in_module(codestr) as mod:
with self.assertRaisesRegex(
TypeError, r"'foo' overrides a final method in the static base class"
):
class D(mod.C):
@staticmethod
def foo():
return self
with self.assertRaisesRegex(
TypeError, r"'bar' overrides a final method in the static base class"
):
class D(mod.C):
@staticmethod
def bar():
return self
def test_updating_slot_of_final_method_in_subclass_throws_type_error(
self,
):
codestr = """
from typing import final
class C:
@final
def foo(self) -> int:
return 0
"""
with self.in_module(codestr) as mod:
with self.assertRaisesRegex(
TypeError, r"'foo' overrides a final method in the static base class"
):
class D(mod.C):
pass
D.foo = lambda self: 0
def test_updating_slot_of_final_method_in_base_class_succeeds(
self,
):
codestr = """
from typing import final
class C:
@final
def foo(self) -> int:
return 0
"""
with self.in_module(codestr) as mod:
class D(mod.C):
pass
mod.C.foo = lambda self: 1
self.assertEqual(mod.C().foo(), 1)
def test_final_method_in_non_final_class_emits_invoke_function(
self,
):
codestr = """
from typing import final
class C:
def __init__(self, x: int) -> None:
self.x = x
@final
def foo(self) -> int:
return self.x
def foo(c: C) -> int:
return c.foo()
"""
with self.in_module(codestr) as mod:
class D(mod.C):
def __init__(self):
super().__init__(5)
self.assertInBytecode(mod.foo, "INVOKE_FUNCTION")
self.assertEqual(mod.foo(mod.C(4)), 4)
self.assertEqual(mod.foo(D()), 5)
def test_final_method_in_subclass_of_non_final_class_emits_invoke_function(
self,
):
codestr = """
from typing import final
class C:
def __init__(self, x: int) -> None:
self.x = x
@final
def foo(self) -> int:
return self.x
class D(C):
def __init__(self) -> None:
self.x = 4
def foo(d: D) -> int:
return d.foo()
"""
with self.in_module(codestr) as mod:
self.assertInBytecode(
mod.foo, "INVOKE_FUNCTION", ((mod.__name__, "C", "foo"), 1)
)
self.assertEqual(mod.foo(mod.D()), 4)
def test_final_classmethod_in_non_final_nonstatic_class_emits_invoke_function(
self,
):
codestr = """
from typing import ClassVar, final
class C:
CV: ClassVar[int] = 42
@final
@classmethod
def foo(cls) -> int:
return cls.CV
def foo(c: C) -> int:
return c.foo()
"""
with self.in_module(codestr) as mod:
class D(mod.C):
CV: ClassVar[int] = 84
self.assertInBytecode(
mod.foo, "INVOKE_FUNCTION", ((mod.__name__, "C", "foo"), 1)
)
self.assertEqual(mod.foo(mod.C()), 42)
self.assertEqual(mod.foo(D()), 84)
def test_final_classmethod_in_non_final_static_class_emits_invoke_function(
self,
):
codestr = """
from typing import ClassVar, final
class C:
CV: ClassVar[int] = 42
@final
@classmethod
def foo(cls) -> int:
return cls.CV
class D(C):
CV: ClassVar[int] = 63
def foo(c: C) -> int:
return c.foo()
"""
with self.in_module(codestr) as mod:
self.assertInBytecode(
mod.foo, "INVOKE_FUNCTION", ((mod.__name__, "C", "foo"), 1)
)
self.assertEqual(mod.foo(mod.C()), 42)
self.assertEqual(mod.foo(mod.D()), 63)
| 26.396096
| 90
| 0.50324
| 2,390
| 22,991
| 4.676987
| 0.080753
| 0.04652
| 0.074432
| 0.100823
| 0.82224
| 0.775452
| 0.738683
| 0.718912
| 0.676597
| 0.658526
| 0
| 0.00955
| 0.403332
| 22,991
| 870
| 91
| 26.426437
| 0.805292
| 0.004697
| 0
| 0.770115
| 0
| 0
| 0.442988
| 0.002207
| 0
| 0
| 0.004414
| 0
| 0.099138
| 1
| 0.08046
| false
| 0.034483
| 0.076149
| 0.007184
| 0.242816
| 0.001437
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
351b678a89f6c26ca7f25d35f75f15f1f5b46220
| 3,184
|
py
|
Python
|
tests/unit_tests/test_tethys_apps/test_templatetags/test_tags.py
|
rfun/tethys
|
a19df1658c0e525ef8b0442df6a23c3fd067551b
|
[
"BSD-2-Clause"
] | 79
|
2015-10-05T13:13:28.000Z
|
2022-02-01T12:30:33.000Z
|
tests/unit_tests/test_tethys_apps/test_templatetags/test_tags.py
|
rfun/tethys
|
a19df1658c0e525ef8b0442df6a23c3fd067551b
|
[
"BSD-2-Clause"
] | 542
|
2015-08-12T22:11:32.000Z
|
2022-03-29T22:18:08.000Z
|
tests/unit_tests/test_tethys_apps/test_templatetags/test_tags.py
|
rfun/tethys
|
a19df1658c0e525ef8b0442df6a23c3fd067551b
|
[
"BSD-2-Clause"
] | 71
|
2016-01-16T01:03:41.000Z
|
2022-03-31T17:55:54.000Z
|
import unittest
from unittest import mock
from tethys_apps.templatetags import tags as t
class TestTags(unittest.TestCase):
def setUp(self):
# app_list
self.app_names = ['app1', 'app2', 'app3', 'app4', 'app5', 'app6']
self.tag_names = ['tag1', 'tag_2', 'tag 3', 'tag four', 'Tag Five', 'tag6']
self.tag_classes = ['tag1', 'tag_2', 'tag-3', 'tag-four', 'tag-five', 'tag6']
self.tag_pairs = [
('tag1', 'Tag1'),
('tag_2', 'Tag_2'),
('tag-3', 'Tag 3'),
('tag-four', 'Tag Four'),
('tag-five', 'Tag Five'),
('tag6', 'Tag6'),
]
# Object apps
self.mock_object_apps = {'configured': []}
for i, app_name in enumerate(self.app_names):
mock_app = mock.MagicMock(tags=','.join(self.tag_names[:i+1]))
mock_app.name = app_name
self.mock_object_apps['configured'].append(mock_app)
# Dictionary apps
self.mock_dict_apps = {'configured': []}
for i, app_name in enumerate(self.app_names):
mock_app = dict(tags=','.join(self.tag_names[:i+1]), name=app_name)
self.mock_dict_apps['configured'].append(mock_app)
def tearDown(self):
pass
def test_get_tag_class(self):
ret_tag_str = t.get_tag_class(self.mock_object_apps['configured'][-1])
ret_tag_list = ret_tag_str.split(' ')
self.assertEqual(sorted(self.tag_classes), sorted(ret_tag_list))
def test_get_tag_class_dict(self):
ret_tag_str = t.get_tag_class(self.mock_dict_apps['configured'][-1])
ret_tag_list = ret_tag_str.split(' ')
self.assertEqual(sorted(self.tag_classes), sorted(ret_tag_list))
def test_get_tags_from_apps(self):
ret_tag_list = t.get_tags_from_apps(self.mock_object_apps)
self.assertEqual(sorted(self.tag_pairs), sorted(ret_tag_list))
def test_get_tags_from_apps_dict(self):
ret_tag_list = t.get_tags_from_apps(self.mock_dict_apps)
self.assertEqual(sorted(self.tag_pairs), sorted(ret_tag_list))
def test_get_tags_from_apps_object_disabled(self):
self.mock_object_apps['configured'].append(mock.MagicMock(tags='disabled', enabled=False))
ret_tag_list = t.get_tags_from_apps(self.mock_object_apps)
self.assertNotIn('disabled', ret_tag_list)
def test_get_tags_from_apps_dict_disabled(self):
self.mock_dict_apps['configured'].append({'tags': 'disabled', 'enabled': False})
ret_tag_list = t.get_tags_from_apps(self.mock_dict_apps)
self.assertNotIn('disabled', ret_tag_list)
def test_get_tags_from_apps_object_dont_show(self):
self.mock_object_apps['configured'].append(mock.MagicMock(tags='disabled', show_in_apps_library=False))
ret_tag_list = t.get_tags_from_apps(self.mock_object_apps)
self.assertNotIn('disabled', ret_tag_list)
def test_get_tags_from_apps_dict_dont_show(self):
self.mock_dict_apps['configured'].append({'tags': 'disabled', 'show_in_apps_library': False})
ret_tag_list = t.get_tags_from_apps(self.mock_dict_apps)
self.assertNotIn('disabled', ret_tag_list)
| 42.453333
| 111
| 0.661118
| 454
| 3,184
| 4.264317
| 0.138767
| 0.061983
| 0.082645
| 0.092975
| 0.845041
| 0.772727
| 0.756198
| 0.713843
| 0.713843
| 0.676653
| 0
| 0.01024
| 0.202575
| 3,184
| 74
| 112
| 43.027027
| 0.752265
| 0.011307
| 0
| 0.315789
| 0
| 0
| 0.115458
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 1
| 0.175439
| false
| 0.017544
| 0.052632
| 0
| 0.245614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
352e818885ca0c688b9130dd5f9cb1cbd205b2ef
| 2,412
|
py
|
Python
|
Semana 4/maximal_manhattan_distance/test_pytest_plane.py
|
juandausa/CompetitiveProgrammingCoreSkills
|
1ef0d902b6c05afba4b44c34ade87c376187fd1d
|
[
"MIT"
] | null | null | null |
Semana 4/maximal_manhattan_distance/test_pytest_plane.py
|
juandausa/CompetitiveProgrammingCoreSkills
|
1ef0d902b6c05afba4b44c34ade87c376187fd1d
|
[
"MIT"
] | null | null | null |
Semana 4/maximal_manhattan_distance/test_pytest_plane.py
|
juandausa/CompetitiveProgrammingCoreSkills
|
1ef0d902b6c05afba4b44c34ade87c376187fd1d
|
[
"MIT"
] | null | null | null |
from plane import Plane
def test_create():
assert Plane() is not None
assert Plane().points_count == 0
assert Plane().maximum_distance_between_points == 0
def test_add_first_point():
plane = Plane()
plane.add_point((1,2))
assert plane.points_count == 1
assert plane.maximum_distance_between_points == 0
assert plane.maximum_distance_points[0].point.x_coordinate == 1
assert plane.maximum_distance_points[0].point.y_coordinate == 2
assert plane.maximum_distance_points[1].point.x_coordinate == 1
assert plane.maximum_distance_points[1].point.y_coordinate == 2
def test_add_two_points():
plane = Plane()
plane.add_point((1,2))
plane.add_point((2,2))
assert plane.points_count == 2
assert plane.maximum_distance_between_points > 0
def test_statment_one():
'''
1 1 - 1 1
2 1 - 1 2
1 3 - 2 3
'''
plane = Plane()
plane.add_point((1, 1))
assert 1 in plane.get_points_with_maximal_manhattan_distance()
assert 1 in plane.get_points_with_maximal_manhattan_distance()
plane.add_point((2, 1))
assert 1 in plane.get_points_with_maximal_manhattan_distance()
assert 2 in plane.get_points_with_maximal_manhattan_distance()
plane.add_point((1, 3))
assert 3 in plane.get_points_with_maximal_manhattan_distance()
assert 2 in plane.get_points_with_maximal_manhattan_distance()
def test_statment_two():
'''
2 2 - 1 1
1 3 - 1 2
1 1 - 1 3
3 1 - 4 2
3 3 - 4 2
'''
plane = Plane()
plane.add_point((2, 2))
assert 1 in plane.get_points_with_maximal_manhattan_distance()
assert 1 in plane.get_points_with_maximal_manhattan_distance()
plane.add_point((1, 2))
assert 1 in plane.get_points_with_maximal_manhattan_distance()
assert 2 in plane.get_points_with_maximal_manhattan_distance()
plane.add_point((1, 1))
assert 1 in plane.get_points_with_maximal_manhattan_distance()
assert 3 in plane.get_points_with_maximal_manhattan_distance()
plane.add_point((3, 1))
assert 4 in plane.get_points_with_maximal_manhattan_distance()
assert 2 in plane.get_points_with_maximal_manhattan_distance()
plane.add_point((3, 3))
assert 3 in plane.get_points_with_maximal_manhattan_distance()
assert 5 in plane.get_points_with_maximal_manhattan_distance()
| 33.5
| 68
| 0.703151
| 361
| 2,412
| 4.34903
| 0.096953
| 0.071338
| 0.101911
| 0.163057
| 0.878344
| 0.842675
| 0.810191
| 0.703185
| 0.675159
| 0.552866
| 0
| 0.044025
| 0.208955
| 2,412
| 72
| 69
| 33.5
| 0.778826
| 0.034411
| 0
| 0.520833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5625
| 1
| 0.104167
| false
| 0
| 0.020833
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
10304265f5af402c674c84d620ba3eb990ff3010
| 198
|
py
|
Python
|
HiggsAnalysis/Skimming/python/higgsToWW2Leptons_OutputModule_cff.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 6
|
2017-09-08T14:12:56.000Z
|
2022-03-09T23:57:01.000Z
|
HiggsAnalysis/Skimming/python/higgsToWW2Leptons_OutputModule_cff.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 545
|
2017-09-19T17:10:19.000Z
|
2022-03-07T16:55:27.000Z
|
HiggsAnalysis/Skimming/python/higgsToWW2Leptons_OutputModule_cff.py
|
SWuchterl/cmssw
|
769b4a7ef81796579af7d626da6039dfa0347b8e
|
[
"Apache-2.0"
] | 14
|
2017-10-04T09:47:21.000Z
|
2019-10-23T18:04:45.000Z
|
import FWCore.ParameterSet.Config as cms
from HiggsAnalysis.Skimming.higgsToWW2LeptonsOutputModuleAODSIM_cfi import *
from HiggsAnalysis.Skimming.higgsToWW2LeptonsOutputModuleRECOSIM_cfi import *
| 33
| 77
| 0.888889
| 18
| 198
| 9.666667
| 0.666667
| 0.195402
| 0.287356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.070707
| 198
| 5
| 78
| 39.6
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
103630b52590bb5b6ef714050da184e2e3b0a7f9
| 151
|
py
|
Python
|
api/users/me/__init__.py
|
ezchat/backend
|
ac6ff93156ff5363084c356165f1e6d3e5684675
|
[
"Apache-2.0"
] | 3
|
2019-03-31T14:41:02.000Z
|
2019-04-05T06:45:59.000Z
|
api/users/me/__init__.py
|
ezchat/backend
|
ac6ff93156ff5363084c356165f1e6d3e5684675
|
[
"Apache-2.0"
] | 1
|
2021-01-29T15:50:20.000Z
|
2021-01-29T15:50:20.000Z
|
api/users/me/__init__.py
|
ezchat/backend
|
ac6ff93156ff5363084c356165f1e6d3e5684675
|
[
"Apache-2.0"
] | null | null | null |
# pylint: disable=no-name-in-module
from api.users.me.channels import UserMeChannels # noqa: F401
from api.users.me.index import UserMe # noqa: F401
| 37.75
| 62
| 0.768212
| 24
| 151
| 4.833333
| 0.708333
| 0.12069
| 0.206897
| 0.241379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.125828
| 151
| 3
| 63
| 50.333333
| 0.833333
| 0.364238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
109c12fe80febd4e5ebc90999283a314aaacea80
| 127
|
py
|
Python
|
molo/helpers/request_schemas.py
|
djvaroli/samsung_oct
|
83924a36d18a56b6cdaadffaf47a9218c7084264
|
[
"MIT"
] | 2
|
2021-07-04T16:34:08.000Z
|
2021-07-07T23:55:18.000Z
|
molo/helpers/request_schemas.py
|
janhavi-giri/samsung_oct
|
83924a36d18a56b6cdaadffaf47a9218c7084264
|
[
"MIT"
] | null | null | null |
molo/helpers/request_schemas.py
|
janhavi-giri/samsung_oct
|
83924a36d18a56b6cdaadffaf47a9218c7084264
|
[
"MIT"
] | 3
|
2021-07-10T01:14:00.000Z
|
2021-09-03T04:22:28.000Z
|
from typing import *
from pydantic import BaseModel
class GeneratePDFReportSchema(BaseModel):
predictionData: List[dict]
| 18.142857
| 41
| 0.80315
| 13
| 127
| 7.846154
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141732
| 127
| 7
| 42
| 18.142857
| 0.93578
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
529bb56fe77a311b3ebf4e61d8e2f29753ab9c57
| 11,099
|
py
|
Python
|
tests/test_cmd.py
|
KrishanBhasin/giraffez
|
53dbcb01c447d80109bbe94f7f0730e05abfcfbc
|
[
"Apache-2.0"
] | 122
|
2016-08-18T21:12:58.000Z
|
2021-11-24T14:45:19.000Z
|
tests/test_cmd.py
|
KrishanBhasin/giraffez
|
53dbcb01c447d80109bbe94f7f0730e05abfcfbc
|
[
"Apache-2.0"
] | 68
|
2016-08-31T18:19:16.000Z
|
2021-11-01T19:21:22.000Z
|
tests/test_cmd.py
|
KrishanBhasin/giraffez
|
53dbcb01c447d80109bbe94f7f0730e05abfcfbc
|
[
"Apache-2.0"
] | 44
|
2016-08-19T01:22:21.000Z
|
2022-03-23T17:39:40.000Z
|
# -*- coding: utf-8 -*-
import pytest
from giraffez._teradata import RequestEnded, StatementEnded, StatementInfoEnded
import giraffez
from giraffez.constants import *
from giraffez.errors import *
from giraffez.types import *
class ResultsHelper:
"""
Helps to emulate how exceptions are raised when working with the CLIv2 so
that the control flow will be adequately represented.
"""
def __init__(self, rows):
self.first = True
self.index = 0
self.rows = rows
def get(self):
if self.first:
self.first = False
raise StatementInfoEnded
if self.index >= len(self.rows):
raise RequestEnded
row = self.rows[self.index]
self.index += 1
return row
def __call__(self):
return self.get()
@pytest.mark.usefixtures('config', 'context')
class TestCmd(object):
def test_results(self, mocker):
connect_mock = mocker.patch('giraffez.cmd.TeradataCmd._connect')
mock_columns = mocker.patch("giraffez.cmd.Cursor._columns")
cmd = giraffez.Cmd()
query = "select * from db1.info"
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns.return_value = columns
rows = [
["value1", "value2", "value3"],
["value1", "value2", "value3"],
["value1", "value2", "value3"],
]
expected_rows = [
{"col1": "value1", "col2": "value2", "col3": "value3"},
{"col1": "value1", "col2": "value2", "col3": "value3"},
{"col1": "value1", "col2": "value2", "col3": "value3"},
]
cmd.cmd = mocker.MagicMock()
cmd.cmd.fetchone.side_effect = ResultsHelper(rows)
result = list(cmd.execute(query))
assert [x.items() for x in result] == expected_rows
cmd._close()
# This ensures that the config was proper mocked
connect_mock.assert_called_with('db1', 'user123', 'pass456', None, None)
def test_invalid_credentials(self, mocker):
connect_mock = mocker.patch('giraffez.cmd.TeradataCmd._connect')
connect_mock.side_effect = InvalidCredentialsError("test")
with pytest.raises(InvalidCredentialsError):
cmd = giraffez.Cmd(protect=True)
cmd._close()
@pytest.mark.usefixtures('config', 'context', 'tmpfiles')
class TestInsert(object):
def test_insert_from_file(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(100):
rows.append("|".join(["value1", "value2", "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
assert result.get('count') == 100
def test_insert_from_file_quoted(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(99):
rows.append("|".join(["value1", "value2", "value3"]))
rows.append("|".join(["value1",'"value2|withpipe"', "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
assert result.get('count') == 100
def test_insert_from_file_single_quoted(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(99):
rows.append("|".join(["value1", "value2", "value3"]))
rows.append("|".join(["value1","'value2|withpipe'", "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|", quotechar="'")
assert result.get('count') == 100
def test_insert_from_file_nonstandard_quote(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
rows = []
for i in range(99):
rows.append("|".join(["value1", "value2", "value3"]))
rows.append("|".join(['va"lue1','$value2|withpipe"and"quote$', "value3"]))
f.write("\n".join(rows))
with giraffez.Cmd() as cmd:
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|", quotechar="$")
assert result.get('count') == 100
def test_insert_from_file_error(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3", "value4"]))
f.write("\n")
with giraffez.Cmd() as cmd:
cmd.panic = False
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
def test_insert_from_file_error_panic(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3", "value4"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeEncodeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
def test_insert_from_file_invalid_header(self, mocker, tmpfiles):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
# Invalid column (blank string)
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3", "", ""]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
# Invalid column (wrong name)
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col4"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
# Too many columns (duplicate name)
with open(tmpfiles.load_file, 'w') as f:
f.write("|".join(["col1", "col2", "col3", "col3"]))
f.write("\n")
f.write("|".join(["value1", "value2", "value3"]))
f.write("\n")
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeEncodeError):
result = cmd.insert("db1.test", tmpfiles.load_file, delimiter="|")
print(result)
def test_insert_insert_no_specify_fields(self, mocker):
mock_connect = mocker.patch("giraffez.cmd.TeradataCmd._connect")
mock_execute = mocker.patch("giraffez.cmd.TeradataCmd.execute")
columns = Columns([
("col1", VARCHAR_NN, 50, 0, 0),
("col2", VARCHAR_N, 50, 0, 0),
("col3", VARCHAR_N, 50, 0, 0),
])
mock_columns = mocker.patch("giraffez.cmd.TeradataCmd.fetch_columns")
mock_columns.return_value = columns
rows = [
("value1", "value3"),
("value1", "value3"),
("value1", "value3"),
]
with giraffez.Cmd() as cmd:
with pytest.raises(GiraffeEncodeError):
cmd.insert("db1.test", rows)
| 34.902516
| 93
| 0.556987
| 1,251
| 11,099
| 4.808153
| 0.127098
| 0.071322
| 0.085287
| 0.098753
| 0.794347
| 0.778221
| 0.758603
| 0.748961
| 0.748961
| 0.73217
| 0
| 0.034852
| 0.283899
| 11,099
| 317
| 94
| 35.012618
| 0.721943
| 0.026038
| 0
| 0.735043
| 0
| 0
| 0.170962
| 0.087614
| 0
| 0
| 0
| 0
| 0.025641
| 1
| 0.055556
| false
| 0.004274
| 0.025641
| 0.004274
| 0.102564
| 0.017094
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
52b1e2625190a2475c8d8e6c4e8668c8a9ca285c
| 112
|
py
|
Python
|
iberzetsn/__init__.py
|
jwg4/iberzetsn
|
df40929317fa29c7185994d78121bc62a157a4e6
|
[
"MIT"
] | null | null | null |
iberzetsn/__init__.py
|
jwg4/iberzetsn
|
df40929317fa29c7185994d78121bc62a157a4e6
|
[
"MIT"
] | null | null | null |
iberzetsn/__init__.py
|
jwg4/iberzetsn
|
df40929317fa29c7185994d78121bc62a157a4e6
|
[
"MIT"
] | null | null | null |
from .to_yiddish import roman_to_yiddish # noqa: F401
from .from_yiddish import yiddish_to_roman # noqa: F401
| 37.333333
| 56
| 0.803571
| 18
| 112
| 4.666667
| 0.388889
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 0.142857
| 112
| 2
| 57
| 56
| 0.8125
| 0.1875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
52bc7f07f4cb439bdce041fcb9dacd6775cb9e6a
| 160
|
py
|
Python
|
_teaching/csci127-summer-2020/readings/activities/alternating.py
|
lgw2/lgw2.github.io
|
3e2b0fb849407c26a64afd8e97be0eff7ce07f9b
|
[
"MIT"
] | null | null | null |
_teaching/csci127-summer-2020/readings/activities/alternating.py
|
lgw2/lgw2.github.io
|
3e2b0fb849407c26a64afd8e97be0eff7ce07f9b
|
[
"MIT"
] | null | null | null |
_teaching/csci127-summer-2020/readings/activities/alternating.py
|
lgw2/lgw2.github.io
|
3e2b0fb849407c26a64afd8e97be0eff7ce07f9b
|
[
"MIT"
] | null | null | null |
def alternating(list_of_ints):
    # Course-activity stub: the intended behavior is not shown in this file —
    # presumably it reports something about alternation of the values in
    # list_of_ints (TODO: implement per the activity description).
    pass
# Smoke-test calls; each prints None until the stub above is implemented.
print(alternating([1, 2, 3, 4]))
print(alternating([10, 11, 1, 12]))
print(alternating([10, 21, 22, -5, 100, 101, 2]))
| 22.857143
| 49
| 0.64375
| 27
| 160
| 3.740741
| 0.703704
| 0.475248
| 0.356436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.182482
| 0.14375
| 160
| 6
| 50
| 26.666667
| 0.554745
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0
| 0
| 0.2
| 0.6
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
|
0
| 6
|
52c9ae943139a0b2b589b4bca73282f80a6d9b0b
| 548
|
py
|
Python
|
xiaomi_thermo_unified/sensors/uuids.py
|
h4/xiaomi_thermo_unified
|
ee616d15375c9768ce6beb28fb71dc23f80e95ef
|
[
"MIT"
] | 1
|
2020-01-04T13:14:05.000Z
|
2020-01-04T13:14:05.000Z
|
xiaomi_thermo_unified/sensors/uuids.py
|
h4/xiaomi_thermo_unified
|
ee616d15375c9768ce6beb28fb71dc23f80e95ef
|
[
"MIT"
] | 1
|
2020-01-04T13:25:14.000Z
|
2020-01-13T15:32:25.000Z
|
xiaomi_thermo_unified/sensors/uuids.py
|
h4/xiaomi_thermo_unified
|
ee616d15375c9768ce6beb28fb71dc23f80e95ef
|
[
"MIT"
] | null | null | null |
# Bluetooth GATT characteristic UUIDs.  The first six are standard 16-bit
# characteristics expanded on the Bluetooth base UUID
# (0000xxxx-0000-1000-8000-00805f9b34fb).
DEVICE_NAME = '00002a00-0000-1000-8000-00805f9b34fb'        # 0x2A00 Device Name
MODEL_NUMBER = '00002a24-0000-1000-8000-00805f9b34fb'       # 0x2A24 Model Number String
SERIAL_NUMBER = '00002a25-0000-1000-8000-00805f9b34fb'      # 0x2A25 Serial Number String
FIRMWARE_VERSION = '00002a26-0000-1000-8000-00805f9b34fb'   # 0x2A26 Firmware Revision
HARDWARE_VERSION = '00002a27-0000-1000-8000-00805f9b34fb'   # 0x2A27 Hardware Revision
MANUFACTURER_NAME = '00002a29-0000-1000-8000-00805f9b34fb'  # 0x2A29 Manufacturer Name
# Vendor-specific data characteristics for the supported Xiaomi sensors
# (presumably temperature/humidity payloads — confirm against device docs).
LYWSD02_DATA = 'EBE0CCC1-7A0A-4B0C-8A1A-6FF2997DA3A6'
CGG_DATA = '00000100-0000-1000-8000-00805f9b34fb'
MJHT_DATA = '00000001-0000-1000-8000-00805f9b34fb'
MJHT_BATTERY = '00002a19-0000-1000-8000-00805f9b34fb'       # 0x2A19 Battery Level
| 42.153846
| 58
| 0.813869
| 70
| 548
| 6.228571
| 0.457143
| 0.165138
| 0.247706
| 0.495413
| 0.12844
| 0
| 0
| 0
| 0
| 0
| 0
| 0.507752
| 0.058394
| 548
| 12
| 59
| 45.666667
| 0.337209
| 0
| 0
| 0
| 0
| 0
| 0.658135
| 0.658135
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
52d0599457b3a4d1e9c6fbeb28368bc8b3c86e8f
| 105
|
py
|
Python
|
terrascript/oneandone/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/oneandone/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/oneandone/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/oneandone/__init__.py
import terrascript
class oneandone(terrascript.Provider):
    """Terraform provider block for 1&1 ("oneandone").

    All behavior is inherited from terrascript.Provider; the class name is
    what determines the provider name emitted in the Terraform config.
    """
    pass
| 15
| 38
| 0.8
| 11
| 105
| 7.272727
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12381
| 105
| 6
| 39
| 17.5
| 0.869565
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
52fa322693b734d7ba035d3e257737304a790258
| 4,625
|
py
|
Python
|
tests/integration/test_project_view.py
|
alexcfaber/katka-core
|
39a09f76b052f879ad5c0d53b74593c049930d5e
|
[
"Apache-2.0"
] | 1
|
2020-01-14T17:39:47.000Z
|
2020-01-14T17:39:47.000Z
|
tests/integration/test_project_view.py
|
alexcfaber/katka-core
|
39a09f76b052f879ad5c0d53b74593c049930d5e
|
[
"Apache-2.0"
] | 86
|
2019-02-01T14:22:16.000Z
|
2020-04-03T14:30:32.000Z
|
tests/integration/test_project_view.py
|
alexcfaber/katka-core
|
39a09f76b052f879ad5c0d53b74593c049930d5e
|
[
"Apache-2.0"
] | 5
|
2019-02-07T12:02:00.000Z
|
2020-03-19T15:57:35.000Z
|
from uuid import UUID
import pytest
from katka import models
@pytest.mark.django_db
class TestProjectViewSet:
    """Integration tests for the /projects/ REST endpoints.

    Relies on conftest fixtures: `client` (test client), `logged_in_user`,
    `my_team`/`my_project`, their `my_other_*` counterparts, and the
    `deactivated_*` variants.
    """

    def test_list(self, client, logged_in_user, my_team, my_project):
        response = client.get("/projects/")
        assert response.status_code == 200
        parsed = response.json()
        assert len(parsed) == 1
        assert parsed[0]["name"] == "Project D"
        assert parsed[0]["slug"] == "PRJD"
        parsed_team = parsed[0]["team"]
        assert UUID(parsed_team) == my_team.public_identifier

    def test_filtered_list(self, client, logged_in_user, my_team, my_project, my_other_team, my_other_project):
        response = client.get("/projects/?team=" + str(my_other_team.public_identifier))
        assert response.status_code == 200
        parsed = response.json()
        assert len(parsed) == 1
        assert parsed[0]["name"] == "Project 2"
        assert parsed[0]["slug"] == "PRJ2"
        parsed_team = parsed[0]["team"]
        assert UUID(parsed_team) == my_other_team.public_identifier

    def test_filtered_list_non_existing_team(
        self, client, logged_in_user, my_team, my_project, my_other_team, my_other_project
    ):
        # NOTE(review): this hits /applications/?project=..., not
        # /projects/?team=... as the test name suggests — looks like a
        # copy-paste from the applications tests; confirm intent before
        # changing the URL.
        response = client.get("/applications/?project=12345678-1234-5678-1234-567812345678")
        assert response.status_code == 200
        parsed = response.json()
        assert len(parsed) == 0

    def test_list_excludes_inactive(self, client, logged_in_user, my_team, deactivated_project):
        response = client.get("/projects/")
        assert response.status_code == 200
        parsed = response.json()
        assert len(parsed) == 0

    def test_get(self, client, logged_in_user, my_team, my_project):
        response = client.get(f"/projects/{my_project.public_identifier}/")
        assert response.status_code == 200
        parsed = response.json()
        assert parsed["name"] == "Project D"
        assert parsed["slug"] == "PRJD"
        assert UUID(parsed["team"]) == my_team.public_identifier

    def test_get_excludes_inactive(self, client, logged_in_user, my_team, deactivated_project):
        response = client.get(f"/projects/{deactivated_project.public_identifier}/")
        assert response.status_code == 404

    def test_delete(self, client, logged_in_user, my_team, my_project):
        response = client.delete(f"/projects/{my_project.public_identifier}/")
        assert response.status_code == 204
        # Delete is soft: the row survives with deleted=True.
        p = models.Project.objects.get(pk=my_project.public_identifier)
        assert p.deleted is True

    def test_update(self, client, logged_in_user, my_team, my_project):
        url = f"/projects/{my_project.public_identifier}/"
        data = {"name": "Project X", "slug": "PRJX", "team": my_team.public_identifier}
        response = client.put(url, data, content_type="application/json")
        assert response.status_code == 200
        p = models.Project.objects.get(pk=my_project.public_identifier)
        assert p.name == "Project X"

    def test_update_deactivated_team(self, client, logged_in_user, deactivated_team, my_project):
        url = f"/projects/{my_project.public_identifier}/"
        data = {"name": "Project X", "slug": "PRJX", "team": deactivated_team.public_identifier}
        response = client.put(url, data, content_type="application/json")
        assert response.status_code == 403

    def test_update_nonexistent_team(self, client, logged_in_user, my_project):
        url = f"/projects/{my_project.public_identifier}/"
        data = {"name": "Project X", "slug": "PRJX", "team": "00000000-0000-0000-0000-000000000000"}
        response = client.put(url, data, content_type="application/json")
        assert response.status_code == 403

    def test_partial_update(self, client, logged_in_user, my_team, my_project):
        url = f"/projects/{my_project.public_identifier}/"
        data = {"name": "Project X"}
        response = client.patch(url, data, content_type="application/json")
        assert response.status_code == 200
        p = models.Project.objects.get(pk=my_project.public_identifier)
        assert p.name == "Project X"

    def test_create(self, client, logged_in_user, my_team, my_project):
        before = models.Project.objects.count()
        # FIX: was f"/projects/" — an f-string with no placeholders (ruff F541).
        url = "/projects/"
        data = {"name": "Project X", "slug": "PRJX", "team": my_team.public_identifier}
        response = client.post(url, data=data, content_type="application/json")
        assert response.status_code == 201
        p = models.Project.objects.get(name="Project X")
        assert p.name == "Project X"
        assert models.Project.objects.count() == before + 1
| 47.193878
| 111
| 0.671135
| 593
| 4,625
| 5
| 0.145025
| 0.057673
| 0.064755
| 0.07285
| 0.829342
| 0.794266
| 0.782462
| 0.740641
| 0.740641
| 0.709949
| 0
| 0.030707
| 0.204324
| 4,625
| 97
| 112
| 47.680412
| 0.775
| 0
| 0
| 0.445783
| 0
| 0
| 0.160216
| 0.084541
| 0
| 0
| 0
| 0
| 0.361446
| 1
| 0.144578
| false
| 0
| 0.036145
| 0
| 0.192771
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e124e51d51568f26db8aa86bb7811cbfa4f57a4
| 150
|
py
|
Python
|
infobip_channels/email/models/path_paramaters/get_domain_details.py
|
infobip-community/infobip-api-python-sdk
|
5ffc5ab877ee1748aa29391f991c8c5324387487
|
[
"MIT"
] | null | null | null |
infobip_channels/email/models/path_paramaters/get_domain_details.py
|
infobip-community/infobip-api-python-sdk
|
5ffc5ab877ee1748aa29391f991c8c5324387487
|
[
"MIT"
] | null | null | null |
infobip_channels/email/models/path_paramaters/get_domain_details.py
|
infobip-community/infobip-api-python-sdk
|
5ffc5ab877ee1748aa29391f991c8c5324387487
|
[
"MIT"
] | null | null | null |
from infobip_channels.email.models.path_paramaters.core import EmailPathParameter
class GetDomainDetailsPathParameter(EmailPathParameter):
    """Path parameters for the "get domain details" email endpoint.

    All fields and validation are inherited unchanged from
    EmailPathParameter; this subclass exists only to name the endpoint.
    """
    pass
| 25
| 81
| 0.866667
| 14
| 150
| 9.142857
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086667
| 150
| 5
| 82
| 30
| 0.934307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
5e20287ebe482031710208263d611bf8e43e4162
| 25
|
py
|
Python
|
spvcm/both_levels/sma_se/__init__.py
|
weikang9009/spvcm
|
00ec35331e0e1a67bcd841a6b3761a23099617f7
|
[
"MIT"
] | 14
|
2017-05-21T08:29:08.000Z
|
2021-09-22T00:29:15.000Z
|
spvcm/both_levels/sma_se/__init__.py
|
weikang9009/spvcm
|
00ec35331e0e1a67bcd841a6b3761a23099617f7
|
[
"MIT"
] | 12
|
2018-05-11T11:13:21.000Z
|
2020-02-07T14:23:12.000Z
|
spvcm/both_levels/sma_se/__init__.py
|
weikang9009/spvcm
|
00ec35331e0e1a67bcd841a6b3761a23099617f7
|
[
"MIT"
] | 8
|
2017-05-20T00:55:40.000Z
|
2020-07-02T14:52:49.000Z
|
from .model import SMASE
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ead45be983e52c645d18fbf01f84a08a0f8df7ae
| 118
|
py
|
Python
|
polls/tests.py
|
davidefabbrico/Progetto_School
|
e32e345d154764725b96e2d22b441a17fae67ade
|
[
"MIT"
] | 1
|
2021-09-04T08:56:32.000Z
|
2021-09-04T08:56:32.000Z
|
polls/tests.py
|
davidefabbrico/Progetto_School
|
e32e345d154764725b96e2d22b441a17fae67ade
|
[
"MIT"
] | null | null | null |
polls/tests.py
|
davidefabbrico/Progetto_School
|
e32e345d154764725b96e2d22b441a17fae67ade
|
[
"MIT"
] | null | null | null |
import datetime
from django.utils import timezone
from django.test import TestCase
from django.urls import reverse
| 14.75
| 33
| 0.830508
| 17
| 118
| 5.764706
| 0.588235
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144068
| 118
| 7
| 34
| 16.857143
| 0.970297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d82b4ed437fa396ec59414e2c7a08bf15bd72bc0
| 26
|
py
|
Python
|
gadget/instrumentation/api/__init__.py
|
rlnsanz/inspectional-rara-parakeet
|
2c7919ed432616ec016a5afcd6718d16fa65e8af
|
[
"Apache-2.0"
] | 1
|
2018-05-28T12:20:29.000Z
|
2018-05-28T12:20:29.000Z
|
utils/__init__.py
|
system123/PyTorch_template
|
c07329245813bb7b084aa4ccac1c3f4bfe3f7015
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
system123/PyTorch_template
|
c07329245813bb7b084aa4ccac1c3f4bfe3f7015
|
[
"MIT"
] | 1
|
2021-06-25T16:06:59.000Z
|
2021-06-25T16:06:59.000Z
|
from .experiment import *
| 13
| 25
| 0.769231
| 3
| 26
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dc23d2897b68966c67df517328284fac0d5ba106
| 256
|
py
|
Python
|
restfly/__init__.py
|
AlainODea/restfly
|
817c4619a3b33f48529e949563c5bbb26bc8b351
|
[
"MIT"
] | 7
|
2021-05-05T20:20:12.000Z
|
2022-03-25T12:25:51.000Z
|
restfly/__init__.py
|
AlainODea/restfly
|
817c4619a3b33f48529e949563c5bbb26bc8b351
|
[
"MIT"
] | 6
|
2021-08-05T16:34:53.000Z
|
2022-03-30T16:56:41.000Z
|
restfly/__init__.py
|
AlainODea/restfly
|
817c4619a3b33f48529e949563c5bbb26bc8b351
|
[
"MIT"
] | 2
|
2022-01-12T19:38:08.000Z
|
2022-01-31T18:43:32.000Z
|
'''
RESTfly package
'''
from .version import VERSION as __version__ # noqa: F401
from .session import APISession # noqa: F401
from .endpoint import APIEndpoint # noqa: F401
from .iterator import APIIterator # noqa: F401
| 32
| 57
| 0.65625
| 28
| 256
| 5.857143
| 0.5
| 0.195122
| 0.219512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 0.273438
| 256
| 7
| 58
| 36.571429
| 0.817204
| 0.234375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dca4a050d696b49247957064e7d543c568027002
| 111
|
py
|
Python
|
slimevolleygym/__init__.py
|
mgoulao/2v2-Slime-Volleyball
|
7b9b2f345daba3e27e4fed3ed4dda9387b5761da
|
[
"Apache-2.0"
] | 2
|
2021-09-10T11:18:03.000Z
|
2021-09-13T18:46:39.000Z
|
slimevolleygym/__init__.py
|
mgoulao/2v2-Slime-Volleyball
|
7b9b2f345daba3e27e4fed3ed4dda9387b5761da
|
[
"Apache-2.0"
] | null | null | null |
slimevolleygym/__init__.py
|
mgoulao/2v2-Slime-Volleyball
|
7b9b2f345daba3e27e4fed3ed4dda9387b5761da
|
[
"Apache-2.0"
] | null | null | null |
import slimevolleygym.game_settings
import slimevolleygym.slimevolley
from slimevolleygym.slimevolley import *
| 27.75
| 40
| 0.891892
| 11
| 111
| 8.909091
| 0.545455
| 0.408163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 111
| 3
| 41
| 37
| 0.951456
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
dca7f63554b120ff84824fd41c00c9a79c88da04
| 30
|
py
|
Python
|
test/check-gradient.sikuli/check-gradient.py
|
jsmaniac/travis-os
|
b01673890e947f48b244b950d15feb2c7dd6c4df
|
[
"CC0-1.0"
] | null | null | null |
test/check-gradient.sikuli/check-gradient.py
|
jsmaniac/travis-os
|
b01673890e947f48b244b950d15feb2c7dd6c4df
|
[
"CC0-1.0"
] | 32
|
2018-07-27T21:34:54.000Z
|
2018-07-29T19:33:23.000Z
|
test/check-gradient.sikuli/check-gradient.py
|
jsmaniac/travis-os
|
b01673890e947f48b244b950d15feb2c7dd6c4df
|
[
"CC0-1.0"
] | 1
|
2021-10-02T16:16:11.000Z
|
2021-10-02T16:16:11.000Z
|
# Sikuli script: block for up to 10 seconds until the reference screenshot
# appears on screen (presumably raises FindFailed on timeout — Sikuli default;
# confirm the runner's exception settings).
wait("1529963334209.png", 10)
| 15
| 29
| 0.733333
| 4
| 30
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.535714
| 0.066667
| 30
| 1
| 30
| 30
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0.566667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f4e88081fe3b9b58f848b7cca45f7f3f51a7290f
| 74
|
py
|
Python
|
environments/Gazebo/controller/__init__.py
|
zal/simenvbenchmark
|
269d953b50408411a87950aebfb6adc20afbbead
|
[
"MIT"
] | 9
|
2020-12-16T11:43:35.000Z
|
2022-03-04T18:28:35.000Z
|
environments/Gazebo/controller/__init__.py
|
zal/simenvbenchmark
|
269d953b50408411a87950aebfb6adc20afbbead
|
[
"MIT"
] | null | null | null |
environments/Gazebo/controller/__init__.py
|
zal/simenvbenchmark
|
269d953b50408411a87950aebfb6adc20afbbead
|
[
"MIT"
] | 1
|
2022-02-17T02:49:08.000Z
|
2022-02-17T02:49:08.000Z
|
from .robot_env import RobotEnv_gazebo
from .nnn_env import nnnEnv_gazebo
| 24.666667
| 38
| 0.864865
| 12
| 74
| 5
| 0.666667
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 74
| 2
| 39
| 37
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
874b31b580129c8d63b9238e5def8ba359007fd5
| 251
|
py
|
Python
|
binary_tree/tests/test_level_order_traversal.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
binary_tree/tests/test_level_order_traversal.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
binary_tree/tests/test_level_order_traversal.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | 3
|
2020-10-07T20:24:45.000Z
|
2020-12-16T04:53:19.000Z
|
from test_helpers.test_helpers import get_binary_tree
from binary_tree.level_order_traversal import level_order_traversal
def test_level_order_traversal():
    """Breadth-first traversal of the fixture tree yields one list per level."""
    expected_levels = [[1], [2, 3], [4, 5]]
    assert level_order_traversal(get_binary_tree()) == expected_levels
| 31.375
| 67
| 0.780876
| 38
| 251
| 4.736842
| 0.447368
| 0.222222
| 0.422222
| 0.255556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.123506
| 251
| 7
| 68
| 35.857143
| 0.795455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8779e25f1cc4a979a682c0b5fb9e2833540fe6df
| 28
|
py
|
Python
|
network/__init__.py
|
K-Mahfoudh/Sign-Language-Translator
|
b75051846b98b5275e9b58ef7a87c6759d27dea9
|
[
"MIT"
] | 26
|
2021-12-02T01:33:33.000Z
|
2022-03-30T15:12:36.000Z
|
network/__init__.py
|
K-Mahfoudh/Sign-Language-Translator
|
b75051846b98b5275e9b58ef7a87c6759d27dea9
|
[
"MIT"
] | 3
|
2020-03-24T17:02:02.000Z
|
2021-02-02T22:00:53.000Z
|
network/__init__.py
|
K-Mahfoudh/Sign-Language-Translator
|
b75051846b98b5275e9b58ef7a87c6759d27dea9
|
[
"MIT"
] | 3
|
2021-12-15T04:07:45.000Z
|
2022-03-04T03:35:28.000Z
|
from .network import Network
| 28
| 28
| 0.857143
| 4
| 28
| 6
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 1
| 28
| 28
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5e53c7cc3559e015e7a5a79412e77c1da2c269cd
| 19,514
|
py
|
Python
|
database.py
|
Mdsid7/Student_Result_Analyzer_using_OOPS_and_Database
|
6c34c1e62bed2d5ff2ef4e9c824b54e63496b37d
|
[
"MIT"
] | 3
|
2020-11-14T15:02:59.000Z
|
2020-11-16T11:10:13.000Z
|
database.py
|
Mdsid7/Student_Result_Analyzer_using_OOPS_and_Database
|
6c34c1e62bed2d5ff2ef4e9c824b54e63496b37d
|
[
"MIT"
] | null | null | null |
database.py
|
Mdsid7/Student_Result_Analyzer_using_OOPS_and_Database
|
6c34c1e62bed2d5ff2ef4e9c824b54e63496b37d
|
[
"MIT"
] | null | null | null |
import mysql.connector
import string
# Module-level MySQL connection shared by every function below.
# NOTE(review): credentials are hard-coded in source — move host/user/
# password/database into configuration or environment variables.
mydb = mysql.connector.connect (
    host="localhost",
    user="root",
    password="myfamily",
    database="result"
)
# Single shared cursor reused by all queries in this module.
mycursor=mydb.cursor()
def _print_table(table):
    # Shared helper for the three *_view functions below: SELECT * on
    # *table*, then print the column-name header (from cursor metadata)
    # followed by every data row.
    mycursor.execute("SELECT * FROM " + table)
    field_names = [i[0] for i in mycursor.description]
    print(field_names)
    for row in mycursor.fetchall():
        print(row)

def A_view():
    """Print every row of A_section with a column-name header."""
    _print_table("A_section")

def B_view():
    """Print every row of B_section with a column-name header."""
    _print_table("B_section")

def C_view():
    """Print every row of C_section with a column-name header."""
    _print_table("C_section")
def results(USN,USN2,USN3):
    """Print the result row(s) for up to three USNs.

    Searches all three section tables via a UNION of parameterized SELECTs,
    so the USNs may belong to different sections.
    """
    sql = "SELECT * FROM A_section where USN=%s UNION SELECT * FROM B_section where USN=%s UNION SELECT * FROM C_section WHERE USN=%s"
    adr =(USN,USN2,USN3,)
    mycursor.execute(sql,adr,)
    # Column headers come from the cursor metadata of the executed query.
    field_names = [i[0] for i in mycursor.description]
    print(field_names)
    myresult = mycursor.fetchall()
    for x in myresult:
        print(x)
# Column list shared by the three section tables (order matters: it must
# match the positional parameters of the insert_*_sec functions).
_RESULT_COLUMNS = "Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA"

def _insert_record(table, values):
    # Shared helper: parameterized INSERT of one student record, then commit.
    sql = ("insert into " + table + "(" + _RESULT_COLUMNS + ")"
           " values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
    mycursor.execute(sql, values)
    mydb.commit()
    print(mycursor.rowcount, "record inserted Successfully.")

def insert_A_sec(Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA):
    """Insert one student record into A_section and commit."""
    _insert_record("A_section",
                   (Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA,))

def insert_B_sec(Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA):
    """Insert one student record into B_section and commit."""
    _insert_record("B_section",
                   (Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA,))

def insert_C_sec(Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA):
    """Insert one student record into C_section and commit."""
    _insert_record("C_section",
                   (Name,USN,Class,Section,DBMS,ADA,OOP,DSA,OS,SE,Average,Grade,SGPA,))
def _delete_by_usn(table, USN):
    # Shared helper: delete the row matching *USN* from *table* and commit.
    sql = "DELETE FROM " + table + " WHERE USN = %s"
    mycursor.execute(sql, (USN,))
    mydb.commit()
    print(mycursor.rowcount, "record(s) deleted")

def delete_A_sec(USN):
    """Delete the A_section row with the given USN."""
    _delete_by_usn("A_section", USN)

def delete_B_sec(USN):
    """Delete the B_section row with the given USN."""
    _delete_by_usn("B_section", USN)

def delete_C_sec(USN):
    """Delete the C_section row with the given USN."""
    _delete_by_usn("C_section", USN)
def _section_avg(section):
    # Shared helper: print the mean of the Average column for one section.
    mycursor.execute("Select AVG(Average) FROM " + section)
    a = mycursor.fetchone()
    # fetchone() yields a 1-tuple; the join/str dance converts its single
    # element (typically a Decimal) to float.  Kept as-is for behavior parity.
    res = float('.'.join(str(ele) for ele in a))
    print("\nThe average percentage of " + section + " students is: " + str(res))

def A_avg():
    """Print the average 'Average' mark of A_section students."""
    _section_avg("A_section")

def B_avg():
    """Print the average 'Average' mark of B_section students."""
    _section_avg("B_section")

def C_avg():
    """Print the average 'Average' mark of C_section students."""
    _section_avg("C_section")
def _section_topper(section):
    # Shared helper: print the row(s) holding the maximum SGPA in *section*,
    # preceded by a banner and the column-name header.
    mycursor.execute("Select * FROM " + section +
                     " WHERE SGPA = (SELECT MAX(SGPA) FROM " + section + ")")
    print("\n The Topper Of the Class is :\n")
    field_names = [i[0] for i in mycursor.description]
    print(field_names)
    for row in mycursor.fetchall():
        print(row)

def A_top():
    """Print the SGPA topper(s) of A_section."""
    _section_topper("A_section")

def B_top():
    """Print the SGPA topper(s) of B_section."""
    _section_topper("B_section")

def C_top():
    """Print the SGPA topper(s) of C_section."""
    _section_topper("C_section")
def _subject_view(subject):
    # Shared helper for the six *_view subject functions: print
    # Name/USN/Class/Section plus one subject mark for every student across
    # all three section tables (UNION removes duplicate rows).
    cols = "Name,USN,Class,Section," + subject
    mycursor.execute("SELECT " + cols + " FROM A_section UNION SELECT " + cols +
                     " FROM B_section UNION SELECT " + cols + " FROM C_section")
    field_names = [i[0] for i in mycursor.description]
    print(field_names)
    for row in mycursor.fetchall():
        print(row)

def dbms_view():
    """Print every student's DBMS mark."""
    _subject_view("DBMS")

def ada_view():
    """Print every student's ADA mark."""
    _subject_view("ADA")

def oop_view():
    """Print every student's OOP mark."""
    _subject_view("OOP")

def dsa_view():
    """Print every student's DSA mark."""
    _subject_view("DSA")

def os_view():
    """Print every student's OS mark."""
    _subject_view("OS")

def se_view():
    """Print every student's SE mark."""
    _subject_view("SE")
def _subject_avg(subject):
    # Shared helper: average one subject's marks over all three sections.
    # UNION ALL (not UNION) keeps duplicate marks so the mean is correct.
    mycursor.execute("Select AVG(" + subject + ") FROM ( Select " + subject +
                     " FROM A_section UNION ALL Select " + subject +
                     " FROM B_section UNION ALL Select " + subject +
                     " FROM C_section) s ")
    a = mycursor.fetchone()
    # fetchone() yields a 1-tuple; converted to float via the original
    # join/str idiom for behavior parity.
    res = float('.'.join(str(ele) for ele in a))
    print("\nThe average percentage in " + subject + " is: " + str(res))

def dbms_avg():
    """Print the overall average DBMS mark."""
    _subject_avg("DBMS")

def ada_avg():
    """Print the overall average ADA mark."""
    _subject_avg("ADA")

def oop_avg():
    """Print the overall average OOP mark."""
    _subject_avg("OOP")

def dsa_avg():
    """Print the overall average DSA mark."""
    _subject_avg("DSA")

def os_avg():
    """Print the overall average OS mark."""
    _subject_avg("OS")

def se_avg():
    """Print the overall average SE mark."""
    _subject_avg("SE")
def dbms_top():
mycursor.execute("Select Name,USN,Class,Section,DBMS FROM (SELECT Name,USN,Class,Section,DBMS FROM A_section WHERE DBMS = (SELECT MAX(DBMS) FROM A_section) UNION SELECT Name,USN,Class,Section,DBMS FROM B_section WHERE DBMS = (SELECT MAX(DBMS) FROM B_section) UNION SELECT Name,USN,Class,Section,DBMS FROM C_section WHERE DBMS = (SELECT MAX(DBMS) FROM C_section))s ORDER BY DBMS DESC LIMIT 1 ")
print("\n The Topper Of the DBMS is :\n")
field_names = [i[0] for i in mycursor.description]
print(field_names)
a = mycursor.fetchall()
for x in a:
print(x)
def ada_top():
mycursor.execute("Select Name,USN,Class,Section,ADA FROM (SELECT Name,USN,Class,Section,ADA FROM A_section WHERE ADA = (SELECT MAX(ADA) FROM A_section) UNION SELECT Name,USN,Class,Section,ADA FROM B_section WHERE ADA = (SELECT MAX(ADA) FROM B_section) UNION SELECT Name,USN,Class,Section,ADA FROM C_section WHERE ADA = (SELECT MAX(ADA) FROM C_section))s ORDER BY ADA DESC LIMIT 1 ")
print("\n The Topper Of the ADA is :\n")
field_names = [i[0] for i in mycursor.description]
print(field_names)
a = mycursor.fetchall()
for x in a:
print(x)
def oop_top():
mycursor.execute("Select Name,USN,Class,Section,OOP FROM (SELECT Name,USN,Class,Section,OOP FROM A_section WHERE OOP = (SELECT MAX(OOP) FROM A_section) UNION SELECT Name,USN,Class,Section,OOP FROM B_section WHERE OOP = (SELECT MAX(OOP) FROM B_section) UNION SELECT Name,USN,Class,Section,OOP FROM C_section WHERE OOP = (SELECT MAX(OOP) FROM C_section))s ORDER BY OOP DESC LIMIT 1 ")
print("\n The Topper Of the ADA is :\n")
field_names = [i[0] for i in mycursor.description]
print(field_names)
a = mycursor.fetchall()
for x in a:
print(x)
def dsa_top():
    """Print the single highest DSA scorer drawn from all three section tables."""
    mycursor.execute("Select Name,USN,Class,Section,DSA FROM (SELECT Name,USN,Class,Section,DSA FROM A_section WHERE DSA = (SELECT MAX(DSA) FROM A_section) UNION SELECT Name,USN,Class,Section,DSA FROM B_section WHERE DSA = (SELECT MAX(DSA) FROM B_section) UNION SELECT Name,USN,Class,Section,DSA FROM C_section WHERE DSA = (SELECT MAX(DSA) FROM C_section))s ORDER BY DSA DESC LIMIT 1 ")
    print("\n The Topper Of the DSA is :\n")
    header = [column[0] for column in mycursor.description]
    print(header)
    for record in mycursor.fetchall():
        print(record)
def os_top():
    """Print the single highest OS scorer drawn from all three section tables."""
    mycursor.execute("Select Name,USN,Class,Section,OS FROM (SELECT Name,USN,Class,Section,OS FROM A_section WHERE OS = (SELECT MAX(OS) FROM A_section) UNION SELECT Name,USN,Class,Section,OS FROM B_section WHERE OS = (SELECT MAX(OS) FROM B_section) UNION SELECT Name,USN,Class,Section,OS FROM C_section WHERE OS = (SELECT MAX(OS) FROM C_section))s ORDER BY OS DESC LIMIT 1 ")
    print("\n The Topper Of the OS is :\n")
    header = [column[0] for column in mycursor.description]
    print(header)
    for record in mycursor.fetchall():
        print(record)
def se_top():
    """Print the single highest SE scorer drawn from all three section tables."""
    mycursor.execute("Select Name,USN,Class,Section,SE FROM (SELECT Name,USN,Class,Section,SE FROM A_section WHERE SE = (SELECT MAX(SE) FROM A_section) UNION SELECT Name,USN,Class,Section,SE FROM B_section WHERE SE = (SELECT MAX(SE) FROM B_section) UNION SELECT Name,USN,Class,Section,SE FROM C_section WHERE SE = (SELECT MAX(SE) FROM C_section))s ORDER BY SE DESC LIMIT 1 ")
    print("\n The Topper Of the SE is :\n")
    header = [column[0] for column in mycursor.description]
    print(header)
    for record in mycursor.fetchall():
        print(record)
def _update_one_field(table, column, value, key_column, key_value):
    """Set ``column`` = ``value`` on the row of ``table`` where ``key_column`` matches.

    Commits the change and prints the affected row count, exactly as the 39
    original per-table functions did.  ``table``, ``column`` and ``key_column``
    are hard-coded string literals supplied by the wrappers below — never user
    input — so formatting them into the statement is injection-safe; the
    user-supplied values still travel through parameterized ``%s`` placeholders.
    """
    sql = "UPDATE {} SET {} = %s WHERE {} = %s".format(table, column, key_column)
    mycursor.execute(sql, (value, key_value))
    mydb.commit()
    print(mycursor.rowcount, "record(s) updated")

# --- Name (keyed by USN) ---------------------------------------------------
def update_A_sectionName(Name, USN):
    _update_one_field("A_section", "Name", Name, "USN", USN)

def update_B_sectionName(Name, USN):
    _update_one_field("B_section", "Name", Name, "USN", USN)

def update_C_sectionName(Name, USN):
    _update_one_field("C_section", "Name", Name, "USN", USN)

# --- USN (keyed by Name — the only group keyed this way) -------------------
def update_A_sectionUSN(USN, Name):
    _update_one_field("A_section", "USN", USN, "Name", Name)

def update_B_sectionUSN(USN, Name):
    _update_one_field("B_section", "USN", USN, "Name", Name)

def update_C_sectionUSN(USN, Name):
    _update_one_field("C_section", "USN", USN, "Name", Name)

# --- Class -----------------------------------------------------------------
def update_A_sectionClass(Class, USN):
    _update_one_field("A_section", "Class", Class, "USN", USN)

def update_B_sectionClass(Class, USN):
    _update_one_field("B_section", "Class", Class, "USN", USN)

def update_C_sectionClass(Class, USN):
    _update_one_field("C_section", "Class", Class, "USN", USN)

# --- Section ---------------------------------------------------------------
def update_A_sectionSection(Section, USN):
    _update_one_field("A_section", "Section", Section, "USN", USN)

def update_B_sectionSection(Section, USN):
    _update_one_field("B_section", "Section", Section, "USN", USN)

def update_C_sectionSection(Section, USN):
    _update_one_field("C_section", "Section", Section, "USN", USN)

# --- Subject marks ---------------------------------------------------------
def update_A_sectionDBMS(DBMS, USN):
    _update_one_field("A_section", "DBMS", DBMS, "USN", USN)

def update_B_sectionDBMS(DBMS, USN):
    _update_one_field("B_section", "DBMS", DBMS, "USN", USN)

def update_C_sectionDBMS(DBMS, USN):
    _update_one_field("C_section", "DBMS", DBMS, "USN", USN)

def update_A_sectionADA(ADA, USN):
    _update_one_field("A_section", "ADA", ADA, "USN", USN)

def update_B_sectionADA(ADA, USN):
    _update_one_field("B_section", "ADA", ADA, "USN", USN)

def update_C_sectionADA(ADA, USN):
    _update_one_field("C_section", "ADA", ADA, "USN", USN)

# NOTE(review): these write an "OOPS" column while oop_top()/averages read
# "OOP" — the column names are kept as in the originals; confirm the schema.
def update_A_sectionOOPS(OOPS, USN):
    _update_one_field("A_section", "OOPS", OOPS, "USN", USN)

def update_B_sectionOOPS(OOPS, USN):
    _update_one_field("B_section", "OOPS", OOPS, "USN", USN)

def update_C_sectionOOPS(OOPS, USN):
    _update_one_field("C_section", "OOPS", OOPS, "USN", USN)

def update_A_sectionDSA(DSA, USN):
    _update_one_field("A_section", "DSA", DSA, "USN", USN)

def update_B_sectionDSA(DSA, USN):
    _update_one_field("B_section", "DSA", DSA, "USN", USN)

def update_C_sectionDSA(DSA, USN):
    _update_one_field("C_section", "DSA", DSA, "USN", USN)

def update_A_sectionOS(OS, USN):
    _update_one_field("A_section", "OS", OS, "USN", USN)

def update_B_sectionOS(OS, USN):
    _update_one_field("B_section", "OS", OS, "USN", USN)

def update_C_sectionOS(OS, USN):
    _update_one_field("C_section", "OS", OS, "USN", USN)

def update_A_sectionSE(SE, USN):
    _update_one_field("A_section", "SE", SE, "USN", USN)

def update_B_sectionSE(SE, USN):
    _update_one_field("B_section", "SE", SE, "USN", USN)

def update_C_sectionSE(SE, USN):
    _update_one_field("C_section", "SE", SE, "USN", USN)

# --- Derived columns -------------------------------------------------------
def update_Average_A(Average, USN):
    _update_one_field("A_section", "Average", Average, "USN", USN)

def update_Average_B(Average, USN):
    _update_one_field("B_section", "Average", Average, "USN", USN)

def update_Average_C(Average, USN):
    _update_one_field("C_section", "Average", Average, "USN", USN)

def update_Grade_A(Grade, USN):
    _update_one_field("A_section", "Grade", Grade, "USN", USN)

def update_Grade_B(Grade, USN):
    _update_one_field("B_section", "Grade", Grade, "USN", USN)

def update_Grade_C(Grade, USN):
    _update_one_field("C_section", "Grade", Grade, "USN", USN)

def update_A_sectionSGPA(SGPA, USN):
    _update_one_field("A_section", "SGPA", SGPA, "USN", USN)

def update_B_sectionSGPA(SGPA, USN):
    _update_one_field("B_section", "SGPA", SGPA, "USN", USN)

def update_C_sectionSGPA(SGPA, USN):
    _update_one_field("C_section", "SGPA", SGPA, "USN", USN)
| 35.871324
| 395
| 0.707953
| 3,168
| 19,514
| 4.269255
| 0.03346
| 0.080961
| 0.04525
| 0.071645
| 0.962588
| 0.905656
| 0.820259
| 0.774787
| 0.769316
| 0.704251
| 0
| 0.001755
| 0.153121
| 19,514
| 543
| 396
| 35.937385
| 0.816653
| 0
| 0
| 0.636752
| 0
| 0.047009
| 0.411798
| 0.073288
| 0
| 0
| 0
| 0
| 0
| 1
| 0.155983
| false
| 0.002137
| 0.004274
| 0
| 0.160256
| 0.215812
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5e5da475beb924559aef41e56c9015cb3ad1f412
| 102
|
py
|
Python
|
beproud/django/commons/models/__init__.py
|
beproud/bpcommons
|
c24aed4143d743b1af6c621630ed9faa7e1ccaa4
|
[
"BSD-2-Clause"
] | 2
|
2016-03-07T01:52:12.000Z
|
2017-08-30T06:14:43.000Z
|
beproud/django/commons/models/__init__.py
|
beproud/bpcommons
|
c24aed4143d743b1af6c621630ed9faa7e1ccaa4
|
[
"BSD-2-Clause"
] | 18
|
2015-03-08T13:52:18.000Z
|
2022-01-25T02:46:09.000Z
|
beproud/django/commons/models/__init__.py
|
beproud/bpcommons
|
c24aed4143d743b1af6c621630ed9faa7e1ccaa4
|
[
"BSD-2-Clause"
] | 2
|
2015-02-07T01:33:00.000Z
|
2015-09-08T14:57:44.000Z
|
from __future__ import absolute_import
from .base import *
from .fields import *
from .utils import *
| 20.4
| 38
| 0.784314
| 14
| 102
| 5.357143
| 0.5
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156863
| 102
| 4
| 39
| 25.5
| 0.872093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0d6b608e96b21d80cef0566aab86ebdff3ea30ad
| 36
|
py
|
Python
|
prediction/FaceDetectMicroservice/secrets.py
|
anishapai/CombinedTechStack
|
3152b424445b9dce09932c2f2ae313ff7a02de5c
|
[
"MIT"
] | 1
|
2021-09-02T01:44:36.000Z
|
2021-09-02T01:44:36.000Z
|
prediction/SceneDetectMicroservice-V2/secrets.py
|
anishapai/CombinedTechStack
|
3152b424445b9dce09932c2f2ae313ff7a02de5c
|
[
"MIT"
] | null | null | null |
prediction/SceneDetectMicroservice-V2/secrets.py
|
anishapai/CombinedTechStack
|
3152b424445b9dce09932c2f2ae313ff7a02de5c
|
[
"MIT"
] | null | null | null |
# Placeholder credential for the detection microservice's external API.
# NOTE(review): committing real keys here is unsafe — prefer loading from an
# environment variable or a secrets manager; replace the placeholder locally.
API_KEY = 'paste_your_api_key_here'
| 18
| 35
| 0.833333
| 7
| 36
| 3.571429
| 0.714286
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 36
| 1
| 36
| 36
| 0.757576
| 0
| 0
| 0
| 0
| 0
| 0.638889
| 0.638889
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0d881d7e4c671cbea56d5aa2b2d26b144b44a0a4
| 537
|
py
|
Python
|
{{cookiecutter.project_slug}}/tasks.py
|
i2biz/cookiecutter-python-pylint
|
ba07cd620d982cb11161fee808025257176c62c3
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/tasks.py
|
i2biz/cookiecutter-python-pylint
|
ba07cd620d982cb11161fee808025257176c62c3
|
[
"MIT"
] | null | null | null |
{{cookiecutter.project_slug}}/tasks.py
|
i2biz/cookiecutter-python-pylint
|
ba07cd620d982cb11161fee808025257176c62c3
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""Invoke task definitions for the generated project: formatting check,
linting, testing, and a combined ``check`` aggregate.

The ``{{cookiecutter.project_slug}}`` markers are cookiecutter template
placeholders expanded when the project is generated.
"""
from invoke import task


@task
def style(ctx):
    """Verify formatting with black in check-only mode (writes nothing)."""
    ctx.run(
        "black --check {{cookiecutter.project_slug}} {{cookiecutter.project_slug}}_test"
    )


@task
def lint(ctx):
    """Run pylint over the package and its tests (``-r n``: no full report)."""
    ctx.run(
        "pylint {{cookiecutter.project_slug}} {{cookiecutter.project_slug}}_test -r n"
    )


@task
def test(ctx):
    """Run pytest with coverage, emitting HTML and terminal-missing reports."""
    ctx.run(
        "py.test -v --cov {{cookiecutter.project_slug}} --cov-report=html --cov-report=term-missing {{cookiecutter.project_slug}}_test"
    )


@task(pre=[test, style, lint])
def check(ctx):
    """Aggregate task: runs test, style and lint via ``pre=``; no body needed."""
    pass
| 17.9
| 135
| 0.64432
| 71
| 537
| 4.746479
| 0.422535
| 0.338279
| 0.409496
| 0.240356
| 0.400593
| 0.296736
| 0.296736
| 0
| 0
| 0
| 0
| 0.002309
| 0.193669
| 537
| 29
| 136
| 18.517241
| 0.775982
| 0.022346
| 0
| 0.315789
| 0
| 0.052632
| 0.533461
| 0.409178
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0.052632
| 0.052632
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
0d8baebf054454864ab0af3c6388d7fdcb748457
| 48
|
py
|
Python
|
api/services/__init__.py
|
adkl/etf_online_competition_system
|
c857ce78aec65c1d31f48ae8b8dc6facdb022a0c
|
[
"MIT"
] | null | null | null |
api/services/__init__.py
|
adkl/etf_online_competition_system
|
c857ce78aec65c1d31f48ae8b8dc6facdb022a0c
|
[
"MIT"
] | null | null | null |
api/services/__init__.py
|
adkl/etf_online_competition_system
|
c857ce78aec65c1d31f48ae8b8dc6facdb022a0c
|
[
"MIT"
] | null | null | null |
from .etf_oracle_service import EtfOracleService
| 48
| 48
| 0.916667
| 6
| 48
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 48
| 1
| 48
| 48
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0da734c0977b1bf2b39739fd86c1f16981157d5f
| 38
|
py
|
Python
|
test/01.py
|
vkpedia/test1
|
1404149e62e9eb871701895fc20f0fb16cce039a
|
[
"MIT"
] | null | null | null |
test/01.py
|
vkpedia/test1
|
1404149e62e9eb871701895fc20f0fb16cce039a
|
[
"MIT"
] | null | null | null |
test/01.py
|
vkpedia/test1
|
1404149e62e9eb871701895fc20f0fb16cce039a
|
[
"MIT"
] | null | null | null |
import numpy as np

# Build and display a length-10 vector of ones.
ones_vector = np.ones(10)
print(ones_vector)
| 12.666667
| 18
| 0.736842
| 8
| 38
| 3.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.131579
| 38
| 3
| 19
| 12.666667
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
0dcec720a5bc82ea144347b5569b77de0db3f176
| 31
|
py
|
Python
|
build/lib/ezyt/imageEditor/__init__.py
|
AlexBacho/ezyt
|
5d9526d7901270ed5d97adf73ed0df9b21bf0387
|
[
"MIT"
] | 1
|
2020-10-21T09:28:56.000Z
|
2020-10-21T09:28:56.000Z
|
build/lib/ezyt/imageEditor/__init__.py
|
AlexBacho/ezyt
|
5d9526d7901270ed5d97adf73ed0df9b21bf0387
|
[
"MIT"
] | null | null | null |
build/lib/ezyt/imageEditor/__init__.py
|
AlexBacho/ezyt
|
5d9526d7901270ed5d97adf73ed0df9b21bf0387
|
[
"MIT"
] | null | null | null |
from .editor import ImageEditor
| 31
| 31
| 0.870968
| 4
| 31
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 31
| 1
| 31
| 31
| 0.964286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
21a24f03b140642ff458b24a8d83a2e1b29f3531
| 36,263
|
py
|
Python
|
migrations/versions/03f014e8d128_added_basic_prefix.py
|
charlestondance/amoslims
|
c1d051db3e88a92644446744a9027c5699f52b02
|
[
"MIT"
] | null | null | null |
migrations/versions/03f014e8d128_added_basic_prefix.py
|
charlestondance/amoslims
|
c1d051db3e88a92644446744a9027c5699f52b02
|
[
"MIT"
] | 7
|
2020-03-24T15:56:29.000Z
|
2022-01-13T00:48:15.000Z
|
migrations/versions/03f014e8d128_added_basic_prefix.py
|
charlestondance/amoslims
|
c1d051db3e88a92644446744a9027c5699f52b02
|
[
"MIT"
] | null | null | null |
"""added basic prefix
Revision ID: 03f014e8d128
Revises: fed23b3ce53a
Create Date: 2017-01-15 10:25:37.768783
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '03f014e8d128'  # this migration's ID (matches the filename)
down_revision = 'fed23b3ce53a'  # migration applied immediately before this one
branch_labels = None  # no named branch for this revision
depends_on = None  # no cross-branch dependency
def upgrade():
    """Create the ``basic_``-prefixed workflow tables (plus ``job_types``),
    drop the superseded un-prefixed tables, and add a ``job_type`` column to
    ``job_table``.

    Auto-generated by Alembic — edit with care: statement order matters
    (each table's indexes follow its creation; drops come last).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('basic_buffer_plate_wells',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('buffer_plate_well_id', sa.String(length=64), nullable=True),
    sa.Column('loading_volume', sa.Float(), nullable=True),
    sa.Column('buffer_plate_barcode', sa.String(length=64), nullable=True),
    sa.Column('buffer_plate_number', sa.Integer(), nullable=True),
    sa.Column('buffer_name', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Non-unique lookup indexes for every searchable column of the new table.
    op.create_index(op.f('ix_basic_buffer_plate_wells_buffer_name'), 'basic_buffer_plate_wells', ['buffer_name'], unique=False)
    op.create_index(op.f('ix_basic_buffer_plate_wells_buffer_plate_barcode'), 'basic_buffer_plate_wells', ['buffer_plate_barcode'], unique=False)
    op.create_index(op.f('ix_basic_buffer_plate_wells_buffer_plate_number'), 'basic_buffer_plate_wells', ['buffer_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_buffer_plate_wells_buffer_plate_well_id'), 'basic_buffer_plate_wells', ['buffer_plate_well_id'], unique=False)
    op.create_index(op.f('ix_basic_buffer_plate_wells_loading_volume'), 'basic_buffer_plate_wells', ['loading_volume'], unique=False)
    op.create_index(op.f('ix_basic_buffer_plate_wells_unique_job_id'), 'basic_buffer_plate_wells', ['unique_job_id'], unique=False)
    op.create_table('basic_clip_enzyme',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('clip_well_id', sa.String(length=64), nullable=True),
    sa.Column('clip_barcode', sa.String(length=64), nullable=True),
    sa.Column('concatenated_clip_id', sa.String(length=64), nullable=True),
    sa.Column('clip_number', sa.Integer(), nullable=True),
    sa.Column('clip_batch_number', sa.Integer(), nullable=True),
    sa.Column('clip_plate_number', sa.Integer(), nullable=True),
    sa.Column('transfer_volume', sa.Integer(), nullable=True),
    sa.Column('enzyme_plate_barcode', sa.String(length=64), nullable=True),
    sa.Column('enzyme_plate_well_id', sa.String(length=64), nullable=True),
    sa.Column('enzyme_plate_number', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_clip_enzyme_clip_barcode'), 'basic_clip_enzyme', ['clip_barcode'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_clip_batch_number'), 'basic_clip_enzyme', ['clip_batch_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_clip_number'), 'basic_clip_enzyme', ['clip_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_clip_plate_number'), 'basic_clip_enzyme', ['clip_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_clip_well_id'), 'basic_clip_enzyme', ['clip_well_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_concatenated_clip_id'), 'basic_clip_enzyme', ['concatenated_clip_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_enzyme_plate_barcode'), 'basic_clip_enzyme', ['enzyme_plate_barcode'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_enzyme_plate_number'), 'basic_clip_enzyme', ['enzyme_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_enzyme_plate_well_id'), 'basic_clip_enzyme', ['enzyme_plate_well_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_transfer_volume'), 'basic_clip_enzyme', ['transfer_volume'], unique=False)
    op.create_index(op.f('ix_basic_clip_enzyme_unique_job_id'), 'basic_clip_enzyme', ['unique_job_id'], unique=False)
    op.create_table('basic_clip_list',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('part_id', sa.String(length=64), nullable=True),
    sa.Column('concatenated_part_id', sa.String(length=64), nullable=True),
    sa.Column('job_master_well_id', sa.String(length=64), nullable=True),
    sa.Column('job_master_barcode', sa.String(length=64), nullable=True),
    sa.Column('part_id_sample_number', sa.Integer(), nullable=True),
    sa.Column('clip_well_id', sa.String(length=64), nullable=True),
    sa.Column('clip_barcode', sa.String(length=64), nullable=True),
    sa.Column('concatenated_clip_id', sa.String(length=64), nullable=True),
    sa.Column('clip_number', sa.Integer(), nullable=True),
    sa.Column('clip_batch_number', sa.Integer(), nullable=True),
    sa.Column('destination_plate_number', sa.Integer(), nullable=True),
    sa.Column('transfer_volume', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_clip_list_clip_barcode'), 'basic_clip_list', ['clip_barcode'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_clip_batch_number'), 'basic_clip_list', ['clip_batch_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_clip_number'), 'basic_clip_list', ['clip_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_clip_well_id'), 'basic_clip_list', ['clip_well_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_concatenated_clip_id'), 'basic_clip_list', ['concatenated_clip_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_concatenated_part_id'), 'basic_clip_list', ['concatenated_part_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_destination_plate_number'), 'basic_clip_list', ['destination_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_job_master_barcode'), 'basic_clip_list', ['job_master_barcode'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_job_master_well_id'), 'basic_clip_list', ['job_master_well_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_part_id'), 'basic_clip_list', ['part_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_part_id_sample_number'), 'basic_clip_list', ['part_id_sample_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_transfer_volume'), 'basic_clip_list', ['transfer_volume'], unique=False)
    op.create_index(op.f('ix_basic_clip_list_unique_job_id'), 'basic_clip_list', ['unique_job_id'], unique=False)
    op.create_table('basic_clip_qc_fragment',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('clip_well_id', sa.String(length=64), nullable=True),
    sa.Column('clip_barcode', sa.String(length=64), nullable=True),
    sa.Column('concatenated_clip_id', sa.String(length=64), nullable=True),
    sa.Column('clip_number', sa.Integer(), nullable=True),
    sa.Column('clip_batch_number', sa.Integer(), nullable=True),
    sa.Column('clip_plate_number', sa.Integer(), nullable=True),
    sa.Column('well_id_96', sa.String(length=64), nullable=True),
    sa.Column('well_id_384', sa.String(length=64), nullable=True),
    sa.Column('clip_qc_plate_number', sa.Integer(), nullable=True),
    sa.Column('clip_qc_barcode', sa.String(length=64), nullable=True),
    sa.Column('well_number_96', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_barcode'), 'basic_clip_qc_fragment', ['clip_barcode'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_batch_number'), 'basic_clip_qc_fragment', ['clip_batch_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_number'), 'basic_clip_qc_fragment', ['clip_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_plate_number'), 'basic_clip_qc_fragment', ['clip_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_qc_barcode'), 'basic_clip_qc_fragment', ['clip_qc_barcode'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_qc_plate_number'), 'basic_clip_qc_fragment', ['clip_qc_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_clip_well_id'), 'basic_clip_qc_fragment', ['clip_well_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_concatenated_clip_id'), 'basic_clip_qc_fragment', ['concatenated_clip_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_unique_job_id'), 'basic_clip_qc_fragment', ['unique_job_id'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_well_id_384'), 'basic_clip_qc_fragment', ['well_id_384'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_well_id_96'), 'basic_clip_qc_fragment', ['well_id_96'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_fragment_well_number_96'), 'basic_clip_qc_fragment', ['well_number_96'], unique=False)
    op.create_table('basic_clip_qc_plates_per_job',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('number_of_clip_plates', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_clip_qc_plates_per_job_number_of_clip_plates'), 'basic_clip_qc_plates_per_job', ['number_of_clip_plates'], unique=False)
    op.create_index(op.f('ix_basic_clip_qc_plates_per_job_unique_job_id'), 'basic_clip_qc_plates_per_job', ['unique_job_id'], unique=False)
    op.create_table('basic_job_master',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('part_id', sa.String(length=64), nullable=True),
    sa.Column('job_master_well_id', sa.String(length=64), nullable=True),
    sa.Column('job_master_barcode', sa.String(length=64), nullable=True),
    sa.Column('sample_number', sa.Integer(), nullable=True),
    sa.Column('uploaded_filename', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_job_master_job_master_barcode'), 'basic_job_master', ['job_master_barcode'], unique=False)
    op.create_index(op.f('ix_basic_job_master_job_master_well_id'), 'basic_job_master', ['job_master_well_id'], unique=False)
    op.create_index(op.f('ix_basic_job_master_part_id'), 'basic_job_master', ['part_id'], unique=False)
    op.create_index(op.f('ix_basic_job_master_sample_number'), 'basic_job_master', ['sample_number'], unique=False)
    op.create_index(op.f('ix_basic_job_master_unique_job_id'), 'basic_job_master', ['unique_job_id'], unique=False)
    op.create_index(op.f('ix_basic_job_master_uploaded_filename'), 'basic_job_master', ['uploaded_filename'], unique=False)
    op.create_table('basic_stitch_buffer',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('stitch_well_id', sa.String(length=64), nullable=True),
    sa.Column('stitch_barcode', sa.String(length=64), nullable=True),
    sa.Column('stitch_id', sa.String(length=64), nullable=True),
    sa.Column('transfer_volume', sa.Integer(), nullable=True),
    sa.Column('enzyme_plate_barcode', sa.String(length=64), nullable=True),
    sa.Column('enzyme_plate_well_id', sa.String(length=64), nullable=True),
    sa.Column('enzyme_plate_number', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_stitch_buffer_enzyme_plate_barcode'), 'basic_stitch_buffer', ['enzyme_plate_barcode'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_enzyme_plate_number'), 'basic_stitch_buffer', ['enzyme_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_enzyme_plate_well_id'), 'basic_stitch_buffer', ['enzyme_plate_well_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_stitch_barcode'), 'basic_stitch_buffer', ['stitch_barcode'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_stitch_id'), 'basic_stitch_buffer', ['stitch_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_stitch_well_id'), 'basic_stitch_buffer', ['stitch_well_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_transfer_volume'), 'basic_stitch_buffer', ['transfer_volume'], unique=False)
    op.create_index(op.f('ix_basic_stitch_buffer_unique_job_id'), 'basic_stitch_buffer', ['unique_job_id'], unique=False)
    op.create_table('basic_stitch_list',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('stitch_id', sa.String(length=64), nullable=True),
    sa.Column('clip_number', sa.Integer(), nullable=True),
    sa.Column('clip_batch_number', sa.Integer(), nullable=True),
    sa.Column('concatenated_clip_id', sa.String(length=64), nullable=True),
    sa.Column('clip_well_id', sa.String(length=64), nullable=True),
    sa.Column('clip_barcode', sa.String(length=64), nullable=True),
    sa.Column('stitch_well_id', sa.String(length=64), nullable=True),
    sa.Column('stitch_plate_barcode', sa.String(length=64), nullable=True),
    sa.Column('stitch_plate_number', sa.Integer(), nullable=True),
    sa.Column('transfer_volume', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_stitch_list_clip_barcode'), 'basic_stitch_list', ['clip_barcode'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_clip_batch_number'), 'basic_stitch_list', ['clip_batch_number'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_clip_number'), 'basic_stitch_list', ['clip_number'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_clip_well_id'), 'basic_stitch_list', ['clip_well_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_concatenated_clip_id'), 'basic_stitch_list', ['concatenated_clip_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_stitch_id'), 'basic_stitch_list', ['stitch_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_stitch_plate_barcode'), 'basic_stitch_list', ['stitch_plate_barcode'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_stitch_plate_number'), 'basic_stitch_list', ['stitch_plate_number'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_stitch_well_id'), 'basic_stitch_list', ['stitch_well_id'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_transfer_volume'), 'basic_stitch_list', ['transfer_volume'], unique=False)
    op.create_index(op.f('ix_basic_stitch_list_unique_job_id'), 'basic_stitch_list', ['unique_job_id'], unique=False)
    op.create_table('basic_unique_clip',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('clip_number', sa.Integer(), nullable=True),
    sa.Column('part_1_id', sa.String(length=64), nullable=True),
    sa.Column('part_2_id', sa.String(length=64), nullable=True),
    sa.Column('part_3_id', sa.String(length=64), nullable=True),
    sa.Column('number_of_times_used', sa.Integer(), nullable=True),
    sa.Column('clip_batches_required', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_unique_clip_clip_batches_required'), 'basic_unique_clip', ['clip_batches_required'], unique=False)
    op.create_index(op.f('ix_basic_unique_clip_clip_number'), 'basic_unique_clip', ['clip_number'], unique=False)
    op.create_index(op.f('ix_basic_unique_clip_number_of_times_used'), 'basic_unique_clip', ['number_of_times_used'], unique=False)
    op.create_index(op.f('ix_basic_unique_clip_part_1_id'), 'basic_unique_clip', ['part_1_id'], unique=False)
    op.create_index(op.f('ix_basic_unique_clip_part_2_id'), 'basic_unique_clip', ['part_2_id'], unique=False)
    op.create_index(op.f('ix_basic_unique_clip_part_3_id'), 'basic_unique_clip', ['part_3_id'], unique=False)
    op.create_index(op.f('ix_basic_unique_clip_unique_job_id'), 'basic_unique_clip', ['unique_job_id'], unique=False)
    op.create_table('basic_unique_part',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('unique_job_id', sa.String(length=64), nullable=True),
    sa.Column('part_id', sa.String(length=64), nullable=True),
    sa.Column('samples_required', sa.Integer(), nullable=True),
    sa.Column('number_of_times_used', sa.Integer(), nullable=True),
    sa.Column('total_volume_required', sa.Integer(), nullable=True),
    sa.Column('volume_per_part', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_basic_unique_part_number_of_times_used'), 'basic_unique_part', ['number_of_times_used'], unique=False)
    op.create_index(op.f('ix_basic_unique_part_part_id'), 'basic_unique_part', ['part_id'], unique=False)
    op.create_index(op.f('ix_basic_unique_part_samples_required'), 'basic_unique_part', ['samples_required'], unique=False)
    op.create_index(op.f('ix_basic_unique_part_total_volume_required'), 'basic_unique_part', ['total_volume_required'], unique=False)
    op.create_index(op.f('ix_basic_unique_part_unique_job_id'), 'basic_unique_part', ['unique_job_id'], unique=False)
    op.create_index(op.f('ix_basic_unique_part_volume_per_part'), 'basic_unique_part', ['volume_per_part'], unique=False)
    op.create_table('job_types',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('job_type', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # job_type names must be distinct — the only unique index in this revision.
    op.create_index(op.f('ix_job_types_job_type'), 'job_types', ['job_type'], unique=True)
    # Drop the old un-prefixed tables now superseded by the basic_* ones.
    # NOTE(review): any data in them is discarded, not migrated — confirm intended.
    op.drop_table('clip_qc_plates_per_job')
    op.drop_table('stitch_buffer')
    op.drop_table('clip_qc_fragment')
    op.drop_table('buffer_plate_wells')
    op.drop_table('clip_enzyme')
    op.drop_table('unique_part')
    op.drop_table('unique_clip')
    op.drop_table('job_master')
    op.drop_table('clip_list')
    op.drop_table('stitch_list')
    # Tag existing jobs with a (nullable) type, indexed for filtering.
    op.add_column('job_table', sa.Column('job_type', sa.String(length=64), nullable=True))
    op.create_index(op.f('ix_job_table_job_type'), 'job_table', ['job_type'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert this migration.

    Mirrors upgrade() in reverse order:
      1. remove the ``job_type`` column/index added to ``job_table``;
      2. recreate the legacy tables that upgrade() dropped
         (column definitions reflect the pre-migration PostgreSQL schema,
         hence the dialect-level INTEGER/VARCHAR types and explicit
         ``*_pkey`` constraint names);
      3. drop ``job_types`` and every ``basic_*`` table (indexes first)
         that upgrade() created.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # -- 1. undo the job_table change -------------------------------------
    op.drop_index(op.f('ix_job_table_job_type'), table_name='job_table')
    op.drop_column('job_table', 'job_type')
    # -- 2. restore the legacy tables -------------------------------------
    op.create_table('stitch_list',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_batch_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('concatenated_clip_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_plate_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_plate_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('transfer_volume', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='stitch_list_pkey')
    )
    op.create_table('clip_list',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('part_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('concatenated_part_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('job_master_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('job_master_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('part_id_sample_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('concatenated_clip_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_batch_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('destination_plate_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('transfer_volume', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='clip_list_pkey')
    )
    op.create_table('job_master',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('part_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('job_master_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('job_master_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('sample_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('uploaded_filename', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='job_master_pkey')
    )
    op.create_table('unique_clip',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('part_1_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('part_2_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('part_3_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('number_of_times_used', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_batches_required', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='unique_clip_pkey')
    )
    op.create_table('unique_part',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('part_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('samples_required', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('number_of_times_used', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('total_volume_required', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('volume_per_part', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='unique_part_pkey')
    )
    op.create_table('clip_enzyme',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('concatenated_clip_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_batch_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_plate_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('transfer_volume', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('enzyme_plate_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('enzyme_plate_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        # NOTE(review): enzyme_plate_number is VARCHAR here while the other
        # *_number columns are INTEGER — this reproduces the legacy schema.
        sa.Column('enzyme_plate_number', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='clip_enzyme_pkey')
    )
    op.create_table('buffer_plate_wells',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('buffer_plate_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('loading_volume', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('buffer_plate_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('buffer_plate_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('buffer_name', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='buffer_plate_wells_pkey')
    )
    op.create_table('clip_qc_fragment',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('concatenated_clip_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_batch_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_plate_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('well_id_96', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('well_id_384', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('clip_qc_plate_number', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('clip_qc_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('well_number_96', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='clip_qc_fragment_pkey')
    )
    op.create_table('stitch_buffer',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('stitch_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('transfer_volume', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('enzyme_plate_barcode', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('enzyme_plate_well_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('enzyme_plate_number', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='stitch_buffer_pkey')
    )
    op.create_table('clip_qc_plates_per_job',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('unique_job_id', sa.VARCHAR(length=64), autoincrement=False, nullable=True),
        sa.Column('number_of_clip_plates', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='clip_qc_plates_per_job_pkey')
    )
    # -- 3. drop the tables created by upgrade() (indexes before tables) ---
    op.drop_index(op.f('ix_job_types_job_type'), table_name='job_types')
    op.drop_table('job_types')
    op.drop_index(op.f('ix_basic_unique_part_volume_per_part'), table_name='basic_unique_part')
    op.drop_index(op.f('ix_basic_unique_part_unique_job_id'), table_name='basic_unique_part')
    op.drop_index(op.f('ix_basic_unique_part_total_volume_required'), table_name='basic_unique_part')
    op.drop_index(op.f('ix_basic_unique_part_samples_required'), table_name='basic_unique_part')
    op.drop_index(op.f('ix_basic_unique_part_part_id'), table_name='basic_unique_part')
    op.drop_index(op.f('ix_basic_unique_part_number_of_times_used'), table_name='basic_unique_part')
    op.drop_table('basic_unique_part')
    op.drop_index(op.f('ix_basic_unique_clip_unique_job_id'), table_name='basic_unique_clip')
    op.drop_index(op.f('ix_basic_unique_clip_part_3_id'), table_name='basic_unique_clip')
    op.drop_index(op.f('ix_basic_unique_clip_part_2_id'), table_name='basic_unique_clip')
    op.drop_index(op.f('ix_basic_unique_clip_part_1_id'), table_name='basic_unique_clip')
    op.drop_index(op.f('ix_basic_unique_clip_number_of_times_used'), table_name='basic_unique_clip')
    op.drop_index(op.f('ix_basic_unique_clip_clip_number'), table_name='basic_unique_clip')
    op.drop_index(op.f('ix_basic_unique_clip_clip_batches_required'), table_name='basic_unique_clip')
    op.drop_table('basic_unique_clip')
    op.drop_index(op.f('ix_basic_stitch_list_unique_job_id'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_transfer_volume'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_stitch_well_id'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_stitch_plate_number'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_stitch_plate_barcode'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_stitch_id'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_concatenated_clip_id'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_clip_well_id'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_clip_number'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_clip_batch_number'), table_name='basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_list_clip_barcode'), table_name='basic_stitch_list')
    op.drop_table('basic_stitch_list')
    op.drop_index(op.f('ix_basic_stitch_buffer_unique_job_id'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_transfer_volume'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_stitch_well_id'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_stitch_id'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_stitch_barcode'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_enzyme_plate_well_id'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_enzyme_plate_number'), table_name='basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_stitch_buffer_enzyme_plate_barcode'), table_name='basic_stitch_buffer')
    op.drop_table('basic_stitch_buffer')
    op.drop_index(op.f('ix_basic_job_master_uploaded_filename'), table_name='basic_job_master')
    op.drop_index(op.f('ix_basic_job_master_unique_job_id'), table_name='basic_job_master')
    op.drop_index(op.f('ix_basic_job_master_sample_number'), table_name='basic_job_master')
    op.drop_index(op.f('ix_basic_job_master_part_id'), table_name='basic_job_master')
    op.drop_index(op.f('ix_basic_job_master_job_master_well_id'), table_name='basic_job_master')
    op.drop_index(op.f('ix_basic_job_master_job_master_barcode'), table_name='basic_job_master')
    op.drop_table('basic_job_master')
    op.drop_index(op.f('ix_basic_clip_qc_plates_per_job_unique_job_id'), table_name='basic_clip_qc_plates_per_job')
    op.drop_index(op.f('ix_basic_clip_qc_plates_per_job_number_of_clip_plates'), table_name='basic_clip_qc_plates_per_job')
    op.drop_table('basic_clip_qc_plates_per_job')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_well_number_96'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_well_id_96'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_well_id_384'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_unique_job_id'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_concatenated_clip_id'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_well_id'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_qc_plate_number'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_qc_barcode'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_plate_number'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_number'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_batch_number'), table_name='basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_qc_fragment_clip_barcode'), table_name='basic_clip_qc_fragment')
    op.drop_table('basic_clip_qc_fragment')
    op.drop_index(op.f('ix_basic_clip_list_unique_job_id'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_transfer_volume'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_part_id_sample_number'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_part_id'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_job_master_well_id'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_job_master_barcode'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_destination_plate_number'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_concatenated_part_id'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_concatenated_clip_id'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_clip_well_id'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_clip_number'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_clip_batch_number'), table_name='basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_list_clip_barcode'), table_name='basic_clip_list')
    op.drop_table('basic_clip_list')
    op.drop_index(op.f('ix_basic_clip_enzyme_unique_job_id'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_transfer_volume'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_enzyme_plate_well_id'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_enzyme_plate_number'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_enzyme_plate_barcode'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_concatenated_clip_id'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_clip_well_id'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_clip_plate_number'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_clip_number'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_clip_batch_number'), table_name='basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_clip_enzyme_clip_barcode'), table_name='basic_clip_enzyme')
    op.drop_table('basic_clip_enzyme')
    op.drop_index(op.f('ix_basic_buffer_plate_wells_unique_job_id'), table_name='basic_buffer_plate_wells')
    op.drop_index(op.f('ix_basic_buffer_plate_wells_loading_volume'), table_name='basic_buffer_plate_wells')
    op.drop_index(op.f('ix_basic_buffer_plate_wells_buffer_plate_well_id'), table_name='basic_buffer_plate_wells')
    op.drop_index(op.f('ix_basic_buffer_plate_wells_buffer_plate_number'), table_name='basic_buffer_plate_wells')
    op.drop_index(op.f('ix_basic_buffer_plate_wells_buffer_plate_barcode'), table_name='basic_buffer_plate_wells')
    op.drop_index(op.f('ix_basic_buffer_plate_wells_buffer_name'), table_name='basic_buffer_plate_wells')
    op.drop_table('basic_buffer_plate_wells')
    # ### end Alembic commands ###
| 77.650964
| 155
| 0.762954
| 5,578
| 36,263
| 4.554141
| 0.019183
| 0.058891
| 0.052907
| 0.066134
| 0.975436
| 0.949848
| 0.918592
| 0.896666
| 0.869149
| 0.835177
| 0
| 0.00941
| 0.085707
| 36,263
| 466
| 156
| 77.817597
| 0.756779
| 0.008273
| 0
| 0.320713
| 0
| 0
| 0.390052
| 0.215582
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004454
| false
| 0
| 0.006682
| 0
| 0.011136
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
21c275711fbf243c6f14de1de7965b43694f04ea
| 5,185
|
py
|
Python
|
chsdi/models/vector/zeitreihen.py
|
procrastinatio/mf-chsdi3
|
206e9362860376338cc83ded902790dc216e2563
|
[
"BSD-3-Clause"
] | 29
|
2015-02-12T07:45:57.000Z
|
2020-12-20T08:49:26.000Z
|
chsdi/models/vector/zeitreihen.py
|
procrastinatio/mf-chsdi3
|
206e9362860376338cc83ded902790dc216e2563
|
[
"BSD-3-Clause"
] | 1,753
|
2015-01-05T07:11:27.000Z
|
2022-03-28T15:17:07.000Z
|
chsdi/models/vector/zeitreihen.py
|
procrastinatio/mf-chsdi3
|
206e9362860376338cc83ded902790dc216e2563
|
[
"BSD-3-Clause"
] | 15
|
2015-04-23T08:38:58.000Z
|
2020-09-28T14:17:40.000Z
|
# -*- coding: utf-8 -*-
from sqlalchemy import Column, Integer
from sqlalchemy.types import Numeric, Unicode
from sqlalchemy.dialects import postgresql
from chsdi.models import register, bases
from chsdi.models.vector import Vector, Geometry2D
# Declarative base for this topic, looked up from chsdi.models.bases.
Base = bases['zeitreihen']
class Zeitreihen15(Base, Vector):
    """Tooltip model for layer 'ch.swisstopo.zeitreihen', coarsest band.

    Maps ``public.tooltip_15`` with explicitly declared columns
    (``autoload`` is False).  NOTE(review): unlike the other Zeitreihen
    variants this class sets no ``__maxscale__`` — presumably the band is
    open-ended at the small-scale extreme; confirm against the framework's
    resolution/scale dispatch.
    """
    __tablename__ = 'tooltip_15'
    __table_args__ = ({'schema': 'public', 'autoload': False})
    __template__ = 'templates/htmlpopup/zeitreihen.mako'  # popup rendering template
    __bodId__ = 'ch.swisstopo.zeitreihen'  # shared layer id (see register() calls below)
    __minresolution__ = 10.05
    __maxresolution__ = 500005
    __minscale__ = 37984.176
    __timeInstant__ = 'years'
    __label__ = 'release_year'
    # Primary key column is 'bgdi_id' in the table, exposed as 'id' here.
    id = Column('bgdi_id', Unicode, primary_key=True)
    kbbez = Column('kbbez', Unicode)
    produkt = Column('produkt', Unicode)
    kbnum = Column('kbnum', Unicode)
    release_year = Column('release_year', Integer)
    years = Column('years', Integer)
    bv_nummer = Column('bv_nummer', Unicode)
    bgdi_order = Column('bgdi_order', Integer)
    array_release_years = Column('array_release_years', postgresql.ARRAY(Integer))
    box2d = Column('box2d', Unicode)
    the_geom = Column(Geometry2D)
class Zeitreihen20(Base, Vector):
    """Tooltip model for layer 'ch.swisstopo.zeitreihen', resolution 5.05-10.05.

    Maps ``public.tooltip_20``; same column set as the other Zeitreihen
    variants, only the table and the resolution/scale bounds differ.
    """
    __tablename__ = 'tooltip_20'
    __table_args__ = ({'schema': 'public', 'autoload': False})
    __template__ = 'templates/htmlpopup/zeitreihen.mako'  # popup rendering template
    __bodId__ = 'ch.swisstopo.zeitreihen'  # shared layer id (see register() calls below)
    __minresolution__ = 5.05
    __maxresolution__ = 10.05
    __minscale__ = 19086.576
    __maxscale__ = 37984.176
    __timeInstant__ = 'years'
    __label__ = 'release_year'
    # Primary key column is 'bgdi_id' in the table, exposed as 'id' here.
    id = Column('bgdi_id', Unicode, primary_key=True)
    kbbez = Column('kbbez', Unicode)
    produkt = Column('produkt', Unicode)
    kbnum = Column('kbnum', Unicode)
    release_year = Column('release_year', Integer)
    years = Column('years', Integer)
    bv_nummer = Column('bv_nummer', Unicode)
    bgdi_order = Column('bgdi_order', Integer)
    array_release_years = Column('array_release_years', postgresql.ARRAY(Integer))
    box2d = Column('box2d', Unicode)
    the_geom = Column(Geometry2D)
class Zeitreihen21(Base, Vector):
    """Tooltip model for layer 'ch.swisstopo.zeitreihen', resolution 2.55-5.05.

    Maps ``public.tooltip_21``; same column set as the other Zeitreihen
    variants, only the table and the resolution/scale bounds differ.
    """
    __tablename__ = 'tooltip_21'
    __table_args__ = ({'schema': 'public', 'autoload': False})
    __template__ = 'templates/htmlpopup/zeitreihen.mako'  # popup rendering template
    __bodId__ = 'ch.swisstopo.zeitreihen'  # shared layer id (see register() calls below)
    __minresolution__ = 2.55
    __maxresolution__ = 5.05
    __minscale__ = 9637.776
    __maxscale__ = 19086.576
    __timeInstant__ = 'years'
    __label__ = 'release_year'
    # Primary key column is 'bgdi_id' in the table, exposed as 'id' here.
    id = Column('bgdi_id', Unicode, primary_key=True)
    kbbez = Column('kbbez', Unicode)
    produkt = Column('produkt', Unicode)
    kbnum = Column('kbnum', Unicode)
    release_year = Column('release_year', Integer)
    years = Column('years', Integer)
    bv_nummer = Column('bv_nummer', Unicode)
    bgdi_order = Column('bgdi_order', Integer)
    array_release_years = Column('array_release_years', postgresql.ARRAY(Integer))
    box2d = Column('box2d', Unicode)
    the_geom = Column(Geometry2D)
class Zeitreihen22(Base, Vector):
    """Tooltip model for layer 'ch.swisstopo.zeitreihen', finest band (0-2.55).

    Maps ``public.tooltip_22``; same column set as the other Zeitreihen
    variants, only the table and the resolution/scale bounds differ.
    """
    __tablename__ = 'tooltip_22'
    __table_args__ = ({'schema': 'public', 'autoload': False})
    __template__ = 'templates/htmlpopup/zeitreihen.mako'  # popup rendering template
    __bodId__ = 'ch.swisstopo.zeitreihen'  # shared layer id (see register() calls below)
    __minresolution__ = 0
    __maxresolution__ = 2.55
    __minscale__ = 0
    __maxscale__ = 9637.776
    __timeInstant__ = 'years'
    __label__ = 'release_year'
    # Primary key column is 'bgdi_id' in the table, exposed as 'id' here.
    id = Column('bgdi_id', Unicode, primary_key=True)
    kbbez = Column('kbbez', Unicode)
    produkt = Column('produkt', Unicode)
    kbnum = Column('kbnum', Unicode)
    release_year = Column('release_year', Integer)
    years = Column('years', Integer)
    bv_nummer = Column('bv_nummer', Unicode)
    bgdi_order = Column('bgdi_order', Integer)
    array_release_years = Column('array_release_years', postgresql.ARRAY(Integer))
    box2d = Column('box2d', Unicode)
    the_geom = Column(Geometry2D)
class DufourErst(Base, Vector):
    """Tooltip model for layer 'ch.swisstopo.hiks-dufour'.

    Maps the ``public.view_dufour_erstausgabe`` view; labelled by the
    integer ``datenstand`` column.
    """
    __tablename__ = 'view_dufour_erstausgabe'
    __table_args__ = ({'schema': 'public', 'autoload': False})
    __template__ = 'templates/htmlpopup/dufour_erst.mako'  # popup rendering template
    __bodId__ = 'ch.swisstopo.hiks-dufour'
    __label__ = 'datenstand'
    # Primary key column is 'tilenumber' in the view, exposed as 'id' here.
    id = Column('tilenumber', Unicode, primary_key=True)
    kbbez = Column('kbbez', Unicode)
    datenstand = Column('datenstand', Integer)
    bv_nummer = Column('bv_nummer', Unicode)
    the_geom = Column(Geometry2D)
class SiegfriedErst(Base, Vector):
    """Tooltip model for layer 'ch.swisstopo.hiks-siegfried'.

    Maps the ``public.view_siegfried_erstausgabe`` view.  NOTE(review):
    ``datenstand`` is Numeric here but Integer in DufourErst — presumably
    the underlying views differ; confirm before unifying.
    """
    __tablename__ = 'view_siegfried_erstausgabe'
    __table_args__ = ({'schema': 'public', 'autoload': False})
    __template__ = 'templates/htmlpopup/siegfried_erst.mako'  # popup rendering template
    __bodId__ = 'ch.swisstopo.hiks-siegfried'
    __label__ = 'datenstand'
    # Primary key column is 'tilenumber' in the view, exposed as 'id' here.
    id = Column('tilenumber', Unicode, primary_key=True)
    kbbez = Column('kbbez', Unicode)
    datenstand = Column('datenstand', Numeric)
    bv_nummer = Column('bv_nummer', Unicode)
    the_geom = Column(Geometry2D)
# Register each model under its layer id.  The four Zeitreihen classes
# share one bodId — presumably the framework picks among them by the
# per-class resolution/scale bounds (confirm in chsdi.models.register).
register('ch.swisstopo.hiks-siegfried', SiegfriedErst)
register('ch.swisstopo.hiks-dufour', DufourErst)
register('ch.swisstopo.zeitreihen', Zeitreihen15)
register('ch.swisstopo.zeitreihen', Zeitreihen20)
register('ch.swisstopo.zeitreihen', Zeitreihen21)
register('ch.swisstopo.zeitreihen', Zeitreihen22)
| 36.77305
| 82
| 0.706654
| 561
| 5,185
| 6.039216
| 0.163993
| 0.038961
| 0.049587
| 0.03719
| 0.729634
| 0.729634
| 0.711629
| 0.709563
| 0.709563
| 0.709563
| 0
| 0.026438
| 0.16837
| 5,185
| 140
| 83
| 37.035714
| 0.759276
| 0.00405
| 0
| 0.628099
| 0
| 0
| 0.234793
| 0.106548
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041322
| 0
| 0.942149
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
21e3690b0c27a505570a836caa1e2ecd7b6393ec
| 113
|
py
|
Python
|
lino_book/projects/dumps/settings/memory.py
|
lino-framework/lino_book
|
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
|
[
"BSD-2-Clause"
] | 3
|
2016-08-25T05:58:09.000Z
|
2019-12-05T11:13:45.000Z
|
lino_book/projects/dumps/settings/memory.py
|
lino-framework/lino_book
|
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
|
[
"BSD-2-Clause"
] | 18
|
2016-11-12T21:38:58.000Z
|
2019-12-03T17:54:38.000Z
|
lino_book/projects/dumps/settings/memory.py
|
lino-framework/lino_book
|
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
|
[
"BSD-2-Clause"
] | 9
|
2016-10-15T11:12:33.000Z
|
2021-09-22T04:37:37.000Z
|
from .a import *
# Tag the site title so this settings variant is recognisable, then point
# the default database at SQLite's in-memory mode.
SITE.verbose_name += " (:memory:)"
DATABASES['default']['NAME'] = ':memory:'
| 28.25
| 53
| 0.663717
| 14
| 113
| 5.214286
| 0.642857
| 0.30137
| 0.410959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115044
| 113
| 3
| 54
| 37.666667
| 0.73
| 0
| 0
| 0
| 0
| 0
| 0.265487
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
21fb4e3da12fdeb1985374c9ac95601dfea3fe91
| 186
|
py
|
Python
|
api/polls/views.py
|
vetordev/django-api
|
d247a0d564f5f08014856e6b9b951f4628e22437
|
[
"MIT"
] | null | null | null |
api/polls/views.py
|
vetordev/django-api
|
d247a0d564f5f08014856e6b9b951f4628e22437
|
[
"MIT"
] | null | null | null |
api/polls/views.py
|
vetordev/django-api
|
d247a0d564f5f08014856e6b9b951f4628e22437
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse, HttpRequest
# Create your views here.
def index(request: HttpRequest):
    """Polls index view: respond with a static greeting."""
    body = "Hello, world."
    return HttpResponse(body)
| 23.25
| 49
| 0.790323
| 23
| 186
| 6.391304
| 0.782609
| 0.136054
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 186
| 7
| 50
| 26.571429
| 0.907407
| 0.123656
| 0
| 0
| 0
| 0
| 0.080745
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
1d02c77894a8d9b212233f457676267dbf443f0a
| 47
|
py
|
Python
|
brats_competition/model_training/common/datasets/__init__.py
|
andriiaprysiazhnyk/brats_competition
|
c2fa999c3458a118ca5c5fe81a37a74ef664fef3
|
[
"MIT"
] | 2
|
2020-03-30T20:45:19.000Z
|
2020-06-08T14:01:21.000Z
|
brats_competition/model_training/common/datasets/__init__.py
|
andriiaprysiazhnyk/brats_competition
|
c2fa999c3458a118ca5c5fe81a37a74ef664fef3
|
[
"MIT"
] | null | null | null |
brats_competition/model_training/common/datasets/__init__.py
|
andriiaprysiazhnyk/brats_competition
|
c2fa999c3458a118ca5c5fe81a37a74ef664fef3
|
[
"MIT"
] | null | null | null |
from .brats_2d import *
from .brats_3d import *
| 23.5
| 23
| 0.765957
| 8
| 47
| 4.25
| 0.625
| 0.529412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.148936
| 47
| 2
| 24
| 23.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
df094f132247614ccb934591c998d222fc42ac9c
| 66,478
|
py
|
Python
|
contextily/_providers.py
|
jpn--/contextily
|
3cce6d72dd00355fabcd7fd6977718ecc87f8510
|
[
"BSD-3-Clause"
] | 163
|
2016-09-16T23:50:38.000Z
|
2022-01-07T12:22:24.000Z
|
contextily/_providers.py
|
jpn--/contextily
|
3cce6d72dd00355fabcd7fd6977718ecc87f8510
|
[
"BSD-3-Clause"
] | 126
|
2016-09-28T22:18:00.000Z
|
2020-04-08T12:45:36.000Z
|
contextily/_providers.py
|
jpn--/contextily
|
3cce6d72dd00355fabcd7fd6977718ecc87f8510
|
[
"BSD-3-Clause"
] | 43
|
2016-09-28T19:59:36.000Z
|
2022-01-20T17:03:53.000Z
|
"""
Tile providers.
This file is autogenerated! It is a python representation of the leaflet
providers defined by the leaflet-providers.js extension to Leaflet
(https://github.com/leaflet-extras/leaflet-providers).
Credit to the leaflet-providers.js project (BSD 2-Clause "Simplified" License)
and the Leaflet Providers contributors.
Generated by parse_leaflet_providers.py at 2019-08-01 from leaflet-providers
at commit 9eb968f8442ea492626c9c8f0dac8ede484e6905 (Bumped version to 1.8.0).
"""
class Bunch(dict):
    """A dict whose keys can also be read as attributes."""

    def __dir__(self):
        # Expose stored keys for dir() / tab completion.
        return self.keys()

    def __getattr__(self, key):
        # Translate a missing key into AttributeError so that
        # getattr()/hasattr() semantics hold for attribute access.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key)
class TileProvider(Bunch):
    """
    Attribute-accessible dict describing one tile source; calling the
    instance returns a copy with the given keys overridden, leaving the
    original untouched.
    """

    def __call__(self, **kwargs):
        # Copy first (preserving the TileProvider class), then override.
        updated = TileProvider(self)
        updated.update(kwargs)
        return updated
providers = Bunch(
OpenStreetMap = Bunch(
Mapnik = TileProvider(
url = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = '(C) OpenStreetMap contributors',
name = 'OpenStreetMap.Mapnik'
),
DE = TileProvider(
url = 'https://{s}.tile.openstreetmap.de/tiles/osmde/{z}/{x}/{y}.png',
max_zoom = 18,
attribution = '(C) OpenStreetMap contributors',
name = 'OpenStreetMap.DE'
),
CH = TileProvider(
url = 'https://tile.osm.ch/switzerland/{z}/{x}/{y}.png',
max_zoom = 18,
attribution = '(C) OpenStreetMap contributors',
bounds = [[45, 5], [48, 11]],
name = 'OpenStreetMap.CH'
),
France = TileProvider(
url = 'https://{s}.tile.openstreetmap.fr/osmfr/{z}/{x}/{y}.png',
max_zoom = 20,
attribution = '(C) Openstreetmap France | (C) OpenStreetMap contributors',
name = 'OpenStreetMap.France'
),
HOT = TileProvider(
url = 'https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = '(C) OpenStreetMap contributors, Tiles style by Humanitarian OpenStreetMap Team hosted by OpenStreetMap France',
name = 'OpenStreetMap.HOT'
),
BZH = TileProvider(
url = 'https://tile.openstreetmap.bzh/br/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = '(C) OpenStreetMap contributors, Tiles courtesy of Breton OpenStreetMap Team',
bounds = [[46.2, -5.5], [50, 0.7]],
name = 'OpenStreetMap.BZH'
)
),
OpenSeaMap = TileProvider(
url = 'https://tiles.openseamap.org/seamark/{z}/{x}/{y}.png',
attribution = 'Map data: (C) OpenSeaMap contributors',
name = 'OpenSeaMap'
),
OpenPtMap = TileProvider(
url = 'http://openptmap.org/tiles/{z}/{x}/{y}.png',
max_zoom = 17,
attribution = 'Map data: (C) OpenPtMap contributors',
name = 'OpenPtMap'
),
OpenTopoMap = TileProvider(
url = 'https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png',
max_zoom = 17,
attribution = 'Map data: (C) OpenStreetMap contributors, SRTM | Map style: (C) OpenTopoMap (CC-BY-SA)',
name = 'OpenTopoMap'
),
OpenRailwayMap = TileProvider(
url = 'https://{s}.tiles.openrailwaymap.org/standard/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = 'Map data: (C) OpenStreetMap contributors | Map style: (C) OpenRailwayMap (CC-BY-SA)',
name = 'OpenRailwayMap'
),
OpenFireMap = TileProvider(
url = 'http://openfiremap.org/hytiles/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = 'Map data: (C) OpenStreetMap contributors | Map style: (C) OpenFireMap (CC-BY-SA)',
name = 'OpenFireMap'
),
SafeCast = TileProvider(
url = 'https://s3.amazonaws.com/te512.safecast.org/{z}/{x}/{y}.png',
max_zoom = 16,
attribution = 'Map data: (C) OpenStreetMap contributors | Map style: (C) SafeCast (CC-BY-SA)',
name = 'SafeCast'
),
Thunderforest = Bunch(
OpenCycleMap = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'cycle',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.OpenCycleMap'
),
Transport = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'transport',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.Transport'
),
TransportDark = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'transport-dark',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.TransportDark'
),
SpinalMap = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'spinal-map',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.SpinalMap'
),
Landscape = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'landscape',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.Landscape'
),
Outdoors = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'outdoors',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.Outdoors'
),
Pioneer = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'pioneer',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.Pioneer'
),
MobileAtlas = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'mobile-atlas',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.MobileAtlas'
),
Neighbourhood = TileProvider(
url = 'https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}',
attribution = '(C) Thunderforest, (C) OpenStreetMap contributors',
variant = 'neighbourhood',
apikey = '<insert your api key here>',
max_zoom = 22,
name = 'Thunderforest.Neighbourhood'
)
),
OpenMapSurfer = Bunch(
Roads = TileProvider(
url = 'https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png',
max_zoom = 19,
variant = 'roads',
attribution = 'Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors',
name = 'OpenMapSurfer.Roads'
),
Hybrid = TileProvider(
url = 'https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png',
max_zoom = 19,
variant = 'hybrid',
attribution = 'Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors',
name = 'OpenMapSurfer.Hybrid'
),
AdminBounds = TileProvider(
url = 'https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png',
max_zoom = 18,
variant = 'adminb',
attribution = 'Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors',
name = 'OpenMapSurfer.AdminBounds'
),
ContourLines = TileProvider(
url = 'https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png',
max_zoom = 18,
variant = 'asterc',
attribution = 'Imagery from GIScience Research Group @ University of Heidelberg | Map data ASTER GDEM',
min_zoom = 13,
name = 'OpenMapSurfer.ContourLines'
),
Hillshade = TileProvider(
url = 'https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png',
max_zoom = 18,
variant = 'asterh',
attribution = 'Imagery from GIScience Research Group @ University of Heidelberg | Map data ASTER GDEM, SRTM',
name = 'OpenMapSurfer.Hillshade'
),
ElementsAtRisk = TileProvider(
url = 'https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png',
max_zoom = 19,
variant = 'elements_at_risk',
attribution = 'Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors',
name = 'OpenMapSurfer.ElementsAtRisk'
)
),
Hydda = Bunch(
Full = TileProvider(
url = 'https://{s}.tile.openstreetmap.se/hydda/{variant}/{z}/{x}/{y}.png',
max_zoom = 18,
variant = 'full',
attribution = 'Tiles courtesy of OpenStreetMap Sweden -- Map data (C) OpenStreetMap contributors',
name = 'Hydda.Full'
),
Base = TileProvider(
url = 'https://{s}.tile.openstreetmap.se/hydda/{variant}/{z}/{x}/{y}.png',
max_zoom = 18,
variant = 'base',
attribution = 'Tiles courtesy of OpenStreetMap Sweden -- Map data (C) OpenStreetMap contributors',
name = 'Hydda.Base'
),
RoadsAndLabels = TileProvider(
url = 'https://{s}.tile.openstreetmap.se/hydda/{variant}/{z}/{x}/{y}.png',
max_zoom = 18,
variant = 'roads_and_labels',
attribution = 'Tiles courtesy of OpenStreetMap Sweden -- Map data (C) OpenStreetMap contributors',
name = 'Hydda.RoadsAndLabels'
)
),
MapBox = TileProvider(
url = 'https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}{r}.png?access_token={accessToken}',
attribution = '(C) Mapbox (C) OpenStreetMap contributors Improve this map',
subdomains = 'abcd',
id = 'mapbox.streets',
accessToken = '<insert your access token here>',
name = 'MapBox'
),
Stamen = Bunch(
Toner = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toner',
ext = 'png',
name = 'Stamen.Toner'
),
TonerBackground = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toner-background',
ext = 'png',
name = 'Stamen.TonerBackground'
),
TonerHybrid = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toner-hybrid',
ext = 'png',
name = 'Stamen.TonerHybrid'
),
TonerLines = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toner-lines',
ext = 'png',
name = 'Stamen.TonerLines'
),
TonerLabels = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toner-labels',
ext = 'png',
name = 'Stamen.TonerLabels'
),
TonerLite = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toner-lite',
ext = 'png',
name = 'Stamen.TonerLite'
),
Watercolor = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 1,
max_zoom = 16,
variant = 'watercolor',
ext = 'jpg',
name = 'Stamen.Watercolor'
),
Terrain = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 18,
variant = 'terrain',
ext = 'png',
name = 'Stamen.Terrain'
),
TerrainBackground = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 18,
variant = 'terrain-background',
ext = 'png',
name = 'Stamen.TerrainBackground'
),
TopOSMRelief = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toposm-color-relief',
ext = 'jpg',
bounds = [[22, -132], [51, -56]],
name = 'Stamen.TopOSMRelief'
),
TopOSMFeatures = TileProvider(
url = 'https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}',
attribution = 'Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors',
subdomains = 'abcd',
min_zoom = 0,
max_zoom = 20,
variant = 'toposm-features',
ext = 'png',
bounds = [[22, -132], [51, -56]],
opacity = 0.9,
name = 'Stamen.TopOSMFeatures'
)
),
Esri = Bunch(
WorldStreetMap = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'World_Street_Map',
attribution = 'Tiles (C) Esri -- Source: Esri, DeLorme, NAVTEQ, USGS, Intermap, iPC, NRCAN, Esri Japan, METI, Esri China (Hong Kong), Esri (Thailand), TomTom, 2012',
name = 'Esri.WorldStreetMap'
),
DeLorme = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'Specialty/DeLorme_World_Base_Map',
attribution = 'Tiles (C) Esri -- Copyright: (C)2012 DeLorme',
min_zoom = 1,
max_zoom = 11,
name = 'Esri.DeLorme'
),
WorldTopoMap = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'World_Topo_Map',
attribution = 'Tiles (C) Esri -- Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community',
name = 'Esri.WorldTopoMap'
),
WorldImagery = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'World_Imagery',
attribution = 'Tiles (C) Esri -- Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community',
name = 'Esri.WorldImagery'
),
WorldTerrain = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'World_Terrain_Base',
attribution = 'Tiles (C) Esri -- Source: USGS, Esri, TANA, DeLorme, and NPS',
max_zoom = 13,
name = 'Esri.WorldTerrain'
),
WorldShadedRelief = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'World_Shaded_Relief',
attribution = 'Tiles (C) Esri -- Source: Esri',
max_zoom = 13,
name = 'Esri.WorldShadedRelief'
),
WorldPhysical = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'World_Physical_Map',
attribution = 'Tiles (C) Esri -- Source: US National Park Service',
max_zoom = 8,
name = 'Esri.WorldPhysical'
),
OceanBasemap = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'Ocean_Basemap',
attribution = 'Tiles (C) Esri -- Sources: GEBCO, NOAA, CHS, OSU, UNH, CSUMB, National Geographic, DeLorme, NAVTEQ, and Esri',
max_zoom = 13,
name = 'Esri.OceanBasemap'
),
NatGeoWorldMap = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'NatGeo_World_Map',
attribution = 'Tiles (C) Esri -- National Geographic, Esri, DeLorme, NAVTEQ, UNEP-WCMC, USGS, NASA, ESA, METI, NRCAN, GEBCO, NOAA, iPC',
max_zoom = 16,
name = 'Esri.NatGeoWorldMap'
),
WorldGrayCanvas = TileProvider(
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}',
variant = 'Canvas/World_Light_Gray_Base',
attribution = 'Tiles (C) Esri -- Esri, DeLorme, NAVTEQ',
max_zoom = 16,
name = 'Esri.WorldGrayCanvas'
)
),
OpenWeatherMap = Bunch(
Clouds = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'clouds',
name = 'OpenWeatherMap.Clouds'
),
CloudsClassic = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'clouds_cls',
name = 'OpenWeatherMap.CloudsClassic'
),
Precipitation = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'precipitation',
name = 'OpenWeatherMap.Precipitation'
),
PrecipitationClassic = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'precipitation_cls',
name = 'OpenWeatherMap.PrecipitationClassic'
),
Rain = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'rain',
name = 'OpenWeatherMap.Rain'
),
RainClassic = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'rain_cls',
name = 'OpenWeatherMap.RainClassic'
),
Pressure = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'pressure',
name = 'OpenWeatherMap.Pressure'
),
PressureContour = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'pressure_cntr',
name = 'OpenWeatherMap.PressureContour'
),
Wind = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'wind',
name = 'OpenWeatherMap.Wind'
),
Temperature = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'temp',
name = 'OpenWeatherMap.Temperature'
),
Snow = TileProvider(
url = 'http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}',
max_zoom = 19,
attribution = 'Map data (C) OpenWeatherMap',
apiKey = '<insert your api key here>',
opacity = 0.5,
variant = 'snow',
name = 'OpenWeatherMap.Snow'
)
),
HERE = Bunch(
normalDay = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDay'
),
normalDayCustom = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day.custom',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDayCustom'
),
normalDayGrey = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day.grey',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDayGrey'
),
normalDayMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDayMobile'
),
normalDayGreyMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day.grey.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDayGreyMobile'
),
normalDayTransit = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day.transit',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDayTransit'
),
normalDayTransitMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day.transit.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalDayTransitMobile'
),
normalNight = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.night',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalNight'
),
normalNightMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.night.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalNightMobile'
),
normalNightGrey = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.night.grey',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalNightGrey'
),
normalNightGreyMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.night.grey.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalNightGreyMobile'
),
normalNightTransit = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.night.transit',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalNightTransit'
),
normalNightTransitMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.night.transit.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.normalNightTransitMobile'
),
reducedDay = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'reduced.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.reducedDay'
),
reducedNight = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'reduced.night',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.reducedNight'
),
basicMap = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day',
max_zoom = 20,
type = 'basetile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.basicMap'
),
mapLabels = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'normal.day',
max_zoom = 20,
type = 'labeltile',
language = 'eng',
format = 'png',
size = '256',
name = 'HERE.mapLabels'
),
trafficFlow = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'traffic',
variant = 'normal.day',
max_zoom = 20,
type = 'flowtile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.trafficFlow'
),
carnavDayGrey = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'carnav.day.grey',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.carnavDayGrey'
),
hybridDay = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'hybrid.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.hybridDay'
),
hybridDayMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'hybrid.day.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.hybridDayMobile'
),
hybridDayTransit = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'hybrid.day.transit',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.hybridDayTransit'
),
hybridDayGrey = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'hybrid.grey.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.hybridDayGrey'
),
pedestrianDay = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'pedestrian.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.pedestrianDay'
),
pedestrianNight = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'base',
variant = 'pedestrian.night',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.pedestrianNight'
),
satelliteDay = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'satellite.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.satelliteDay'
),
terrainDay = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'terrain.day',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.terrainDay'
),
terrainDayMobile = TileProvider(
url = 'https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}',
attribution = 'Map (C) 1987-2019 HERE',
subdomains = '1234',
mapID = 'newest',
app_id = '<insert your app_id here>',
app_code = '<insert your app_code here>',
base = 'aerial',
variant = 'terrain.day.mobile',
max_zoom = 20,
type = 'maptile',
language = 'eng',
format = 'png8',
size = '256',
name = 'HERE.terrainDayMobile'
)
),
FreeMapSK = TileProvider(
url = 'http://t{s}.freemap.sk/T/{z}/{x}/{y}.jpeg',
min_zoom = 8,
max_zoom = 16,
subdomains = '1234',
bounds = [[47.204642, 15.996093], [49.830896, 22.576904]],
attribution = '(C) OpenStreetMap contributors, vizualization CC-By-SA 2.0 Freemap.sk',
name = 'FreeMapSK'
),
MtbMap = TileProvider(
url = 'http://tile.mtbmap.cz/mtbmap_tiles/{z}/{x}/{y}.png',
attribution = '(C) OpenStreetMap contributors & USGS',
name = 'MtbMap'
),
CartoDB = Bunch(
Positron = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'light_all',
name = 'CartoDB.Positron'
),
PositronNoLabels = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'light_nolabels',
name = 'CartoDB.PositronNoLabels'
),
PositronOnlyLabels = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'light_only_labels',
name = 'CartoDB.PositronOnlyLabels'
),
DarkMatter = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'dark_all',
name = 'CartoDB.DarkMatter'
),
DarkMatterNoLabels = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'dark_nolabels',
name = 'CartoDB.DarkMatterNoLabels'
),
DarkMatterOnlyLabels = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'dark_only_labels',
name = 'CartoDB.DarkMatterOnlyLabels'
),
Voyager = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'rastertiles/voyager',
name = 'CartoDB.Voyager'
),
VoyagerNoLabels = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'rastertiles/voyager_nolabels',
name = 'CartoDB.VoyagerNoLabels'
),
VoyagerOnlyLabels = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'rastertiles/voyager_only_labels',
name = 'CartoDB.VoyagerOnlyLabels'
),
VoyagerLabelsUnder = TileProvider(
url = 'https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png',
attribution = '(C) OpenStreetMap contributors (C) CARTO',
subdomains = 'abcd',
max_zoom = 19,
variant = 'rastertiles/voyager_labels_under',
name = 'CartoDB.VoyagerLabelsUnder'
)
),
HikeBike = Bunch(
HikeBike = TileProvider(
url = 'https://tiles.wmflabs.org/{variant}/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = '(C) OpenStreetMap contributors',
variant = 'hikebike',
name = 'HikeBike.HikeBike'
),
HillShading = TileProvider(
url = 'https://tiles.wmflabs.org/{variant}/{z}/{x}/{y}.png',
max_zoom = 15,
attribution = '(C) OpenStreetMap contributors',
variant = 'hillshading',
name = 'HikeBike.HillShading'
)
),
BasemapAT = Bunch(
basemap = TileProvider(
url = 'https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}',
max_zoom = 20,
attribution = 'Datenquelle: basemap.at',
subdomains = ['', '1', '2', '3', '4'],
format = 'png',
bounds = [[46.35877, 8.782379], [49.037872, 17.189532]],
variant = 'geolandbasemap',
name = 'BasemapAT.basemap'
),
grau = TileProvider(
url = 'https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}',
max_zoom = 19,
attribution = 'Datenquelle: basemap.at',
subdomains = ['', '1', '2', '3', '4'],
format = 'png',
bounds = [[46.35877, 8.782379], [49.037872, 17.189532]],
variant = 'bmapgrau',
name = 'BasemapAT.grau'
),
overlay = TileProvider(
url = 'https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}',
max_zoom = 19,
attribution = 'Datenquelle: basemap.at',
subdomains = ['', '1', '2', '3', '4'],
format = 'png',
bounds = [[46.35877, 8.782379], [49.037872, 17.189532]],
variant = 'bmapoverlay',
name = 'BasemapAT.overlay'
),
highdpi = TileProvider(
url = 'https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}',
max_zoom = 19,
attribution = 'Datenquelle: basemap.at',
subdomains = ['', '1', '2', '3', '4'],
format = 'jpeg',
bounds = [[46.35877, 8.782379], [49.037872, 17.189532]],
variant = 'bmaphidpi',
name = 'BasemapAT.highdpi'
),
orthofoto = TileProvider(
url = 'https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}',
max_zoom = 20,
attribution = 'Datenquelle: basemap.at',
subdomains = ['', '1', '2', '3', '4'],
format = 'jpeg',
bounds = [[46.35877, 8.782379], [49.037872, 17.189532]],
variant = 'bmaporthofoto30cm',
name = 'BasemapAT.orthofoto'
)
),
nlmaps = Bunch(
standaard = TileProvider(
url = 'https://geodata.nationaalgeoregister.nl/tiles/service/wmts/{variant}/EPSG:3857/{z}/{x}/{y}.png',
min_zoom = 6,
max_zoom = 19,
bounds = [[50.5, 3.25], [54, 7.6]],
attribution = 'Kaartgegevens (C) Kadaster',
variant = 'brtachtergrondkaart',
name = 'nlmaps.standaard'
),
pastel = TileProvider(
url = 'https://geodata.nationaalgeoregister.nl/tiles/service/wmts/{variant}/EPSG:3857/{z}/{x}/{y}.png',
min_zoom = 6,
max_zoom = 19,
bounds = [[50.5, 3.25], [54, 7.6]],
attribution = 'Kaartgegevens (C) Kadaster',
variant = 'brtachtergrondkaartpastel',
name = 'nlmaps.pastel'
),
grijs = TileProvider(
url = 'https://geodata.nationaalgeoregister.nl/tiles/service/wmts/{variant}/EPSG:3857/{z}/{x}/{y}.png',
min_zoom = 6,
max_zoom = 19,
bounds = [[50.5, 3.25], [54, 7.6]],
attribution = 'Kaartgegevens (C) Kadaster',
variant = 'brtachtergrondkaartgrijs',
name = 'nlmaps.grijs'
),
luchtfoto = TileProvider(
url = 'https://geodata.nationaalgeoregister.nl/luchtfoto/rgb/wmts/1.0.0/2016_ortho25/EPSG:3857/{z}/{x}/{y}.png',
min_zoom = 6,
max_zoom = 19,
bounds = [[50.5, 3.25], [54, 7.6]],
attribution = 'Kaartgegevens (C) Kadaster',
name = 'nlmaps.luchtfoto'
)
),
NASAGIBS = Bunch(
ModisTerraTrueColorCR = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 9,
format = 'jpg',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'MODIS_Terra_CorrectedReflectance_TrueColor',
name = 'NASAGIBS.ModisTerraTrueColorCR'
),
ModisTerraBands367CR = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 9,
format = 'jpg',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'MODIS_Terra_CorrectedReflectance_Bands367',
name = 'NASAGIBS.ModisTerraBands367CR'
),
ViirsEarthAtNight2012 = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 8,
format = 'jpg',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'VIIRS_CityLights_2012',
name = 'NASAGIBS.ViirsEarthAtNight2012'
),
ModisTerraLSTDay = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 7,
format = 'png',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'MODIS_Terra_Land_Surface_Temp_Day',
opacity = 0.75,
name = 'NASAGIBS.ModisTerraLSTDay'
),
ModisTerraSnowCover = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 8,
format = 'png',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'MODIS_Terra_Snow_Cover',
opacity = 0.75,
name = 'NASAGIBS.ModisTerraSnowCover'
),
ModisTerraAOD = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 6,
format = 'png',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'MODIS_Terra_Aerosol',
opacity = 0.75,
name = 'NASAGIBS.ModisTerraAOD'
),
ModisTerraChlorophyll = TileProvider(
url = 'https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}',
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.',
bounds = [[-85.0511287776, -179.999999975], [85.0511287776, 179.999999975]],
min_zoom = 1,
max_zoom = 7,
format = 'png',
time = '',
tilematrixset = 'GoogleMapsCompatible_Level',
variant = 'MODIS_Terra_Chlorophyll_A',
opacity = 0.75,
name = 'NASAGIBS.ModisTerraChlorophyll'
)
),
NLS = TileProvider(
url = 'https://nls-{s}.tileserver.com/nls/{z}/{x}/{y}.jpg',
attribution = 'National Library of Scotland Historic Maps',
bounds = [[49.6, -12], [61.7, 3]],
min_zoom = 1,
max_zoom = 18,
subdomains = '0123',
name = 'NLS'
),
JusticeMap = Bunch(
income = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'income',
name = 'JusticeMap.income'
),
americanIndian = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'indian',
name = 'JusticeMap.americanIndian'
),
asian = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'asian',
name = 'JusticeMap.asian'
),
black = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'black',
name = 'JusticeMap.black'
),
hispanic = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'hispanic',
name = 'JusticeMap.hispanic'
),
multi = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'multi',
name = 'JusticeMap.multi'
),
nonWhite = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'nonwhite',
name = 'JusticeMap.nonWhite'
),
white = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'white',
name = 'JusticeMap.white'
),
plurality = TileProvider(
url = 'http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png',
attribution = 'Justice Map',
size = 'county',
bounds = [[14, -180], [72, -56]],
variant = 'plural',
name = 'JusticeMap.plurality'
)
),
Wikimedia = TileProvider(
url = 'https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}{r}.png',
attribution = 'Wikimedia',
min_zoom = 1,
max_zoom = 19,
name = 'Wikimedia'
),
GeoportailFrance = Bunch(
parcels = TileProvider(
url = 'https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}',
attribution = 'Geoportail France',
bounds = [[-75, -180], [81, 180]],
min_zoom = 2,
max_zoom = 20,
apikey = 'choisirgeoportail',
format = 'image/png',
style = 'bdparcellaire',
variant = 'CADASTRALPARCELS.PARCELS',
name = 'GeoportailFrance.parcels'
),
ignMaps = TileProvider(
url = 'https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}',
attribution = 'Geoportail France',
bounds = [[-75, -180], [81, 180]],
min_zoom = 2,
max_zoom = 18,
apikey = 'choisirgeoportail',
format = 'image/jpeg',
style = 'normal',
variant = 'GEOGRAPHICALGRIDSYSTEMS.MAPS',
name = 'GeoportailFrance.ignMaps'
),
maps = TileProvider(
url = 'https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}',
attribution = 'Geoportail France',
bounds = [[-75, -180], [81, 180]],
min_zoom = 2,
max_zoom = 18,
apikey = 'choisirgeoportail',
format = 'image/jpeg',
style = 'normal',
variant = 'GEOGRAPHICALGRIDSYSTEMS.MAPS.SCAN-EXPRESS.STANDARD',
name = 'GeoportailFrance.maps'
),
orthos = TileProvider(
url = 'https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}',
attribution = 'Geoportail France',
bounds = [[-75, -180], [81, 180]],
min_zoom = 2,
max_zoom = 19,
apikey = 'choisirgeoportail',
format = 'image/jpeg',
style = 'normal',
variant = 'ORTHOIMAGERY.ORTHOPHOTOS',
name = 'GeoportailFrance.orthos'
)
),
OneMapSG = Bunch(
Default = TileProvider(
url = 'https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png',
variant = 'Default',
min_zoom = 11,
max_zoom = 18,
bounds = [[1.56073, 104.11475], [1.16, 103.502]],
attribution = ' New OneMap | Map data (C) contributors, Singapore Land Authority',
name = 'OneMapSG.Default'
),
Night = TileProvider(
url = 'https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png',
variant = 'Night',
min_zoom = 11,
max_zoom = 18,
bounds = [[1.56073, 104.11475], [1.16, 103.502]],
attribution = ' New OneMap | Map data (C) contributors, Singapore Land Authority',
name = 'OneMapSG.Night'
),
Original = TileProvider(
url = 'https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png',
variant = 'Original',
min_zoom = 11,
max_zoom = 18,
bounds = [[1.56073, 104.11475], [1.16, 103.502]],
attribution = ' New OneMap | Map data (C) contributors, Singapore Land Authority',
name = 'OneMapSG.Original'
),
Grey = TileProvider(
url = 'https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png',
variant = 'Grey',
min_zoom = 11,
max_zoom = 18,
bounds = [[1.56073, 104.11475], [1.16, 103.502]],
attribution = ' New OneMap | Map data (C) contributors, Singapore Land Authority',
name = 'OneMapSG.Grey'
),
LandLot = TileProvider(
url = 'https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png',
variant = 'LandLot',
min_zoom = 11,
max_zoom = 18,
bounds = [[1.56073, 104.11475], [1.16, 103.502]],
attribution = ' New OneMap | Map data (C) contributors, Singapore Land Authority',
name = 'OneMapSG.LandLot'
)
)
)
| 44.826703
| 223
| 0.525076
| 6,859
| 66,478
| 5.018662
| 0.084852
| 0.061441
| 0.067978
| 0.025564
| 0.781861
| 0.764315
| 0.753304
| 0.742294
| 0.738576
| 0.73326
| 0
| 0.041398
| 0.318331
| 66,478
| 1,482
| 224
| 44.85695
| 0.718223
| 0.009296
| 0
| 0.647868
| 1
| 0.1011
| 0.435071
| 0.032176
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002063
| false
| 0
| 0
| 0.000688
| 0.005502
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
df81e619d1bdbe7cebf2cf13b305e6b1fad1c4d5
| 6,395
|
py
|
Python
|
idl2py/wcs/add_distort.py
|
RapidLzj/idl2py
|
193051cd8d01db0d125b8975713b885ad521a992
|
[
"MIT"
] | null | null | null |
idl2py/wcs/add_distort.py
|
RapidLzj/idl2py
|
193051cd8d01db0d125b8975713b885ad521a992
|
[
"MIT"
] | null | null | null |
idl2py/wcs/add_distort.py
|
RapidLzj/idl2py
|
193051cd8d01db0d125b8975713b885ad521a992
|
[
"MIT"
] | null | null | null |
"""
By Dr Jie Zheng -Q, NAOC
v1 2019-04-27
"""
import numpy as np
from..util import *
def add_distort():
    """Placeholder for a Python translation of the IDL ``ADD_DISTORT`` procedure.

    The IDL original (preserved verbatim in the comment block below) adds
    SIP or TNX distortion keywords from an astrometry structure to a FITS
    header.  This stub has not been translated yet: it takes no arguments,
    does nothing, and returns ``None``.
    """
    pass
# pro add_distort, hdr, astr
#; NAME:
#; ADD_DISTORT
#; PURPOSE:
#; Add the distortion parameters in an astrometry structure to a FITS header.
#; EXPLANATION:
#; Called by PUTAST to add SIP (http://fits.gsfc.nasa.gov/registry/sip.html )
#; or TNX ( http://fits.gsfc.nasa.gov/registry/tnx.html ) distortion
#; parameters in an astrometry structure to a FITS header
#;
#; Prior to April 2012, PUTAST did not add distortion parameters so one
#; had to call ADD_DISTORT after PUTAST.
#;
#; IDL> putast,h ,astr0
#; IDL> add_distort,h,astr0
#;
#; CALLING SEQUENCE:
#; add_distort, hdr, astr
#;
#; INPUTS:
#; HDR - FITS header, string array. HDR will be updated to contain
#; the supplied astrometry.
#; ASTR - IDL structure containing values of the astrometry parameters
#; CDELT, CRPIX, CRVAL, CTYPE, LONGPOLE, PV2, and DISTORT
#; See EXTAST.PRO for more info about the structure definition
#;
#; PROCEDURES USED:
#; SXADDPAR, TAG_EXIST()
#; REVISION HISTORY:
#; Written by W. Landsman May 2005
#; Enforce i+j = n for ij coefficients of order n W. Landsman April 2012
#; Support IRAF TNX distortion M. Sullivan March 2014
#;;-
# npar = N_params()
#
# if ( npar LT 2 ) then begin ;Was header supplied?
# print,'Syntax: ADD_DISTORT, Hdr, astr'
# return
# endif
#
# add_distort = tag_exist(astr,'distort')
# IF(~ add_distort)THEN RETURN
#
# IF(astr.distort.name EQ 'SIP') then begin
#
# sxaddpar,hdr,'CTYPE1','RA---TAN-SIP'
# sxaddpar,hdr,'CTYPE2','DEC--TAN-SIP'
# distort = astr.distort
# a_dimen = size(distort.a,/dimen)
# b_dimen = size(distort.b,/dimen)
# ap_dimen = size(distort.ap,/dimen)
# bp_dimen = size(distort.bp,/dimen)
#
# if a_dimen[0] GT 0 then begin
# a_order = a_dimen[0]-1
# sxaddpar, hdr, 'A_ORDER', a_order, /savec, $
# 'polynomial order, axis 1, detector to sky '
# for i=0, a_order do begin
# for j = 0, a_order-i do begin
# aij = distort.a[i,j]
# if aij NE 0.0 then $
# sxaddpar, hdr, 'A_' + strtrim(i,2)+ '_' + strtrim(j,2), aij, $
# ' distortion coefficient', /savec
# endfor
# endfor
# endif
#
# if b_dimen[0] GT 0 then begin
# b_order = b_dimen[0]-1
# sxaddpar, hdr, 'B_ORDER', a_order, /savec , $
# 'polynomial order, axis 2, detector to sky'
# for i=0, b_order do begin
# for j = 0, b_order-i do begin
# bij = distort.b[i,j]
# if bij NE 0.0 then $
# sxaddpar, hdr, 'B_' + strtrim(i,2)+ '_' + strtrim(j,2), bij, $
# ' distortion coefficient', /savec
# endfor
# endfor
# endif
#
# if ap_dimen[0] GT 0 then begin
# ap_order = ap_dimen[0]-1
# sxaddpar, hdr, 'AP_ORDER', a_order, /savec, $
# ' polynomial order, axis 1, sky to detector '
# for i=0, ap_order do begin
# for j = 0, ap_order-i do begin
# apij = distort.ap[i,j]
# if apij NE 0.0 then $
# sxaddpar, hdr, 'AP_' + strtrim(i,2)+ '_' + strtrim(j,2), apij, $
# ' distortion coefficient', /savec
# endfor
# endfor
# endif
#
#
# if bp_dimen[0] GT 0 then begin
# bp_order = bp_dimen[0]-1
# sxaddpar, hdr, 'BP_ORDER', a_order, /savec, $
# ' polynomial order, axis 2, sky to detector '
# for i=0, bp_order do begin
# for j = 0, bp_order-i do begin
# bpij = distort.bp[i,j]
# if bpij NE 0.0 then $
# sxaddpar, hdr, 'BP_' + strtrim(i,2)+ '_' + strtrim(j,2), bpij, $
# ' distortion coefficient', /savec
# endfor
# endfor
# endif
#
# ENDIF ELSE IF(astr.distort.name EQ 'TNX')THEN BEGIN
#
# sxaddpar, hdr,'WAT0_001','system=image'
#
# string1='wtype=tnx axtype=ra lngcor = "3.'
# string1+= ' '+STRN(astr.distort.lngcor.xiorder,FORMAT='(F2.0)')
# string1+= ' '+STRN(astr.distort.lngcor.etaorder,FORMAT='(F2.0)')
# string1+= ' '+STRN(astr.distort.lngcor.xterms,FORMAT='(F2.0)')
# string1+= ' '+STRN(astr.distort.lngcor.ximin,FORMAT='(F19.16)')
# string1+= ' '+STRN(astr.distort.lngcor.ximax,FORMAT='(F19.16)')
# string1+= ' '+STRN(astr.distort.lngcor.etamin,FORMAT='(F19.16)')
# string1+= ' '+STRN(astr.distort.lngcor.etamax,FORMAT='(F19.16)')
# FOR i=0,N_ELEMENTS(astr.distort.lngcor.coeff)-1 DO BEGIN
# string1+=' '+STRN(astr.distort.lngcor.coeff[i],FORMAT='(F19.16)')
# ENDFOR
# string1+= '"'
#
# string2='wtype=tnx axtype=dec latcor = "3. '
# string2+= ' '+STRN(astr.distort.latcor.xiorder,FORMAT='(F2.0)')
# string2+= ' '+STRN(astr.distort.latcor.etaorder,FORMAT='(F2.0)')
# string2+= ' '+STRN(astr.distort.latcor.xterms,FORMAT='(F2.0)')
# string2+= ' '+STRN(astr.distort.latcor.ximin,FORMAT='(F19.16)')
# string2+= ' '+STRN(astr.distort.latcor.ximax,FORMAT='(F19.16)')
# string2+= ' '+STRN(astr.distort.latcor.etamin,FORMAT='(F19.16)')
# string2+= ' '+STRN(astr.distort.latcor.etamax,FORMAT='(F19.16)')
# FOR i=0,N_ELEMENTS(astr.distort.latcor.coeff)-1 DO BEGIN
# string2+= ' '+STRN(astr.distort.latcor.coeff[i],FORMAT='(F19.16)')
# ENDFOR
# string2+= '"'
#
# len1=STRLEN(string1)
# n1=len1/70
# IF(len1 MOD 68 GT 0)THEN n1++
# FOR i=0,n1-1 DO BEGIN
# s=STRMID(string1,i*68,68)
#; PRINT,'WAT1_'+STRN(i+1,FORMAT='(I3.3)'),' ',s
# sxaddpar, hdr,'WAT1_'+STRN(i+1,FORMAT='(I3.3)'),s
# ENDFOR
# len2=STRLEN(string2)
# n2=len2/70
# IF(len2 MOD 68 GT 0)THEN n2++
# FOR i=0,n2-1 DO BEGIN
# s=STRMID(string2,i*68,68)
#; PRINT,'WAT1_'+STRN(i+1,FORMAT='(I3.3)'),' ',s
# sxaddpar, hdr,'WAT2_'+STRN(i+1,FORMAT='(I3.3)'),s
# ENDFOR
#
# ENDIF
#
# return
# end
| 36.335227
| 82
| 0.544488
| 847
| 6,395
| 4.041322
| 0.227863
| 0.070698
| 0.070114
| 0.051417
| 0.53754
| 0.456909
| 0.309962
| 0.270523
| 0.081215
| 0.081215
| 0
| 0.047098
| 0.299453
| 6,395
| 175
| 83
| 36.542857
| 0.716964
| 0.88663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
df877e42561ff614b4f5ecab85bbd2bb3d3a6be8
| 183
|
py
|
Python
|
mmskeleton/utils/__init__.py
|
sj-li/PR-GCN
|
decd0d1b15e154cac32597d0bd1cf4e2dfef20a6
|
[
"Apache-2.0"
] | 5
|
2021-05-31T09:10:44.000Z
|
2022-01-22T08:06:28.000Z
|
mmskeleton/utils/__init__.py
|
sj-li/PR-GCN
|
decd0d1b15e154cac32597d0bd1cf4e2dfef20a6
|
[
"Apache-2.0"
] | null | null | null |
mmskeleton/utils/__init__.py
|
sj-li/PR-GCN
|
decd0d1b15e154cac32597d0bd1cf4e2dfef20a6
|
[
"Apache-2.0"
] | null | null | null |
from .importer import import_obj, call_obj, set_attr, get_attr
from .checkpoint import load_checkpoint
__all__ = ['import_obj', 'call_obj', 'set_attr', 'get_attr', 'load_checkpoint']
| 45.75
| 79
| 0.781421
| 27
| 183
| 4.777778
| 0.407407
| 0.139535
| 0.20155
| 0.248062
| 0.465116
| 0.465116
| 0.465116
| 0.465116
| 0
| 0
| 0
| 0
| 0.098361
| 183
| 4
| 79
| 45.75
| 0.781818
| 0
| 0
| 0
| 0
| 0
| 0.266304
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
df93146e3b7e436bac034a53203c507791aa7d45
| 8,244
|
py
|
Python
|
servo/stats/queries.py
|
ipwnosx/Servo
|
3418ece690ca90d676a7d8ae654da7770ae312fb
|
[
"BSD-2-Clause"
] | null | null | null |
servo/stats/queries.py
|
ipwnosx/Servo
|
3418ece690ca90d676a7d8ae654da7770ae312fb
|
[
"BSD-2-Clause"
] | null | null | null |
servo/stats/queries.py
|
ipwnosx/Servo
|
3418ece690ca90d676a7d8ae654da7770ae312fb
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import decimal
from django.db import connection
class StatsManager:
def __init__(self):
self.cursor = connection.cursor()
def _result(self, args):
result = []
self.cursor.execute(self.sql, args)
for k, v in self.cursor.fetchall():
if isinstance(v, decimal.Decimal):
v = float(v)
result.append((k, v,))
return result
def cases_per_tech(self, location, queues, labels, start, end):
users = User.object.filter(location=location)
def statuses_per_location(self, timescale, location, status, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, se.triggered_at))*1000 as p,
COUNT(*) AS v
FROM servo_order so, servo_event se
WHERE (se.triggered_at, se.triggered_at) OVERLAPS (%s, %s)
AND se.action = 'set_status'
AND se.object_id = so.id
AND so.location_id = %s
AND se.description = %s
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, start, end, location, status])
def statuses_per_user(self, timescale, user, status, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, se.triggered_at))*1000 as p,
COUNT(*) AS v
FROM servo_order so, servo_event se
WHERE (se.triggered_at, se.triggered_at) OVERLAPS (%s, %s)
AND se.action = 'set_status'
AND se.object_id = so.id
AND so.user_id = %s
AND se.description = %s
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, start, end, user, status])
def sales_invoices(self, timescale, queue, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, so.created_at))*1000 as p,
SUM(total_gross) AS v
FROM servo_invoice si, servo_order so
WHERE (si.created_at, si.created_at) OVERLAPS (%s, %s)
AND si.order_id = so.id
AND so.queue_id = %s
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, start, end, queue])
def sales_purchases(self, timescale, queue, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, po.created_at))*1000 as p,
SUM(total) AS v
FROM servo_purchaseorder po, servo_order so
WHERE (po.created_at, po.created_at) OVERLAPS (%s, %s)
AND po.sales_order_id = so.id
AND so.queue_id = %s
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, start, end, queue])
def sales_parts_per_labtier(self, start, end):
self.sql = """SELECT labour_tier, count(*)
FROM servo_product p, servo_servicepart sp, servo_serviceorderitem soi
WHERE soi.product_id = p.id
AND sp.order_item_id = soi.id
AND (soi.created_at, soi.created_at) OVERLAPS (%s, %s)
AND char_length(labour_tier) = 4
GROUP BY labour_tier
ORDER BY labour_tier"""
return self._result([start, end])
def order_runrate(self, timescale, location, user, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, started_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE user_id = %s
AND location_id = %s
AND (started_at, started_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, user, location, start, end])
def turnaround_per_location(self, timescale, location, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
EXTRACT(HOUR FROM AVG(closed_at - created_at)) as v
FROM servo_order
WHERE closed_at IS NOT NULL
AND location_id = %s
AND queue_id IS NOT NULL
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, start, end])
def runrate_per_location(self, timescale, location, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE location_id = %s
AND closed_at IS NOT NULL
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, start, end])
def distribution_per_location(self, start, end):
result = []
self.sql = """SELECT l.title, COUNT(*)
FROM servo_order o LEFT OUTER JOIN servo_location l on (o.location_id = l.id)
WHERE (o.created_at, o.created_at) OVERLAPS (%s, %s)
GROUP BY l.title"""
self.cursor.execute(self.sql, [start, end])
for k, v in self.cursor.fetchall():
result.append({'label': k, 'data': v})
return result
def orders_created_by(self, timescale, location, user, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE location_id = %s
AND created_by_id = %s
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, user, start, end])
def orders_created_at(self, timescale, location, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE location_id = %s
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, start, end])
def orders_closed_at(self, timescale, location, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE location_id = %s
AND (closed_at, closed_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, start, end])
def orders_closed_in(self, timescale, location, queue, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE location_id = %s
AND queue_id = %s
AND (closed_at, closed_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, queue, start, end])
def order_count(self, timescale, location, queue, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
COUNT(*) AS v
FROM servo_order
WHERE location_id = %s
AND queue_id = %s
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, queue, start, end])
def order_turnaround(self, timescale, location, queue, start, end):
self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
EXTRACT(HOUR FROM AVG(closed_at - created_at)) as v
FROM servo_order
WHERE closed_at IS NOT NULL
AND location_id = %s
AND queue_id = %s
AND queue_id IS NOT NULL
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
return self._result([timescale, location, queue, start, end])
# NOTE(review): exact duplicate of the order_turnaround defined
# immediately above — this later definition silently shadows the
# earlier one at class-creation time. One of the two should be removed.
def order_turnaround(self, timescale, location, queue, start, end):
    # Average turnaround (hour field only) per time bucket for one queue.
    self.sql = """SELECT EXTRACT(EPOCH FROM date_trunc(%s, created_at))*1000 as p,
EXTRACT(HOUR FROM AVG(closed_at - created_at)) as v
FROM servo_order
WHERE closed_at IS NOT NULL
AND location_id = %s
AND queue_id = %s
AND queue_id IS NOT NULL
AND (created_at, created_at) OVERLAPS (%s, %s)
GROUP BY p
ORDER BY p ASC"""
    return self._result([timescale, location, queue, start, end])
| 36.64
| 91
| 0.594129
| 1,145
| 8,244
| 4.117904
| 0.10131
| 0.070626
| 0.022906
| 0.040721
| 0.776034
| 0.748462
| 0.734464
| 0.708165
| 0.708165
| 0.708165
| 0
| 0.010057
| 0.300461
| 8,244
| 224
| 92
| 36.803571
| 0.807526
| 0.002547
| 0
| 0.659341
| 0
| 0
| 0.60905
| 0.010948
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104396
| false
| 0
| 0.010989
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
10bfe84988c9609b4e84eac42b19097e1ac966fa
| 45
|
py
|
Python
|
webtreasures/models/__init__.py
|
songzxDev/fortunecat
|
594b5df8341c997617a9b28a36c86d09b0f5b1b3
|
[
"Apache-2.0"
] | null | null | null |
webtreasures/models/__init__.py
|
songzxDev/fortunecat
|
594b5df8341c997617a9b28a36c86d09b0f5b1b3
|
[
"Apache-2.0"
] | null | null | null |
webtreasures/models/__init__.py
|
songzxDev/fortunecat
|
594b5df8341c997617a9b28a36c86d09b0f5b1b3
|
[
"Apache-2.0"
] | null | null | null |
from webtreasures.models.fortunedrp import *
| 22.5
| 44
| 0.844444
| 5
| 45
| 7.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
10cd77ef99f0ea1fccd4fd2141d07ceb9306bfb7
| 295
|
py
|
Python
|
deep_rl/actor_critic/__init__.py
|
jkulhanek/deep-rl-pytorch
|
6fa7ceee8524f002d4a8d93295b231f6b9b7c29c
|
[
"MIT"
] | 7
|
2019-03-24T19:51:11.000Z
|
2022-01-27T17:20:29.000Z
|
deep_rl/actor_critic/__init__.py
|
jkulhanek/deep-rl-pytorch
|
6fa7ceee8524f002d4a8d93295b231f6b9b7c29c
|
[
"MIT"
] | null | null | null |
deep_rl/actor_critic/__init__.py
|
jkulhanek/deep-rl-pytorch
|
6fa7ceee8524f002d4a8d93295b231f6b9b7c29c
|
[
"MIT"
] | 4
|
2020-04-11T01:06:24.000Z
|
2021-07-18T01:22:36.000Z
|
from .agent import ActorCriticAgent # noqa: F401
from .ppo import PPO # noqa: F401
from .a2c import A2C # noqa: F401
from .a2c import A2CDynamicBatch # noqa: F401
from .acktr import ACKTR # noqa: F401
from .a3c import A3C # noqa: F401
from .unreal import Unreal, UnrealAgent # noqa: F401
| 36.875
| 53
| 0.732203
| 43
| 295
| 5.023256
| 0.302326
| 0.259259
| 0.333333
| 0.138889
| 0.194444
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113445
| 0.19322
| 295
| 7
| 54
| 42.142857
| 0.794118
| 0.257627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
10eaec0c1a0c17d1b6ebfa84f2aaadede84afe27
| 45
|
py
|
Python
|
stRT/tdr/__init__.py
|
Yao-14/stAnalysis
|
d08483ce581f5b03cfcad8be500aaa64b0293f74
|
[
"BSD-3-Clause"
] | null | null | null |
stRT/tdr/__init__.py
|
Yao-14/stAnalysis
|
d08483ce581f5b03cfcad8be500aaa64b0293f74
|
[
"BSD-3-Clause"
] | null | null | null |
stRT/tdr/__init__.py
|
Yao-14/stAnalysis
|
d08483ce581f5b03cfcad8be500aaa64b0293f74
|
[
"BSD-3-Clause"
] | null | null | null |
from .models import *
from .widgets import *
| 15
| 22
| 0.733333
| 6
| 45
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177778
| 45
| 2
| 23
| 22.5
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
339068600b325fe15eb9182b5e1fae0e0efcecbd
| 352
|
py
|
Python
|
S4/S4 Library/simulation/venues/park_zone_director.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/venues/park_zone_director.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/venues/park_zone_director.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from situations.complex.yoga_class import YogaClassScheduleMixin
from venues.relaxation_center_zone_director import VisitorSituationOnArrivalZoneDirectorMixin
from venues.scheduling_zone_director import SchedulingZoneDirector
class ParkZoneDirector(YogaClassScheduleMixin, VisitorSituationOnArrivalZoneDirectorMixin, SchedulingZoneDirector):
    """Zone director for the park venue.

    All behavior is inherited from the mixins: yoga-class scheduling,
    visitor situations on arrival, and scheduled situations.
    """
    pass
| 50.285714
| 115
| 0.911932
| 28
| 352
| 11.25
| 0.607143
| 0.063492
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 352
| 6
| 116
| 58.666667
| 0.954545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 6
|
33d78aa5fb442ed0e93930e239acb959aeb1a27e
| 389
|
py
|
Python
|
wrappers/arlexecute/calibration/pointing.py
|
ska-telescope/algorithm-reference-library
|
1b2c8d6079249202864abf8c60cdea40f0f123cb
|
[
"Apache-2.0"
] | 22
|
2016-12-14T11:20:07.000Z
|
2021-08-13T15:23:41.000Z
|
wrappers/arlexecute/calibration/pointing.py
|
ska-telescope/algorithm-reference-library
|
1b2c8d6079249202864abf8c60cdea40f0f123cb
|
[
"Apache-2.0"
] | 30
|
2017-06-27T09:15:38.000Z
|
2020-09-11T18:16:37.000Z
|
wrappers/arlexecute/calibration/pointing.py
|
SKA-ScienceDataProcessor/algorithm-reference-library
|
1b2c8d6079249202864abf8c60cdea40f0f123cb
|
[
"Apache-2.0"
] | 20
|
2017-07-02T03:45:49.000Z
|
2019-12-11T17:19:01.000Z
|
""" Functions for calibration, including creation of pointingtables, application of pointingtables, and
merging pointingtables.
"""
from processing_components.calibration.pointing import create_pointingtable_from_blockvisibility
from processing_components.calibration.pointing import create_pointingtable_from_rows
from processing_components.calibration.pointing import qa_pointingtable
| 43.222222
| 103
| 0.884319
| 41
| 389
| 8.146341
| 0.487805
| 0.125749
| 0.215569
| 0.314371
| 0.577844
| 0.577844
| 0.431138
| 0.431138
| 0.431138
| 0
| 0
| 0
| 0.07455
| 389
| 8
| 104
| 48.625
| 0.927778
| 0.316195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
33dbb4469c992e0f150bd5776f9bea4d7636d33f
| 111
|
py
|
Python
|
deep_qa/contrib/layers/__init__.py
|
mrbot-ai/deep_qa
|
a1731331e12b921b4dbb43433f9c028b362495e8
|
[
"Apache-2.0"
] | 2
|
2017-01-26T13:07:13.000Z
|
2019-03-27T16:11:56.000Z
|
deep_qa/contrib/layers/__init__.py
|
nelson-liu/deep_qa
|
00d36306759cb1c232489f68844371fb727ce2c8
|
[
"Apache-2.0"
] | null | null | null |
deep_qa/contrib/layers/__init__.py
|
nelson-liu/deep_qa
|
00d36306759cb1c232489f68844371fb727ce2c8
|
[
"Apache-2.0"
] | 1
|
2019-01-04T13:08:27.000Z
|
2019-01-04T13:08:27.000Z
|
from .knowledge_backed_lstm import KnowledgeBackedLSTM
from .tree_composition_lstm import TreeCompositionLSTM
| 27.75
| 54
| 0.900901
| 12
| 111
| 8
| 0.75
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 111
| 3
| 55
| 37
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
33eaad2bf481158c9dea54943fe80d3c96733ec5
| 43
|
py
|
Python
|
hw1/test/tree/__init__.py
|
LePotatoChef/CS583-Machine-Learning
|
b23a4d9f3ed0b9b97bab46fd9c69192c374890df
|
[
"Apache-2.0"
] | 1
|
2020-11-11T15:41:00.000Z
|
2020-11-11T15:41:00.000Z
|
hw1/test/tree/__init__.py
|
LePotatoChef/CS583-Machine-Learning
|
b23a4d9f3ed0b9b97bab46fd9c69192c374890df
|
[
"Apache-2.0"
] | null | null | null |
hw1/test/tree/__init__.py
|
LePotatoChef/CS583-Machine-Learning
|
b23a4d9f3ed0b9b97bab46fd9c69192c374890df
|
[
"Apache-2.0"
] | null | null | null |
from .dtnode import *
from .dtree import *
| 14.333333
| 21
| 0.72093
| 6
| 43
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 22
| 21.5
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d2fa60632669012a78e0f8f37441803b324af30
| 120
|
py
|
Python
|
mysite/asd.py
|
iml1111/django-study
|
b0b20af5a495f8680b41e20b89e632a9a2e41026
|
[
"MIT"
] | null | null | null |
mysite/asd.py
|
iml1111/django-study
|
b0b20af5a495f8680b41e20b89e632a9a2e41026
|
[
"MIT"
] | null | null | null |
mysite/asd.py
|
iml1111/django-study
|
b0b20af5a495f8680b41e20b89e632a9a2e41026
|
[
"MIT"
] | null | null | null |
from django_jwt_extended.decorators import jwt_required
# NOTE(review): throwaway scratch code — the view requires a JWT via
# jwt_required(), yet it is invoked directly at import time below,
# which presumably fails outside a request context. Candidate for
# deletion; confirm nothing imports this module for side effects.
@jwt_required()
def asdasdasd():
    # Placeholder response body.
    return "sad"
asdasdasd()
| 15
| 55
| 0.775
| 15
| 120
| 5.933333
| 0.733333
| 0.247191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 120
| 8
| 56
| 15
| 0.855769
| 0
| 0
| 0
| 0
| 0
| 0.024793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
1d4c72c1075e0b86fb81467a757b30e31e0e2c79
| 36
|
py
|
Python
|
dsplot/tree/__init__.py
|
avere001/dsplot
|
89948c2f1b16e00bb3a240f73d0cb100b3eac847
|
[
"MIT"
] | 8
|
2021-08-08T06:06:39.000Z
|
2022-02-04T18:30:38.000Z
|
dsplot/tree/__init__.py
|
avere001/dsplot
|
89948c2f1b16e00bb3a240f73d0cb100b3eac847
|
[
"MIT"
] | 1
|
2022-01-04T02:01:36.000Z
|
2022-01-04T02:01:36.000Z
|
dsplot/tree/__init__.py
|
avere001/dsplot
|
89948c2f1b16e00bb3a240f73d0cb100b3eac847
|
[
"MIT"
] | 2
|
2021-08-18T12:28:40.000Z
|
2022-01-03T23:56:41.000Z
|
from .binary_tree import BinaryTree
| 18
| 35
| 0.861111
| 5
| 36
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d848b9ed788cf0ed82464d0f562233f9a3e9ef9
| 132
|
py
|
Python
|
bank/src/deposit_app/views/__init__.py
|
yuramorozov01/bank_system
|
8d0cad692a89b913adb2df9a2a03d1793938a911
|
[
"Apache-2.0"
] | null | null | null |
bank/src/deposit_app/views/__init__.py
|
yuramorozov01/bank_system
|
8d0cad692a89b913adb2df9a2a03d1793938a911
|
[
"Apache-2.0"
] | null | null | null |
bank/src/deposit_app/views/__init__.py
|
yuramorozov01/bank_system
|
8d0cad692a89b913adb2df9a2a03d1793938a911
|
[
"Apache-2.0"
] | null | null | null |
from deposit_app.views.deposit_contract import DepositContractViewSet
from deposit_app.views.deposit_type import DepositTypeViewSet
| 44
| 69
| 0.909091
| 16
| 132
| 7.25
| 0.5625
| 0.189655
| 0.241379
| 0.327586
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 132
| 2
| 70
| 66
| 0.935484
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
1d8659c901233fa5a39cd6a4af8bd60f2c5f2d7a
| 85
|
py
|
Python
|
linedrive/__init__.py
|
PoorBillionaire/linedrive
|
8880af4c095ac309b469352f80bf89a3c1b5deb5
|
[
"Apache-2.0"
] | null | null | null |
linedrive/__init__.py
|
PoorBillionaire/linedrive
|
8880af4c095ac309b469352f80bf89a3c1b5deb5
|
[
"Apache-2.0"
] | null | null | null |
linedrive/__init__.py
|
PoorBillionaire/linedrive
|
8880af4c095ac309b469352f80bf89a3c1b5deb5
|
[
"Apache-2.0"
] | 1
|
2022-01-20T19:10:03.000Z
|
2022-01-20T19:10:03.000Z
|
from . import utils
from . import constants
from .websocket import GamecastWebsocket
| 21.25
| 40
| 0.823529
| 10
| 85
| 7
| 0.6
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 85
| 3
| 41
| 28.333333
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1d8a9eb020d69708931c4b847e9f37f88c96b217
| 50,912
|
py
|
Python
|
original-paas/copy_to_container/www/spdpaas/src/app/features/commonUse/inMemoryDBHandle.py
|
yishan1331/docker-practice
|
91a1a434cbffc33790678af5e09de310386812d1
|
[
"MIT"
] | null | null | null |
original-paas/copy_to_container/www/spdpaas/src/app/features/commonUse/inMemoryDBHandle.py
|
yishan1331/docker-practice
|
91a1a434cbffc33790678af5e09de310386812d1
|
[
"MIT"
] | null | null | null |
original-paas/copy_to_container/www/spdpaas/src/app/features/commonUse/inMemoryDBHandle.py
|
yishan1331/docker-practice
|
91a1a434cbffc33790678af5e09de310386812d1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#module description
"""
==============================================================================
created :
Last update: 03/31/2021
Developer: Yi-Shan Tsai
Lite Version 2 @Yishan08212019
API Version 1.0
Filename: inMemoryDBHandle.py
Description: 連接redis api
Total = 8 APIs
==============================================================================
"""
#=======================================================
# System level modules
#=======================================================
#{{{
from sqlalchemy import *
#}}}
#=======================================================
# User-defined modules
#=======================================================
# {{{
from app import *
#Yishan@05212020 added for common modules
from app.modules import *
# }}}
#blueprint
# Flask blueprint grouping the redis CommonUse endpoints defined below.
INMEMORYDB_API = Blueprint('INMEMORYDB_API', __name__)
class _RedisQueryAction(object):
def __init__(self,dbRedis,datatype,key):
self.dbRedis = dbRedis
self.datatype = datatype
self.key = key
def get_value(self):
dataDict = {
"string":self._string,
"list":self._list, #lrange(0,-1)
"set":self._set,
"zset":self._zset, #zrange(0,-1)
"hash":self._hash,
}
return dataDict[self.datatype]()
def _string(self):
return self.dbRedis.get(self.key)
def _list(self):
return self.dbRedis.lrange(self.key,0,-1)
def _set(self):
return self.dbRedis.smembers(self.key)
def _zset(self):
return self.dbRedis.zrange(self.key,0,-1)
def _hash(self):
return self.dbRedis.hgetall(self.key)
class _RedisInsertUpdateAction(object):
    """Insert ("Insert") or update (any other action) redis keys of one datatype.

    data is expected to look like {"data": {...}, "expire_time": {...}}
    where expire_time is optional per key. set_value() returns a list of
    per-key error/info messages (empty list means full success); redis
    errors are caught and appended as strings rather than raised.

    NOTE(review): several user-facing messages below contain the typo
    "beacuse"; left untouched here because changing runtime strings is
    out of scope for a documentation pass.
    """
    def __init__(self, dbRedis, datatype, data, whichAction, setExpireTime):
        self.dbRedis = dbRedis
        self.datatype = datatype
        self.data = data
        self.whichAction = whichAction
        # Verb used in messages: "add" for inserts, "update" otherwise.
        self.doAction = "update"
        if whichAction == "Insert":
            self.doAction = "add"
        self.setExpireTime = setExpireTime
        self.err_msg = []
    def set_value(self):
        # Dispatch table: list/set and zset/hash share writers because
        # their validation and expiry handling are identical.
        dataDict = {
            "string":self._string,
            "list":self._list_set,
            "set":self._set,
            "zset":self._zset_hash,
            "hash":self._zset_hash,
        }
        return dataDict[self.datatype]()
    def _string(self):
        # Write plain string keys. With expiry: one SETEX per key after
        # validation; without: a single bulk MSET.
        try:
            if self.setExpireTime:
                for key,value in self.data["data"].items():
                    # Insert must not overwrite; update must target an existing key.
                    if self.dbRedis.exists(key) and self.whichAction == "Insert":
                        self.err_msg.append("Failed to {} Key:'{}' because already exists".format(self.doAction,key))
                        continue
                    if not self.dbRedis.exists(key) and not self.whichAction == "Insert":
                        self.err_msg.append("Failed to {} Key:'{}' because does not exist".format(self.doAction,key))
                        continue
                    # String keys only accept scalar-ish values.
                    if isinstance(value,dict) or isinstance(value,list):
                        self.err_msg.append("Failed to {} Key:'{}' -> Value:'{}' because Value cannot be an Array or Object".format(self.doAction,key,value))
                        continue
                    # No expiry given for this key: write it without one, but report.
                    if self.data["expire_time"].get(key) is None:
                        self.dbRedis.set(key,value)
                        self.err_msg.append("Key:'{}' doesn't have expire_time,but {} successfully with no expire_time".format(key,self.doAction))
                        continue
                    # Expiry must be a positive integer number of seconds.
                    if not (isinstance(self.data["expire_time"][key],int) and self.data["expire_time"][key] > 0):
                        self.err_msg.append("Failed to add Key:'{}' beacuse expire_time:{} must be positive integer and greater than zero".format(key,self.data["expire_time"][key]))
                        continue
                    self.dbRedis.setex(key, self.data["expire_time"][key], value)
            else:
                self.dbRedis.mset(self.data["data"])
        except Exception as e:
            self.err_msg.append(str(e))
        return self.err_msg
    def _list_set(self):
        # Write list (RPUSH) or set (SADD) keys, one by one.
        try:
            for key,value in self.data["data"].items():
                if self.dbRedis.exists(key) and self.whichAction == "Insert":
                    self.err_msg.append("Failed to {} Key:'{}' because already exists".format(self.doAction,key))
                    continue
                if not self.dbRedis.exists(key) and not self.whichAction == "Insert":
                    self.err_msg.append("Failed to {} Key:'{}' because does not exist".format(self.doAction,key))
                    continue
                if not isinstance(value,list):
                    self.err_msg.append("Failed to add Key:'{}' -> Value:'{}' because value must be an Array".format(key,value))
                    continue
                if not value:
                    self.err_msg.append("Failed to add Key:'{}' -> Value:'{}' because value cannot be empty.".format(key,value))
                    continue
                # When updating an existing key, delete it first and recreate it.
                if self.whichAction != "Insert": self.dbRedis.delete(key)
                if self.setExpireTime:
                    if self.data["expire_time"].get(key) is None:
                        self.dbRedis.rpush(key, *value) if self.datatype == "list" else self.dbRedis.sadd(key, *value)
                        self.err_msg.append("Key:'{}' doesn't have expire_time,but added successfully with no expire_time".format(key))
                        continue
                    if not (isinstance(self.data["expire_time"][key],int) and self.data["expire_time"][key] > 0):
                        self.err_msg.append("Failed to add Key:'{}' beacuse expire_time:{} must be positive integer and greater than zero".format(key,self.data["expire_time"][key]))
                        continue
                self.dbRedis.rpush(key, *value) if self.datatype == "list" else self.dbRedis.sadd(key, *value)
                if self.setExpireTime: self.dbRedis.expire(key,self.data["expire_time"][key])
        except Exception as e:
            self.err_msg.append(str(e))
        return self.err_msg
    def _zset_hash(self):
        # Write sorted-set (ZADD) or hash (HMSET) keys, one by one.
        try:
            for key,value in self.data["data"].items():
                if self.dbRedis.exists(key) and self.whichAction == "Insert":
                    self.err_msg.append("Failed to {} Key:'{}' because already exists".format(self.doAction,key))
                    continue
                if not self.dbRedis.exists(key) and not self.whichAction == "Insert":
                    self.err_msg.append("Failed to {} Key:'{}' because does not exist".format(self.doAction,key))
                    continue
                if not isinstance(value,dict):
                    self.err_msg.append("Failed to add Key:'{}' -> Value:'{}' because value must be an Object".format(key,value))
                    continue
                if not value:
                    self.err_msg.append("Failed to add Key:'{}' -> Value:'{}' because value cannot be empty.".format(key,value))
                    continue
                # zset scores must all be numeric; reject the whole key otherwise.
                if self.datatype == "zset":
                    legalvalue = True
                    for i in value.values():
                        if not (isinstance(i,int) or isinstance(i,float)):
                            self.err_msg.append("Failed to add Key:'{}' -> Value:'{}' beacuse value's value must be an integer".format(key,value))
                            legalvalue = False
                            break
                    if not legalvalue: continue
                # When updating an existing key, delete it first and recreate it.
                if self.whichAction == "Put": self.dbRedis.delete(key)
                if self.setExpireTime:
                    if self.data["expire_time"].get(key) is None:
                        self.dbRedis.zadd(key, value) if self.datatype == "zset" else self.dbRedis.hmset(key, value)
                        self.err_msg.append("Key:'{}' doesn't have expire_time,but added successfully with no expire_time".format(key))
                        continue
                    if not (isinstance(self.data["expire_time"][key],int) and self.data["expire_time"][key] > 0):
                        self.err_msg.append("Failed to add Key:'{}' beacuse expire_time:{} must be positive integer and greater than zero".format(key,self.data["expire_time"][key]))
                        continue
                self.dbRedis.zadd(key, value) if self.datatype == "zset" else self.dbRedis.hmset(key, value)
                if self.setExpireTime: self.dbRedis.expire(key,self.data["expire_time"][key])
        except Exception as e:
            self.err_msg.append(str(e))
        return self.err_msg
def operate_CU_integration(reqdataDict, dbRedis, whichAction):
    """Run the insert/update action for every redis datatype in the request.

    Parameters:
        reqdataDict: parsed request body; may contain one sub-dict per
            datatype ("string", "list", "set", "zset", "hash"), each
            with a mandatory "data" mapping and optional "expire_time".
        dbRedis: redis session handed to _RedisInsertUpdateAction.
        whichAction: "Insert" or "Put" allow every datatype; any other
            action is restricted to "zset"/"hash".

    Returns:
        (False, message) when no recognised datatype was supplied,
        otherwise (True, {datatype: list-of-status-messages}).
    """
    if whichAction in ["Insert","Put"]:
        redis_key_type = ["string","list","set","zset","hash"]
    else:
        redis_key_type = ["zset","hash"]
    # BUGFIX: `post_parameter` was referenced in the error message below
    # but never defined, raising NameError whenever "data" was missing.
    # Bind it to the required-parameter list used for the check.
    post_parameter = ["data"]
    status = {}
    anydata = False
    for this_key_type in redis_key_type:
        payload = reqdataDict.get(this_key_type)
        if payload is None:
            continue
        anydata = True
        if not check_post_parameter_exist(payload, post_parameter):
            status[this_key_type] = ["Data : {} ,Missing post parameters : '{}'".format(payload, post_parameter)]
            continue
        # Per-key expiry forces one-by-one writes (slower); otherwise a
        # single bulk write is used (faster).
        setExpireTime = "expire_time" in payload.keys()
        status[this_key_type] = _RedisInsertUpdateAction(dbRedis, this_key_type, payload, whichAction, setExpireTime).set_value()
    if not anydata:
        # NOTE(review): "Faild" typo kept byte-for-byte in case callers
        # match on this message text.
        return False, "Faild to {},because no correct data".format(whichAction)
    return True, status
#=======================================================
# API to CommonUse query redis key
# Date: 12172020@Yishan
# FOR REDIS
#=======================================================
#{{{ INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Interval/<key>', methods = ['GET']),
@INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/SpecificKey/<key>', methods = ['GET'])
def redis_commonuse_get_specific_key(SYSTEM, key):
    # GET endpoint: return the value of a redis key (any datatype).
    # With ?pattern=yes, `key` is treated as a redis glob pattern and all
    # matching keys are returned as a mapping.
    # NOTE(review): the APIINFO docstring below looks machine-consumed
    # (JSON served as API documentation) — kept verbatim, do not edit.
    #{{{APIINFO
    '''
    {
    "API_application":"提供查詢redis資料庫指定key的值,若pattern為yes表示欲使用*、[]進行條件匹配查詢,例如:h*llo匹配hllo和heeeeello;h[ae]llo匹配hello和hallo,但不匹配hillo",
    "API_path_parameters":{"SYSTEM":"合法的系統名稱","key":"Key值"},
    "API_parameters":{"uid":"使用者帳號","pattern":"是否使用條件查詢(yes/no)"},
    "API_message_parameters":{"QueryValueData":"JSON","DB":"string"},
    "API_example":{
    "APIS": "GET /api/IOT/1.0/rd/CommonUse/Specific/test_zset",
    "OperationTime": "0.001",
    "DB": "REDIS",
    "System": "IOT",
    "BytesTransferred": 140,
    "QueryValueData": [
    "member",
    "dadsd"
    ],
    "Response": "ok"
    }
    }
    '''
    #}}}
    err_msg = "error"
    # Test alias: "test" is mapped onto the IOT system.
    if SYSTEM == "test": SYSTEM = "IOT"
    dicRet = appPaaS.preProcessRequest(request,system=SYSTEM)
    # Reject systems not registered for this server.
    if SYSTEM not in globalvar.SYSTEMLIST[globalvar.SERVERIP]:
        dicRet["Response"] = "system:{} has no privillege to use this API".format(SYSTEM)
        return jsonify( **dicRet)
    # Both uid and pattern query parameters are mandatory.
    uri_parameter = ["uid","pattern"]
    result, result_msg = check_uri_parameter_exist(request,uri_parameter)
    if not result:
        dicRet["Response"] = result_msg
        return jsonify( **dicRet)
    # NOTE(review): .encode('utf-8') compared against str literals —
    # this only works under Python 2; under Python 3 the comparison with
    # ("yes","no") would always fail. Presumably a Python 2 codebase.
    pattern = request.args.get("pattern").encode('utf-8')
    if pattern not in ("yes","no"):
        dicRet["Response"] = "parameter: pattern -> {} must be yes or no".format(pattern)
        return jsonify( **dicRet)
    pattern = True if pattern == "yes" else False
    try:
        # The redis DB index for a system is its position in the server's list.
        redis_db = globalvar.SYSTEMLIST[globalvar.SERVERIP].index(SYSTEM)
        dbRedis,_,result= appPaaS.getDbSessionType(system=SYSTEM,dbName=redis_db,forRawData="redis")
        if dbRedis is None:
            # A None session means the database connection failed.
            dicRet["Response"] = result
            return jsonify( **dicRet)
        _keys = [key]
        # Pattern mode: expand the glob to every matching key.
        if pattern: _keys = dbRedis.keys(key)
        contents = {}
        for i in _keys:
            if not dbRedis.exists(i):
                dicRet["Response"] = "Key : {} doesn't existed".format(i)
                return jsonify( **dicRet)
            redis_key_type = ["string","list","set","zset","hash"]
            this_key_type = dbRedis.type(i)
            # Unknown datatype is reported the same way as a missing key.
            if this_key_type not in redis_key_type:
                dicRet["Response"] = "Key : {} doesn't existed".format(i)
                return jsonify( **dicRet)
            contents[i] = _RedisQueryAction(dbRedis,this_key_type,i).get_value()
            # Sets are not JSON-serializable; convert to a list.
            if isinstance(contents[i],set): contents[i] = list(contents[i])
        # Single-key mode returns the bare value; pattern mode the mapping.
        if not pattern:
            dicRet["QueryValueData"] = contents[_keys[0]]
        else:
            dicRet["QueryValueData"] = contents
        err_msg = "ok"
    except Exception as e:
        err_msg = appPaaS.catch_exception(e,sys.exc_info(),SYSTEM)
    dicRet["Response"] = err_msg
    dicRet["DB"] = "REDIS"
    return jsonify( **dicRet)
#=======================================================
# API to CommonUse register redis key
# Date: 12172020@Yishan
# FOR REDIS
#=======================================================
# {{{ appPaaS.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['POST'])
@INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['POST'])
def redis_commonuse_register(SYSTEM):
    # POST endpoint: register (insert) multiple redis keys/values of any
    # supported datatype in one request; delegates the per-datatype work
    # to operate_CU_integration(..., "Insert").
    # NOTE(review): the APIINFO docstring below looks machine-consumed
    # (JSON served as API documentation) — kept verbatim, do not edit.
    #{{{APIINFO
    '''
    {
    "API_application":"提供新增redis多個key&value資料",
    "API_parameters":{"uid":"使用者帳號"},
    "API_path_parameters":{"SYSTEM":"合法的系統名稱"},
    "API_postData":{
    "bodytype":"Object",
    "bodyschema":"{}",
    "parameters":{
    "string":{
    "type":"Object",
    "requirement":"optional",
    "directions":[
    "新增字符串形態的value,詳細內容格式請看'Show Details'",
    {
    "data":{
    "說明":"欲新增的key&value(object)",
    "是否必需":"必要",
    "注意事項":"value只接受字串、數字、json字串類型;資料以字串型態儲存"
    },
    "expire_time":{
    "說明":"欲新增的key之有效期限(object)",
    "是否必需":"選填",
    "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
    }
    }
    ],
    "example":{"data":{"test_string":123,"test_string2":"test"},"expire_time":{"test_string":60}}
    },
    "list":{
    "type":"Object",
    "requirement":"optional",
    "directions":[
    "新增列表形態的value,詳細內容格式請看'Show Details'",
    {
    "data":{
    "說明":"欲新增的key&value(object)",
    "是否必需":"必要",
    "注意事項":"value必須為陣列且陣列的值只接受字串、數字、json字串類型;資料以字串型態儲存"
    },
    "expire_time":{
    "說明":"欲新增的key之有效期限(object)",
    "是否必需":"選填",
    "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
    }
    }
    ],
    "example":{"data": {"test_list1": [34123,"dwqad"]},"expire_time":{"test_list1":60,"test_list2":30}}
    },
    "set":{
    "type":"Object",
    "requirement":"optional",
    "directions":[
    "新增無序集合形態的value,集合成員是唯一的,詳細內容格式請看'Show Details'",
    {
    "data":{
    "說明":"欲新增的key&value(object)",
    "是否必需":"必要",
    "注意事項":"value必須為陣列,陣列的值只接受字串、數字、json字串類型且不得重複;資料以字串型態儲存"
    },
    "expire_time":{
    "說明":"欲新增的key之有效期限(object)",
    "是否必需":"選填",
    "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
    }
    }
    ],
    "example":{"data": {"test_set1": [34123,"dwqad"]},"expire_time":{"test_set1":60}}
    },
    "zset":{
    "type":"Object",
    "requirement":"optional",
    "directions":[
    "新增有序集合形態的value,集合成員是唯一的且都會關聯一個double 類型的分數,詳細內容格式請看'Show Details'",
    {
    "data":{
    "說明":"欲新增的key&value(object)",
    "是否必需":"必要",
    "注意事項":"value必須為物件,value物件的value必需為數字或浮點數;資料以字串型態儲存"
    },
    "expire_time":{
    "說明":"欲新增的key之有效期限(object)",
    "是否必需":"選填",
    "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
    }
    }
    ],
    "example":{"data":{"test_zset": {"data1":22,"data2":10,"data3":20}},"expire_time":{"test_zset":60}}
    },
    "hash":{
    "type":"Object",
    "requirement":"optional",
    "directions":[
    "新增JSON物件形態的value,詳細內容格式請看'Show Details'",
    {
    "data":{
    "說明":"欲新增的key&value(object)",
    "是否必需":"必要",
    "注意事項":"value必須為物件;資料以字串型態儲存"
    },
    "expire_time":{
    "說明":"欲新增的key之有效期限(object)",
    "是否必需":"選填",
    "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
    }
    }
    ],
    "example":{"data":{"test_hash1":{"data1":"qqq","data2":10,"data3":20}},"expire_time":{"test_hash1":60}}
    }
    },
    "precautions":{
    "注意事項1":"data若有設定期限,則會data-key一一比對並逐一新增(速度較慢);若無則一次新增(速度較快)"
    },
    "example":[
    {
    "string": {
    "data": {
    "test_string1": 1.2,
    "test_string2": ["sas",1321]
    },
    "expire_time":{
    "test_string1":30
    }
    },
    "list": {
    "data": {
    "test_list1": [34123,"dwqad"],
    "test_list2": [{"data1":22,"data2":10,"data3":20},"afasfa"]
    },
    "expire_time":{
    "test_list1":60,
    "test_list2":30
    }
    },
    "set": {
    "data": {
    "test_set1": [34123,"dwqad"],
    "test_set2": ["tete","tete"]
    },
    "expire_time":{
    "test_set1":15,
    "test_set2":30
    }
    },
    "zset": {
    "data": {
    "test_zset1": {"data1":"qqq","data2":10,"data3":20},
    "test_zset2": {"data1":22,"data2":10,"data3":20}
    },
    "expire_time":{
    "test_zset1":15
    }
    },
    "hash": {
    "data": {
    "test_hash1": {"data1":"qqq","data2":10,"data3":20},
    "test_hash2": ["tete","tete"]
    },
    "expire_time":{
    "test_hash1":15,
    "test_hash2":30
    }
    }
    }
    ]
    },
    "API_message_parameters":{"DB":"string","InsertStatus":"object+各類型資料新增狀態,若全部新增成功則無此Response"},
    "API_example":{
    "APIS": "POST /api/IOT/1.0/rd/CommonUse/Keys",
    "InsertStatus": {
    "hash": [
    "Failed to add Key:'test_hash2' -> Value:'['tete', 'tete']' because value must be an Object"
    ],
    "string": [
    "Invalid input of type: 'list'. Convert to a byte, string or number first."
    ],
    "zset": [
    "Failed to add Key:'test_zset1' -> Value:'{'data1': 'qqq', 'data3': 20, 'data2': 10}' beacuse value's value must be an integer",
    "Key:'test_zset2' doesn't have expire_time,but added successfully with no expire_time"
    ]
    },
    "BytesTransferred": 521,
    "OperationTime": "0.004",
    "DB": "REDIS",
    "System": "IOT",
    "Response": "ok"
    }
    }
    '''
    #}}}
    err_msg = "error"
    dicRet = appPaaS.preProcessRequest(request,system=SYSTEM)
    # Reject systems not registered for this server.
    if SYSTEM not in globalvar.SYSTEMLIST[globalvar.SERVERIP]:
        dicRet["Response"] = "system:{} has no privillege to use this API".format(SYSTEM)
        return jsonify( **dicRet)
    # uid query parameter is mandatory.
    uri_parameter = ["uid"]
    result, result_msg = check_uri_parameter_exist(request,uri_parameter)
    if not result:
        dicRet["Response"] = result_msg
        return jsonify( **dicRet)
    # The body must be valid JSON before anything else runs.
    if not VerifyDataStrLawyer(request.data).verify_json():
        dicRet["Response"] = "error input '{}' is illegal JSON".format(request.data)
        return jsonify( **dicRet)
    #collect data items from a request
    reqdataDict = ConvertData().convert(json.loads(request.data))
    # Per-datatype insert status messages.
    insertstatus = {}
    try:
        # The redis DB index for a system is its position in the server's list.
        redis_db = globalvar.SYSTEMLIST[globalvar.SERVERIP].index(SYSTEM)
        dbRedis,_,result= appPaaS.getDbSessionType(system=SYSTEM,dbName=redis_db,forRawData="redis")
        if dbRedis is None:
            # A None session means the database connection failed.
            dicRet["Response"] = result
            return jsonify( **dicRet)
        result, insertstatus = operate_CU_integration(reqdataDict, dbRedis, "Insert")
        if not result:
            dicRet["Response"] = insertstatus
            return jsonify( **dicRet)
        # Legacy inline implementation, superseded by operate_CU_integration.
        # anydata = False
        # for this_key_type in redis_key_type:
        # if reqdataDict.get(this_key_type) is not None:
        # anydata = True
        # if not check_post_parameter_exist(reqdataDict.get(this_key_type),["data"]):
        # insertstatus[this_key_type] = ["Data : {} ,Missing post parameters : '{}'".format(reqdataDict.get(this_key_type),post_parameter)]
        # continue
        # setExpireTime = False
        # if "expire_time" in reqdataDict.get(this_key_type).keys(): setExpireTime = True
        # #若key有要設定有效期限,則一筆一筆新增(較慢),否則一次新增(較快)
        # insertstatus[this_key_type] = _RedisInsertUpdateAction(dbRedis,this_key_type,reqdataDict.get(this_key_type),"Insert",setExpireTime).set_value()
        # else:
        # if not anydata:
        # dicRet["Response"] = "Faild to add,because no correct data"
        # return jsonify( **dicRet)
        # Drop datatypes whose status list is empty (full success).
        # NOTE(review): deleting from the dict while iterating .items()
        # is only safe on Python 2 (items() returns a list); on Python 3
        # this raises RuntimeError. The surrounding code's use of
        # .encode('utf-8') suggests Python 2 — confirm before porting.
        for key,value in insertstatus.items():
            if not value: del insertstatus[key]
        if insertstatus: dicRet["InsertStatus"] = insertstatus
        err_msg = "ok"
    except Exception as e:
        err_msg = appPaaS.catch_exception(e,sys.exc_info(),SYSTEM)
    dicRet["Response"] = err_msg
    dicRet["DB"] = "REDIS"
    return jsonify( **dicRet)
# }}}
#=======================================================
# API to CommonUse register redis key
# Date: 12172020@Yishan
# FOR REDIS
#=======================================================
# {{{ INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['PUT'])
@INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['PUT'])
def redis_commonuse_update_all(SYSTEM):
    #{{{APIINFO
    '''
    {
        "API_application":"提供修改redis多個key&value資料,為全部更新",
        "API_parameters":{"uid":"使用者帳號"},
        "API_path_parameters":{"SYSTEM":"合法的系統名稱"},
        "API_postData":{
            "bodytype":"Object",
            "bodyschema":"{}",
            "parameters":{
                "string":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改字符串形態的value,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object)",
                                "是否必需":"必要",
                                "注意事項":"value只接受字串、數字、json字串類型;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data":{"test_string":123,"test_string2":"test"},"expire_time":{"test_string":60}}
                },
                "list":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改列表形態的value,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object)",
                                "是否必需":"必要",
                                "注意事項":"value必須為陣列且陣列的值只接受字串、數字、json字串類型;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data": {"test_list1": [34123,"dwqad"]},"expire_time":{"test_list1":60,"test_list2":30}}
                },
                "set":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改無序集合形態的value,集合成員是唯一的,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object)",
                                "是否必需":"必要",
                                "注意事項":"value必須為陣列,陣列的值只接受字串、數字、json字串類型且不得重複;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data": {"test_set1": [34123,"dwqad"]},"expire_time":{"test_set1":60}}
                },
                "zset":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改有序集合形態的value,集合成員是唯一的且都會關聯一個double 類型的分數,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object)",
                                "是否必需":"必要",
                                "注意事項":"value必須為物件,value物件的value必需為數字或浮點數;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data":{"test_zset": {"data1":22,"data2":10,"data3":20}},"expire_time":{"test_zset":60}}
                },
                "hash":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改JSON物件形態的value,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object)",
                                "是否必需":"必要",
                                "注意事項":"value必須為物件;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data":{"test_hash1":{"data1":"qqq","data2":10,"data3":20}},"expire_time":{"test_hash1":60}}
                }
            },
            "precautions":{
                "注意事項1":"data若有設定期限,則會data-key一一比對並逐一修改(速度較慢);若無則一次修改(速度較快)"
            },
            "example":[
                {
                    "string": {
                        "data": {
                            "test_string1": 1.2,
                            "test_string2": ["sas",1321]
                        },
                        "expire_time":{
                            "test_string1":30
                        }
                    },
                    "list": {
                        "data": {
                            "test_list1": [34123,"dwqad"],
                            "test_list2": [{"data1":22,"data2":10,"data3":20},"afasfa"]
                        },
                        "expire_time":{
                            "test_list1":60,
                            "test_list2":30
                        }
                    },
                    "set": {
                        "data": {
                            "test_set1": [34123,"dwqad"],
                            "test_set2": ["tete","tete"]
                        },
                        "expire_time":{
                            "test_set1":15,
                            "test_set2":30
                        }
                    },
                    "zset": {
                        "data": {
                            "test_zset1": {"data1":"qqq","data2":10,"data3":20},
                            "test_zset2": {"data1":22,"data2":10,"data3":20}
                        },
                        "expire_time":{
                            "test_zset1":15
                        }
                    },
                    "hash": {
                        "data": {
                            "test_hash1": {"data1":"qqq","data2":10,"data3":20},
                            "test_hash2": ["tete","tete"]
                        },
                        "expire_time":{
                            "test_hash1":15,
                            "test_hash2":30
                        }
                    }
                }
            ]
        },
        "API_message_parameters":{"DB":"string","UpdateStatus":"object+各類型資料修改狀態,若全部修改成功則無此Response"},
        "API_example":{
            "APIS": "PUT /api/IOT/1.0/rd/CommonUse/Keys",
            "UpdateStatus": {
                "hash": [
                    "Failed to add Key:'test_hash2' -> Value:'['tete', 'tete']' because value must be an Object"
                ],
                "string": [
                    "Invalid input of type: 'list'. Convert to a byte, string or number first."
                ],
                "zset": [
                    "Failed to add Key:'test_zset1' -> Value:'{'data1': 'qqq', 'data3': 20, 'data2': 10}' beacuse value's value must be an integer",
                    "Key:'test_zset2' doesn't have expire_time,but added successfully with no expire_time"
                ]
            },
            "BytesTransferred": 521,
            "OperationTime": "0.004",
            "DB": "REDIS",
            "System": "IOT",
            "Response": "ok"
        }
    }
    '''
    #}}}
    # Full (PUT) replacement of multiple redis keys of assorted types.
    # SYSTEM is the tenant name from the URL path; the request body is a JSON
    # object keyed by redis key-type ("string"/"list"/"set"/"zset"/"hash").
    err_msg = "error"
    dicRet = appPaaS.preProcessRequest(request, system=SYSTEM)
    if SYSTEM not in globalvar.SYSTEMLIST[globalvar.SERVERIP]:
        dicRet["Response"] = "system:{} has no privillege to use this API".format(SYSTEM)
        return jsonify( **dicRet)
    uri_parameter = ["uid"]
    result, result_msg = check_uri_parameter_exist(request, uri_parameter)
    if not result:
        dicRet["Response"] = result_msg
        return jsonify( **dicRet)
    if not VerifyDataStrLawyer(request.data).verify_json():
        dicRet["Response"] = "error input '{}' is illegal JSON".format(request.data)
        return jsonify( **dicRet)
    # collect data items from the request body
    reqdataDict = ConvertData().convert(json.loads(request.data))
    # per-type status for the batch update
    updatestatus = {}
    try:
        # each system maps to its own redis logical database index
        redis_db = globalvar.SYSTEMLIST[globalvar.SERVERIP].index(SYSTEM)
        dbRedis, _, result = appPaaS.getDbSessionType(system=SYSTEM, dbName=redis_db, forRawData="redis")
        if dbRedis is None:
            # failed to connect to the database
            dicRet["Response"] = result
            return jsonify( **dicRet)
        result, updatestatus = operate_CU_integration(reqdataDict, dbRedis, "Put")
        if not result:
            dicRet["Response"] = updatestatus
            return jsonify( **dicRet)
        # Keep only key types that reported failures.  Build a new dict
        # instead of deleting entries while iterating the dict (the original
        # `del` inside the loop raises RuntimeError on Python 3).
        updatestatus = {key: value for key, value in updatestatus.items() if value}
        if updatestatus: dicRet["UpdateStatus"] = updatestatus
        err_msg = "ok"
    except Exception as e:
        err_msg = appPaaS.catch_exception(e, sys.exc_info(), SYSTEM)
    dicRet["Response"] = err_msg
    dicRet["DB"] = "REDIS"
    return jsonify( **dicRet)
# }}}
#=======================================================
# API to CommonUse register redis key
# Date: 12172020@Yishan
# FOR REDIS
#=======================================================
# {{{ INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['PATCH'])
@INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['PATCH'])
def redis_commonuse_partial_update(SYSTEM):
    #{{{APIINFO
    '''
    {
        "API_application":"提供修改redis型態為zset、hash的key資料,為部分更新",
        "API_parameters":{"uid":"使用者帳號"},
        "API_path_parameters":{"SYSTEM":"合法的系統名稱"},
        "API_postData":{
            "bodytype":"Object",
            "bodyschema":"{}",
            "parameters":{
                "zset":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改有序集合形態的value,集合成員是唯一的且都會關聯一個double 類型的分數,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object),若key不存在會直接建立",
                                "是否必需":"必要",
                                "注意事項":"value必須為物件,value物件的value必需為數字或浮點數;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data":{"test_zset": {"data1":22,"data2":10,"data3":20}},"expire_time":{"test_zset":60}}
                },
                "hash":{
                    "type":"Object",
                    "requirement":"optional",
                    "directions":[
                        "修改JSON物件形態的value,詳細內容格式請看'Show Details'",
                        {
                            "data":{
                                "說明":"欲修改的key&value(object),若key不存在會直接建立",
                                "是否必需":"必要",
                                "注意事項":"value必須為物件;資料以字串型態儲存"
                            },
                            "expire_time":{
                                "說明":"欲修改的key之有效期限(object)",
                                "是否必需":"選填",
                                "注意事項":"若需要設定key的有效期限(秒),key需對應data的key且value必須為正整數;不需要全部data的Key都同時設定期限,可選擇指定key"
                            }
                        }
                    ],
                    "example":{"data":{"test_hash1":{"data1":"qqq","data2":10,"data3":20}},"expire_time":{"test_hash1":60}}
                }
            },
            "precautions":{
                "注意事項1":"data若有設定期限,則會data-key一一比對並逐一修改(速度較慢);若無則一次修改(速度較快)"
            },
            "example":[
                {
                    "zset": {
                        "data": {
                            "test_zset1": {"data1":50,"data2":10,"data3":20},
                            "test_zset2": {"data1":22,"data2":10,"data3":20}
                        },
                        "expire_time":{
                            "test_zset1":15
                        }
                    },
                    "hash": {
                        "data": {
                            "test_hash1": {"data1":"qqq","data2":10,"data3":20},
                            "test_hash2": {"data11":"qqq","data22":10,"data33":20}
                        },
                        "expire_time":{
                            "test_hash1":15,
                            "test_hash2":30
                        }
                    }
                }
            ]
        },
        "API_message_parameters":{"DB":"string","UpdateStatus":"object+各類型資料修改狀態,若全部修改成功則無此Response"},
        "API_example":{
            "APIS": "PATCH /api/IOT/1.0/rd/CommonUse/Keys",
            "UpdateStatus": {
                "hash": [
                    "Failed to add Key:'test_hash2' -> Value:'['tete', 'tete']' because value must be an Object"
                ],
                "zset": [
                    "Failed to add Key:'test_zset1' -> Value:'{'data1': 'qqq', 'data3': 20, 'data2': 10}' beacuse value's value must be an integer",
                    "Key:'test_zset2' doesn't have expire_time,but added successfully with no expire_time"
                ]
            },
            "BytesTransferred": 521,
            "OperationTime": "0.004",
            "DB": "REDIS",
            "System": "IOT",
            "Response": "ok"
        }
    }
    '''
    #}}}
    # Partial (PATCH) update of redis keys; only zset and hash types support
    # member-level merging, so the body is limited to those two type sections.
    err_msg = "error"
    dicRet = appPaaS.preProcessRequest(request, system=SYSTEM)
    if SYSTEM not in globalvar.SYSTEMLIST[globalvar.SERVERIP]:
        dicRet["Response"] = "system:{} has no privillege to use this API".format(SYSTEM)
        return jsonify( **dicRet)
    uri_parameter = ["uid"]
    result, result_msg = check_uri_parameter_exist(request, uri_parameter)
    if not result:
        dicRet["Response"] = result_msg
        return jsonify( **dicRet)
    if not VerifyDataStrLawyer(request.data).verify_json():
        dicRet["Response"] = "error input '{}' is illegal JSON".format(request.data)
        return jsonify( **dicRet)
    # collect data items from the request body
    reqdataDict = ConvertData().convert(json.loads(request.data))
    # per-type status for the batch update
    updatestatus = {}
    try:
        # each system maps to its own redis logical database index
        redis_db = globalvar.SYSTEMLIST[globalvar.SERVERIP].index(SYSTEM)
        dbRedis, _, result = appPaaS.getDbSessionType(system=SYSTEM, dbName=redis_db, forRawData="redis")
        if dbRedis is None:
            # failed to connect to the database
            dicRet["Response"] = result
            return jsonify( **dicRet)
        result, updatestatus = operate_CU_integration(reqdataDict, dbRedis, "Patch")
        if not result:
            dicRet["Response"] = updatestatus
            return jsonify( **dicRet)
        # Keep only key types that reported failures.  Build a new dict
        # instead of deleting entries while iterating the dict (the original
        # `del` inside the loop raises RuntimeError on Python 3).
        updatestatus = {key: value for key, value in updatestatus.items() if value}
        if updatestatus: dicRet["UpdateStatus"] = updatestatus
        err_msg = "ok"
    except Exception as e:
        err_msg = appPaaS.catch_exception(e, sys.exc_info(), SYSTEM)
    dicRet["Response"] = err_msg
    dicRet["DB"] = "REDIS"
    return jsonify( **dicRet)
# }}}
#=======================================================
# API to CommonUse register redis key
# Date: 12172020@Yishan
# FOR REDIS
#=======================================================
# {{{ INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Hash/Keys/SpecificField', methods = ['PATCH'])
@INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Hash/Keys/SpecificField', methods = ['PATCH'])
def redis_commonuse_hash_update_specific_field(SYSTEM):
    #{{{APIINFO
    '''
    {
        "API_application":"提供修改redis Hash型態key的指定field值,為部分更新",
        "API_parameters":{"uid":"使用者帳號"},
        "API_path_parameters":{"SYSTEM":"合法的系統名稱"},
        "API_postData":{
            "bodytype":"Object",
            "bodyschema":"{}",
            "key": {"type":"Object","requirement":"required","directions":"欲更新的key,value需為物件且value->key值必須存在才能更新","example":{"test":{"qq":111,"cc":"daa"}}},
            "precautions":{
                "注意事項1":"第一層與第二層的key必須存在"
            },
            "example":[
                {
                    "test":{
                        "qq":"sdfsdf",
                        "ww":"asdasd",
                        "cc":222,
                        "ss":"dsadas",
                        "rr":"dasd"
                    }
                }
            ]
        },
        "API_message_parameters":{"UpdateStatus":"object+各類型資料更新狀態,若全部更新成功則無此Response"},
        "API_example":{
            "Response": "ok",
            "APIS": "PATCH /api/IOT/1.0/rd/CommonUse/Hash/Keys/SpecificField",
            "OperationTime": "0.002",
            "BytesTransferred": 187,
            "DB": "REDIS",
            "System": "IOT",
            "UpdateStatus": {
                "test": [
                    "key:ss doesn't existed",
                    "key:cc doesn't existed"
                ]
            }
        }
    }
    '''
    #}}}
    # PATCH individual fields of existing redis hashes.  Both the hash key and
    # each field must already exist; missing fields are reported in
    # UpdateStatus rather than created.
    err_msg = "error"
    if SYSTEM == "test": SYSTEM = "IOT"
    dicRet = appPaaS.preProcessRequest(request, system=SYSTEM)
    if SYSTEM not in globalvar.SYSTEMLIST[globalvar.SERVERIP]:
        dicRet["Response"] = "system:{} has no privillege to use this API".format(SYSTEM)
        return jsonify( **dicRet)
    uri_parameter = ["uid"]
    result, result_msg = check_uri_parameter_exist(request, uri_parameter)
    if not result:
        dicRet["Response"] = result_msg
        return jsonify( **dicRet)
    is_illegal, is_dict = VerifyDataStrLawyer(request.data).verify_json(check_dict=True)
    if not (is_illegal and is_dict):
        dicRet["Response"] = "error input '{}' is illegal JSON".format(request.data)
        return jsonify( **dicRet)
    # collect data items from the request body
    reqdataDict = ConvertData().convert(json.loads(request.data))
    # per-key status for the batch update
    updatestatus = {}
    try:
        # each system maps to its own redis logical database index
        redis_db = globalvar.SYSTEMLIST[globalvar.SERVERIP].index(SYSTEM)
        dbRedis, _, result = appPaaS.getDbSessionType(system=SYSTEM, dbName=redis_db, forRawData="redis")
        if dbRedis is None:
            # failed to connect to the database
            dicRet["Response"] = result
            return jsonify( **dicRet)
        for key, value in reqdataDict.items():
            this_obj = {}
            if not dbRedis.exists(key):
                dicRet["Response"] = "Key : {} doesn't existed".format(key)
                return jsonify( **dicRet)
            if not isinstance(value, dict):
                dicRet["Response"] = "Error type of '{}',it must be an Object".format(value)
                return jsonify( **dicRet)
            updatestatus[key] = []
            for field, field_value in value.items():
                if field_value is not None:
                    if not dbRedis.hexists(key, field):
                        updatestatus[key].append("key:{} doesn't existed".format(field))
                    else:
                        this_value = field_value
                        # nested containers are stored as JSON strings
                        if isinstance(field_value, dict) or isinstance(field_value, list): this_value = json.dumps(field_value)
                        this_obj[field] = this_value
            # hmset with an empty mapping raises DataError in redis-py; skip
            # the call when no field of this key passed the existence check
            if this_obj: dbRedis.hmset(key, this_obj)
        # Keep only keys that reported failures.  Build a new dict instead of
        # deleting entries while iterating the dict (the original `del`
        # inside the loop raises RuntimeError on Python 3).
        updatestatus = {key: value for key, value in updatestatus.items() if value}
        if updatestatus: dicRet["UpdateStatus"] = updatestatus
        err_msg = "ok"
    except Exception as e:
        err_msg = appPaaS.catch_exception(e, sys.exc_info(), SYSTEM)
    dicRet["Response"] = err_msg
    dicRet["DB"] = "REDIS"
    return jsonify( **dicRet)
# }}}
#=======================================================
# API to CommonUse register redis key
# Date: 12172020@Yishan
# FOR REDIS
#=======================================================
# {{{ INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['DELETE'])
@INMEMORYDB_API.route('/api/<SYSTEM>/1.0/rd/CommonUse/Keys', methods = ['DELETE'])
def redis_commonuse_delete(SYSTEM):
    #{{{APIINFO
    '''
    {
        "API_application":"提供刪除redis多個key",
        "API_parameters":{"uid":"使用者帳號"},
        "API_path_parameters":{"SYSTEM":"合法的系統名稱"},
        "API_postData":{
            "bodytype":"Object",
            "bodyschema":"{}",
            "parameters":{
                "keys":{"type":"Array","requirement":"required","directions":"欲刪除的key列表","example":"{'keys':[123,'test',....]}"}
            },
            "example":[
                {
                    "keys":[123,"1234"]
                }
            ]
        },
        "API_message_parameters":{"DB":"string"},
        "API_example":{
            "APIS": "DELETE /api/IOT/1.0/rd/CommonUse/Keys",
            "BytesTransferred": 521,
            "OperationTime": "0.004",
            "DB": "REDIS",
            "System": "IOT",
            "Response": "ok"
        }
    }
    '''
    #}}}
    # Delete multiple redis keys in one request ("keys" must be a non-empty
    # array in the JSON body).
    err_msg = "error"
    dicRet = appPaaS.preProcessRequest(request, system=SYSTEM)
    if SYSTEM not in globalvar.SYSTEMLIST[globalvar.SERVERIP]:
        dicRet["Response"] = "system:{} has no privillege to use this API".format(SYSTEM)
        return jsonify( **dicRet)
    uri_parameter = ["uid"]
    result, result_msg = check_uri_parameter_exist(request, uri_parameter)
    if not result:
        dicRet["Response"] = result_msg
        return jsonify( **dicRet)
    if not VerifyDataStrLawyer(request.data).verify_json():
        dicRet["Response"] = "error input '{}' is illegal JSON".format(request.data)
        return jsonify( **dicRet)
    # collect data items from the request body
    reqdataDict = ConvertData().convert(json.loads(request.data))
    if not check_post_parameter_exist(reqdataDict, ["keys"]):
        dicRet["Response"] = "Missing post parameters : '{}'".format(post_parameter)
        return jsonify( **dicRet)
    if not isinstance(reqdataDict.get("keys"), list):
        dicRet["Response"] = "Error type of '{}',it must be an Array".format(reqdataDict.get("keys"))
        return jsonify( **dicRet)
    if not reqdataDict.get("keys"):
        # redis DEL requires at least one key; an empty list would otherwise
        # surface as a raw redis exception message
        dicRet["Response"] = "Error 'keys' must not be an empty Array"
        return jsonify( **dicRet)
    try:
        # each system maps to its own redis logical database index
        redis_db = globalvar.SYSTEMLIST[globalvar.SERVERIP].index(SYSTEM)
        dbRedis, _, result = appPaaS.getDbSessionType(system=SYSTEM, dbName=redis_db, forRawData="redis")
        if dbRedis is None:
            # failed to connect to the database
            dicRet["Response"] = result
            return jsonify( **dicRet)
        dbRedis.delete(*reqdataDict.get("keys"))
        err_msg = "ok"
    except Exception as e:
        err_msg = appPaaS.catch_exception(e, sys.exc_info(), SYSTEM)
    dicRet["Response"] = err_msg
    dicRet["DB"] = "REDIS"
    return jsonify( **dicRet)
# }}}
| 41.493073
| 181
| 0.469104
| 4,370
| 50,912
| 5.339588
| 0.09016
| 0.029999
| 0.034199
| 0.0144
| 0.830505
| 0.808477
| 0.797334
| 0.784263
| 0.774921
| 0.762835
| 0
| 0.017497
| 0.382582
| 50,912
| 1,227
| 182
| 41.493073
| 0.72482
| 0.523924
| 0
| 0.671642
| 0
| 0
| 0.145541
| 0.01109
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047264
| false
| 0
| 0.007463
| 0.012438
| 0.186567
| 0.002488
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d57fb5f39ddb81a6459a74bf1b57d0a7419171de
| 24
|
py
|
Python
|
static/data/heatmap/edit.py
|
noahbkim/finances
|
98e2d0cf1bcabcc9785177d36c581e6d3f7caba7
|
[
"BSD-3-Clause"
] | 1
|
2020-07-16T05:50:23.000Z
|
2020-07-16T05:50:23.000Z
|
Download_Financial_Statement/test.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | null | null | null |
Download_Financial_Statement/test.py
|
John1001Song/Big-Data-Robo-Adviser
|
9444dce96954c546333d5aecc92a06c3bfd19aa5
|
[
"MIT"
] | 1
|
2019-02-14T08:09:46.000Z
|
2019-02-14T08:09:46.000Z
|
import json
import os
| 4.8
| 11
| 0.75
| 4
| 24
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 24
| 4
| 12
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
635ae7ddfa49a6cbc6652a991a63ec35ce2e4cc0
| 70
|
py
|
Python
|
certificate_mailer/__init__.py
|
GauravPatel89/EPAi2_capstone
|
b29fd89001f2d09d4e961a8ea24b81ba460f964b
|
[
"MIT"
] | null | null | null |
certificate_mailer/__init__.py
|
GauravPatel89/EPAi2_capstone
|
b29fd89001f2d09d4e961a8ea24b81ba460f964b
|
[
"MIT"
] | null | null | null |
certificate_mailer/__init__.py
|
GauravPatel89/EPAi2_capstone
|
b29fd89001f2d09d4e961a8ea24b81ba460f964b
|
[
"MIT"
] | null | null | null |
from .core import *
from .helpers import *
from .mailer_utils import *
| 23.333333
| 27
| 0.757143
| 10
| 70
| 5.2
| 0.6
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157143
| 70
| 3
| 27
| 23.333333
| 0.881356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6375bb73737fecab1ed61604419362251f1e1455
| 37,893
|
py
|
Python
|
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/50.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/50.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
instances/passenger_demand/pas-20210421-2109-int14000000000000001e/50.py
|
LHcau/scheduling-shared-passenger-and-freight-transport-on-a-fixed-infrastructure
|
bba1e6af5bc8d9deaa2dc3b83f6fe9ddf15d2a11
|
[
"BSD-3-Clause"
] | null | null | null |
"""
PASSENGERS
"""
numPassengers = 3217
passenger_arriving = (
(6, 3, 5, 3, 2, 0, 6, 8, 4, 2, 4, 0), # 0
(5, 11, 6, 2, 1, 0, 7, 9, 7, 3, 1, 0), # 1
(3, 8, 6, 4, 3, 0, 5, 12, 3, 7, 1, 0), # 2
(6, 6, 6, 5, 3, 0, 3, 7, 5, 5, 3, 0), # 3
(1, 4, 7, 4, 4, 0, 7, 11, 9, 6, 3, 0), # 4
(5, 11, 5, 5, 0, 0, 5, 9, 4, 4, 1, 0), # 5
(3, 8, 16, 3, 2, 0, 7, 12, 5, 7, 2, 0), # 6
(2, 8, 6, 4, 1, 0, 4, 9, 4, 6, 2, 0), # 7
(5, 9, 5, 1, 3, 0, 5, 11, 0, 6, 1, 0), # 8
(4, 8, 6, 6, 3, 0, 5, 9, 5, 3, 3, 0), # 9
(6, 8, 11, 2, 1, 0, 7, 8, 6, 5, 0, 0), # 10
(5, 9, 2, 3, 3, 0, 3, 10, 5, 3, 3, 0), # 11
(4, 12, 9, 4, 3, 0, 4, 11, 6, 10, 2, 0), # 12
(4, 11, 10, 4, 1, 0, 8, 7, 4, 3, 1, 0), # 13
(3, 16, 9, 3, 2, 0, 7, 8, 6, 3, 1, 0), # 14
(3, 10, 2, 2, 1, 0, 9, 7, 5, 1, 3, 0), # 15
(7, 3, 9, 0, 1, 0, 6, 6, 10, 7, 6, 0), # 16
(5, 10, 6, 5, 5, 0, 4, 14, 6, 2, 2, 0), # 17
(1, 14, 4, 3, 3, 0, 6, 7, 8, 7, 3, 0), # 18
(3, 14, 10, 1, 1, 0, 13, 8, 5, 8, 3, 0), # 19
(3, 8, 7, 6, 2, 0, 5, 7, 8, 4, 3, 0), # 20
(5, 5, 6, 6, 3, 0, 6, 9, 7, 6, 1, 0), # 21
(3, 7, 11, 6, 0, 0, 4, 13, 5, 5, 0, 0), # 22
(2, 8, 7, 4, 2, 0, 5, 11, 7, 3, 0, 0), # 23
(6, 5, 8, 5, 1, 0, 5, 4, 9, 6, 4, 0), # 24
(4, 10, 7, 0, 0, 0, 11, 6, 8, 5, 2, 0), # 25
(3, 14, 10, 4, 3, 0, 7, 7, 11, 4, 3, 0), # 26
(5, 7, 9, 2, 2, 0, 4, 10, 6, 5, 3, 0), # 27
(5, 6, 8, 7, 2, 0, 5, 10, 3, 3, 1, 0), # 28
(6, 6, 8, 5, 1, 0, 1, 10, 2, 4, 3, 0), # 29
(6, 10, 7, 4, 3, 0, 10, 16, 7, 12, 1, 0), # 30
(2, 5, 9, 3, 1, 0, 7, 12, 5, 4, 1, 0), # 31
(5, 5, 14, 2, 2, 0, 7, 11, 6, 6, 1, 0), # 32
(7, 12, 8, 3, 9, 0, 2, 3, 7, 4, 5, 0), # 33
(6, 2, 11, 1, 0, 0, 7, 9, 4, 4, 4, 0), # 34
(8, 7, 10, 2, 3, 0, 6, 7, 3, 7, 3, 0), # 35
(6, 13, 11, 4, 2, 0, 5, 7, 8, 6, 2, 0), # 36
(4, 8, 10, 2, 2, 0, 6, 8, 3, 4, 2, 0), # 37
(8, 10, 2, 5, 2, 0, 7, 16, 9, 6, 0, 0), # 38
(3, 12, 7, 6, 3, 0, 8, 5, 2, 4, 6, 0), # 39
(3, 4, 6, 4, 2, 0, 7, 6, 3, 6, 0, 0), # 40
(3, 9, 7, 2, 1, 0, 3, 13, 8, 7, 5, 0), # 41
(3, 6, 6, 8, 1, 0, 4, 5, 4, 6, 2, 0), # 42
(3, 8, 6, 9, 1, 0, 11, 7, 5, 6, 1, 0), # 43
(5, 13, 8, 4, 2, 0, 5, 4, 7, 4, 5, 0), # 44
(5, 10, 7, 1, 3, 0, 5, 7, 3, 7, 2, 0), # 45
(2, 14, 4, 2, 2, 0, 6, 7, 8, 7, 4, 0), # 46
(6, 14, 10, 3, 1, 0, 10, 12, 10, 2, 4, 0), # 47
(4, 7, 7, 3, 1, 0, 4, 7, 4, 9, 2, 0), # 48
(5, 7, 7, 2, 0, 0, 4, 11, 5, 3, 3, 0), # 49
(5, 10, 6, 3, 1, 0, 3, 10, 10, 4, 7, 0), # 50
(5, 6, 9, 1, 4, 0, 6, 14, 5, 5, 5, 0), # 51
(5, 6, 6, 3, 3, 0, 11, 8, 5, 3, 5, 0), # 52
(4, 7, 4, 2, 1, 0, 5, 8, 6, 10, 1, 0), # 53
(4, 16, 6, 5, 1, 0, 6, 9, 3, 3, 3, 0), # 54
(3, 10, 7, 4, 1, 0, 6, 4, 8, 3, 3, 0), # 55
(4, 13, 6, 3, 2, 0, 9, 13, 5, 11, 6, 0), # 56
(2, 9, 11, 8, 3, 0, 6, 8, 7, 5, 3, 0), # 57
(4, 12, 4, 3, 2, 0, 7, 4, 4, 4, 3, 0), # 58
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 59
)
station_arriving_intensity = (
(3.7095121817383676, 9.515044981060607, 11.19193043059126, 8.87078804347826, 10.000240384615385, 6.659510869565219), # 0
(3.7443308140669203, 9.620858238197952, 11.252381752534994, 8.920190141908213, 10.075193108974359, 6.657240994867151), # 1
(3.7787518681104277, 9.725101964085297, 11.31139817195087, 8.968504830917876, 10.148564102564103, 6.654901690821256), # 2
(3.8127461259877085, 9.827663671875001, 11.368936576156813, 9.01569089673913, 10.22028605769231, 6.652493274456523), # 3
(3.8462843698175795, 9.928430874719417, 11.424953852470724, 9.061707125603865, 10.290291666666668, 6.6500160628019325), # 4
(3.879337381718857, 10.027291085770905, 11.479406888210512, 9.106512303743962, 10.358513621794872, 6.647470372886473), # 5
(3.9118759438103607, 10.12413181818182, 11.53225257069409, 9.150065217391306, 10.424884615384617, 6.644856521739131), # 6
(3.943870838210907, 10.218840585104518, 11.58344778723936, 9.19232465277778, 10.489337339743592, 6.64217482638889), # 7
(3.975292847039314, 10.311304899691358, 11.632949425164242, 9.233249396135266, 10.551804487179488, 6.639425603864735), # 8
(4.006112752414399, 10.401412275094698, 11.680714371786634, 9.272798233695653, 10.61221875, 6.636609171195653), # 9
(4.03630133645498, 10.489050224466892, 11.72669951442445, 9.310929951690824, 10.670512820512823, 6.633725845410628), # 10
(4.065829381279876, 10.5741062609603, 11.7708617403956, 9.347603336352659, 10.726619391025642, 6.630775943538648), # 11
(4.094667669007903, 10.656467897727273, 11.813157937017996, 9.382777173913043, 10.780471153846154, 6.627759782608695), # 12
(4.122786981757876, 10.736022647920176, 11.85354499160954, 9.416410250603866, 10.832000801282053, 6.624677679649759), # 13
(4.15015810164862, 10.81265802469136, 11.891979791488144, 9.448461352657004, 10.881141025641025, 6.621529951690821), # 14
(4.1767518107989465, 10.886261541193182, 11.928419223971721, 9.478889266304348, 10.92782451923077, 6.618316915760871), # 15
(4.202538891327675, 10.956720710578002, 11.96282017637818, 9.507652777777778, 10.971983974358976, 6.61503888888889), # 16
(4.227490125353625, 11.023923045998176, 11.995139536025421, 9.53471067330918, 11.013552083333336, 6.611696188103866), # 17
(4.25157629499561, 11.087756060606061, 12.025334190231364, 9.560021739130436, 11.052461538461543, 6.608289130434783), # 18
(4.274768182372451, 11.148107267554012, 12.053361026313912, 9.58354476147343, 11.088645032051284, 6.604818032910629), # 19
(4.297036569602966, 11.204864179994388, 12.079176931590974, 9.60523852657005, 11.122035256410259, 6.601283212560387), # 20
(4.318352238805971, 11.257914311079544, 12.102738793380466, 9.625061820652174, 11.152564903846153, 6.597684986413044), # 21
(4.338685972100283, 11.307145173961842, 12.124003499000287, 9.642973429951692, 11.180166666666667, 6.5940236714975855), # 22
(4.358008551604722, 11.352444281793632, 12.142927935768354, 9.658932140700484, 11.204773237179488, 6.590299584842997), # 23
(4.3762907594381035, 11.393699147727272, 12.159468991002571, 9.672896739130437, 11.226317307692307, 6.586513043478261), # 24
(4.393503377719247, 11.430797284915124, 12.173583552020853, 9.684826011473431, 11.244731570512819, 6.582664364432368), # 25
(4.409617188566969, 11.46362620650954, 12.185228506141103, 9.694678743961353, 11.259948717948719, 6.5787538647343), # 26
(4.424602974100088, 11.492073425662877, 12.194360740681233, 9.702413722826089, 11.271901442307694, 6.574781861413045), # 27
(4.438431516437421, 11.516026455527497, 12.200937142959157, 9.707989734299519, 11.280522435897437, 6.570748671497586), # 28
(4.4510735976977855, 11.535372809255753, 12.204914600292774, 9.711365564613528, 11.285744391025641, 6.566654612016909), # 29
(4.4625, 11.55, 12.20625, 9.7125, 11.287500000000001, 6.562500000000001), # 30
(4.47319183983376, 11.56215031960227, 12.205248928140096, 9.712295118464054, 11.286861125886526, 6.556726763701484), # 31
(4.4836528452685425, 11.574140056818184, 12.202274033816424, 9.711684477124184, 11.28495815602837, 6.547834661835751), # 32
(4.493887715792838, 11.585967720170455, 12.197367798913046, 9.710674080882354, 11.281811569148937, 6.535910757121439), # 33
(4.503901150895141, 11.597631818181819, 12.19057270531401, 9.709269934640524, 11.277441843971632, 6.521042112277196), # 34
(4.513697850063939, 11.609130859374998, 12.181931234903383, 9.707478043300654, 11.27186945921986, 6.503315790021656), # 35
(4.523282512787724, 11.62046335227273, 12.171485869565219, 9.705304411764708, 11.265114893617023, 6.482818853073463), # 36
(4.532659838554988, 11.631627805397729, 12.159279091183576, 9.70275504493464, 11.257198625886524, 6.4596383641512585), # 37
(4.5418345268542195, 11.642622727272729, 12.145353381642513, 9.699835947712419, 11.248141134751775, 6.433861385973679), # 38
(4.5508112771739135, 11.653446626420456, 12.129751222826087, 9.696553125000001, 11.23796289893617, 6.40557498125937), # 39
(4.559594789002558, 11.664098011363638, 12.11251509661836, 9.692912581699348, 11.22668439716312, 6.37486621272697), # 40
(4.568189761828645, 11.674575390625, 12.093687484903382, 9.68892032271242, 11.214326108156028, 6.34182214309512), # 41
(4.576600895140665, 11.684877272727276, 12.07331086956522, 9.684582352941177, 11.2009085106383, 6.3065298350824595), # 42
(4.584832888427111, 11.69500216619318, 12.051427732487923, 9.679904677287583, 11.186452083333334, 6.26907635140763), # 43
(4.592890441176471, 11.704948579545455, 12.028080555555556, 9.674893300653595, 11.17097730496454, 6.229548754789272), # 44
(4.600778252877237, 11.714715021306818, 12.003311820652177, 9.669554227941177, 11.15450465425532, 6.188034107946028), # 45
(4.6085010230179035, 11.724300000000003, 11.97716400966184, 9.663893464052288, 11.137054609929079, 6.144619473596536), # 46
(4.616063451086957, 11.733702024147728, 11.9496796044686, 9.65791701388889, 11.118647650709221, 6.099391914459438), # 47
(4.623470236572891, 11.742919602272728, 11.920901086956523, 9.651630882352942, 11.099304255319149, 6.052438493253375), # 48
(4.630726078964194, 11.751951242897727, 11.890870939009663, 9.645041074346407, 11.079044902482272, 6.003846272696985), # 49
(4.6378356777493615, 11.760795454545454, 11.85963164251208, 9.638153594771243, 11.057890070921987, 5.953702315508913), # 50
(4.6448037324168805, 11.769450745738636, 11.827225679347826, 9.630974448529413, 11.035860239361703, 5.902093684407797), # 51
(4.651634942455243, 11.777915625, 11.793695531400965, 9.623509640522876, 11.012975886524824, 5.849107442112278), # 52
(4.658334007352941, 11.786188600852274, 11.759083680555555, 9.615765175653596, 10.989257491134753, 5.794830651340996), # 53
(4.6649056265984665, 11.79426818181818, 11.723432608695653, 9.60774705882353, 10.964725531914894, 5.739350374812594), # 54
(4.671354499680307, 11.802152876420456, 11.686784797705313, 9.599461294934642, 10.939400487588653, 5.682753675245711), # 55
(4.677685326086957, 11.809841193181818, 11.649182729468599, 9.59091388888889, 10.913302836879433, 5.625127615358988), # 56
(4.683902805306906, 11.817331640625003, 11.610668885869565, 9.582110845588236, 10.886453058510638, 5.566559257871065), # 57
(4.690011636828645, 11.824622727272727, 11.57128574879227, 9.573058169934642, 10.858871631205675, 5.507135665500583), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_arriving_acc = (
(6, 3, 5, 3, 2, 0, 6, 8, 4, 2, 4, 0), # 0
(11, 14, 11, 5, 3, 0, 13, 17, 11, 5, 5, 0), # 1
(14, 22, 17, 9, 6, 0, 18, 29, 14, 12, 6, 0), # 2
(20, 28, 23, 14, 9, 0, 21, 36, 19, 17, 9, 0), # 3
(21, 32, 30, 18, 13, 0, 28, 47, 28, 23, 12, 0), # 4
(26, 43, 35, 23, 13, 0, 33, 56, 32, 27, 13, 0), # 5
(29, 51, 51, 26, 15, 0, 40, 68, 37, 34, 15, 0), # 6
(31, 59, 57, 30, 16, 0, 44, 77, 41, 40, 17, 0), # 7
(36, 68, 62, 31, 19, 0, 49, 88, 41, 46, 18, 0), # 8
(40, 76, 68, 37, 22, 0, 54, 97, 46, 49, 21, 0), # 9
(46, 84, 79, 39, 23, 0, 61, 105, 52, 54, 21, 0), # 10
(51, 93, 81, 42, 26, 0, 64, 115, 57, 57, 24, 0), # 11
(55, 105, 90, 46, 29, 0, 68, 126, 63, 67, 26, 0), # 12
(59, 116, 100, 50, 30, 0, 76, 133, 67, 70, 27, 0), # 13
(62, 132, 109, 53, 32, 0, 83, 141, 73, 73, 28, 0), # 14
(65, 142, 111, 55, 33, 0, 92, 148, 78, 74, 31, 0), # 15
(72, 145, 120, 55, 34, 0, 98, 154, 88, 81, 37, 0), # 16
(77, 155, 126, 60, 39, 0, 102, 168, 94, 83, 39, 0), # 17
(78, 169, 130, 63, 42, 0, 108, 175, 102, 90, 42, 0), # 18
(81, 183, 140, 64, 43, 0, 121, 183, 107, 98, 45, 0), # 19
(84, 191, 147, 70, 45, 0, 126, 190, 115, 102, 48, 0), # 20
(89, 196, 153, 76, 48, 0, 132, 199, 122, 108, 49, 0), # 21
(92, 203, 164, 82, 48, 0, 136, 212, 127, 113, 49, 0), # 22
(94, 211, 171, 86, 50, 0, 141, 223, 134, 116, 49, 0), # 23
(100, 216, 179, 91, 51, 0, 146, 227, 143, 122, 53, 0), # 24
(104, 226, 186, 91, 51, 0, 157, 233, 151, 127, 55, 0), # 25
(107, 240, 196, 95, 54, 0, 164, 240, 162, 131, 58, 0), # 26
(112, 247, 205, 97, 56, 0, 168, 250, 168, 136, 61, 0), # 27
(117, 253, 213, 104, 58, 0, 173, 260, 171, 139, 62, 0), # 28
(123, 259, 221, 109, 59, 0, 174, 270, 173, 143, 65, 0), # 29
(129, 269, 228, 113, 62, 0, 184, 286, 180, 155, 66, 0), # 30
(131, 274, 237, 116, 63, 0, 191, 298, 185, 159, 67, 0), # 31
(136, 279, 251, 118, 65, 0, 198, 309, 191, 165, 68, 0), # 32
(143, 291, 259, 121, 74, 0, 200, 312, 198, 169, 73, 0), # 33
(149, 293, 270, 122, 74, 0, 207, 321, 202, 173, 77, 0), # 34
(157, 300, 280, 124, 77, 0, 213, 328, 205, 180, 80, 0), # 35
(163, 313, 291, 128, 79, 0, 218, 335, 213, 186, 82, 0), # 36
(167, 321, 301, 130, 81, 0, 224, 343, 216, 190, 84, 0), # 37
(175, 331, 303, 135, 83, 0, 231, 359, 225, 196, 84, 0), # 38
(178, 343, 310, 141, 86, 0, 239, 364, 227, 200, 90, 0), # 39
(181, 347, 316, 145, 88, 0, 246, 370, 230, 206, 90, 0), # 40
(184, 356, 323, 147, 89, 0, 249, 383, 238, 213, 95, 0), # 41
(187, 362, 329, 155, 90, 0, 253, 388, 242, 219, 97, 0), # 42
(190, 370, 335, 164, 91, 0, 264, 395, 247, 225, 98, 0), # 43
(195, 383, 343, 168, 93, 0, 269, 399, 254, 229, 103, 0), # 44
(200, 393, 350, 169, 96, 0, 274, 406, 257, 236, 105, 0), # 45
(202, 407, 354, 171, 98, 0, 280, 413, 265, 243, 109, 0), # 46
(208, 421, 364, 174, 99, 0, 290, 425, 275, 245, 113, 0), # 47
(212, 428, 371, 177, 100, 0, 294, 432, 279, 254, 115, 0), # 48
(217, 435, 378, 179, 100, 0, 298, 443, 284, 257, 118, 0), # 49
(222, 445, 384, 182, 101, 0, 301, 453, 294, 261, 125, 0), # 50
(227, 451, 393, 183, 105, 0, 307, 467, 299, 266, 130, 0), # 51
(232, 457, 399, 186, 108, 0, 318, 475, 304, 269, 135, 0), # 52
(236, 464, 403, 188, 109, 0, 323, 483, 310, 279, 136, 0), # 53
(240, 480, 409, 193, 110, 0, 329, 492, 313, 282, 139, 0), # 54
(243, 490, 416, 197, 111, 0, 335, 496, 321, 285, 142, 0), # 55
(247, 503, 422, 200, 113, 0, 344, 509, 326, 296, 148, 0), # 56
(249, 512, 433, 208, 116, 0, 350, 517, 333, 301, 151, 0), # 57
(253, 524, 437, 211, 118, 0, 357, 521, 337, 305, 154, 0), # 58
(253, 524, 437, 211, 118, 0, 357, 521, 337, 305, 154, 0), # 59
)
passenger_arriving_rate = (
(3.7095121817383676, 7.612035984848484, 6.715158258354756, 3.5483152173913037, 2.000048076923077, 0.0, 6.659510869565219, 8.000192307692307, 5.322472826086956, 4.476772172236504, 1.903008996212121, 0.0), # 0
(3.7443308140669203, 7.696686590558361, 6.751429051520996, 3.5680760567632848, 2.0150386217948717, 0.0, 6.657240994867151, 8.060154487179487, 5.352114085144928, 4.500952701013997, 1.9241716476395903, 0.0), # 1
(3.7787518681104277, 7.780081571268237, 6.786838903170522, 3.58740193236715, 2.0297128205128203, 0.0, 6.654901690821256, 8.118851282051281, 5.381102898550726, 4.524559268780347, 1.9450203928170593, 0.0), # 2
(3.8127461259877085, 7.8621309375, 6.821361945694087, 3.6062763586956517, 2.044057211538462, 0.0, 6.652493274456523, 8.176228846153847, 5.409414538043478, 4.547574630462725, 1.965532734375, 0.0), # 3
(3.8462843698175795, 7.942744699775533, 6.854972311482434, 3.624682850241546, 2.0580583333333333, 0.0, 6.6500160628019325, 8.232233333333333, 5.437024275362319, 4.569981540988289, 1.9856861749438832, 0.0), # 4
(3.879337381718857, 8.021832868616723, 6.887644132926307, 3.6426049214975844, 2.0717027243589743, 0.0, 6.647470372886473, 8.286810897435897, 5.463907382246377, 4.591762755284204, 2.005458217154181, 0.0), # 5
(3.9118759438103607, 8.099305454545455, 6.919351542416455, 3.660026086956522, 2.084976923076923, 0.0, 6.644856521739131, 8.339907692307692, 5.490039130434783, 4.612901028277636, 2.0248263636363637, 0.0), # 6
(3.943870838210907, 8.175072468083613, 6.950068672343615, 3.6769298611111116, 2.0978674679487184, 0.0, 6.64217482638889, 8.391469871794873, 5.515394791666668, 4.633379114895743, 2.043768117020903, 0.0), # 7
(3.975292847039314, 8.249043919753085, 6.979769655098544, 3.693299758454106, 2.1103608974358976, 0.0, 6.639425603864735, 8.44144358974359, 5.5399496376811594, 4.653179770065696, 2.062260979938271, 0.0), # 8
(4.006112752414399, 8.321129820075758, 7.00842862307198, 3.709119293478261, 2.12244375, 0.0, 6.636609171195653, 8.489775, 5.563678940217391, 4.672285748714653, 2.0802824550189394, 0.0), # 9
(4.03630133645498, 8.391240179573513, 7.03601970865467, 3.724371980676329, 2.134102564102564, 0.0, 6.633725845410628, 8.536410256410257, 5.586557971014494, 4.690679805769779, 2.0978100448933783, 0.0), # 10
(4.065829381279876, 8.459285008768239, 7.06251704423736, 3.739041334541063, 2.145323878205128, 0.0, 6.630775943538648, 8.581295512820512, 5.608562001811595, 4.70834469615824, 2.1148212521920597, 0.0), # 11
(4.094667669007903, 8.525174318181818, 7.087894762210797, 3.7531108695652167, 2.156094230769231, 0.0, 6.627759782608695, 8.624376923076923, 5.6296663043478254, 4.725263174807198, 2.1312935795454546, 0.0), # 12
(4.122786981757876, 8.58881811833614, 7.112126994965724, 3.766564100241546, 2.1664001602564102, 0.0, 6.624677679649759, 8.665600641025641, 5.649846150362319, 4.741417996643816, 2.147204529584035, 0.0), # 13
(4.15015810164862, 8.650126419753088, 7.135187874892886, 3.779384541062801, 2.1762282051282047, 0.0, 6.621529951690821, 8.704912820512819, 5.669076811594202, 4.756791916595257, 2.162531604938272, 0.0), # 14
(4.1767518107989465, 8.709009232954545, 7.157051534383032, 3.7915557065217387, 2.1855649038461538, 0.0, 6.618316915760871, 8.742259615384615, 5.6873335597826085, 4.771367689588688, 2.177252308238636, 0.0), # 15
(4.202538891327675, 8.7653765684624, 7.177692105826908, 3.803061111111111, 2.194396794871795, 0.0, 6.61503888888889, 8.77758717948718, 5.7045916666666665, 4.785128070551272, 2.1913441421156, 0.0), # 16
(4.227490125353625, 8.81913843679854, 7.197083721615253, 3.8138842693236716, 2.202710416666667, 0.0, 6.611696188103866, 8.810841666666668, 5.720826403985508, 4.798055814410168, 2.204784609199635, 0.0), # 17
(4.25157629499561, 8.870204848484848, 7.215200514138818, 3.824008695652174, 2.2104923076923084, 0.0, 6.608289130434783, 8.841969230769234, 5.736013043478262, 4.810133676092545, 2.217551212121212, 0.0), # 18
(4.274768182372451, 8.918485814043208, 7.232016615788346, 3.8334179045893717, 2.2177290064102566, 0.0, 6.604818032910629, 8.870916025641026, 5.750126856884058, 4.8213444105255645, 2.229621453510802, 0.0), # 19
(4.297036569602966, 8.96389134399551, 7.247506158954584, 3.8420954106280196, 2.2244070512820517, 0.0, 6.601283212560387, 8.897628205128207, 5.76314311594203, 4.831670772636389, 2.2409728359988774, 0.0), # 20
(4.318352238805971, 9.006331448863634, 7.261643276028279, 3.8500247282608693, 2.2305129807692303, 0.0, 6.597684986413044, 8.922051923076921, 5.775037092391305, 4.841095517352186, 2.2515828622159084, 0.0), # 21
(4.338685972100283, 9.045716139169473, 7.274402099400172, 3.8571893719806765, 2.2360333333333333, 0.0, 6.5940236714975855, 8.944133333333333, 5.785784057971015, 4.849601399600115, 2.2614290347923682, 0.0), # 22
(4.358008551604722, 9.081955425434906, 7.285756761461012, 3.8635728562801934, 2.2409546474358972, 0.0, 6.590299584842997, 8.963818589743589, 5.79535928442029, 4.857171174307341, 2.2704888563587264, 0.0), # 23
(4.3762907594381035, 9.114959318181818, 7.295681394601543, 3.869158695652174, 2.2452634615384612, 0.0, 6.586513043478261, 8.981053846153845, 5.803738043478262, 4.863787596401028, 2.2787398295454544, 0.0), # 24
(4.393503377719247, 9.1446378279321, 7.304150131212511, 3.8739304045893723, 2.2489463141025636, 0.0, 6.582664364432368, 8.995785256410255, 5.810895606884059, 4.869433420808341, 2.286159456983025, 0.0), # 25
(4.409617188566969, 9.17090096520763, 7.311137103684661, 3.8778714975845405, 2.2519897435897436, 0.0, 6.5787538647343, 9.007958974358974, 5.816807246376811, 4.874091402456441, 2.2927252413019077, 0.0), # 26
(4.424602974100088, 9.193658740530301, 7.31661644440874, 3.880965489130435, 2.2543802884615385, 0.0, 6.574781861413045, 9.017521153846154, 5.821448233695653, 4.877744296272493, 2.2984146851325753, 0.0), # 27
(4.438431516437421, 9.212821164421996, 7.320562285775494, 3.8831958937198072, 2.256104487179487, 0.0, 6.570748671497586, 9.024417948717948, 5.824793840579711, 4.8803748571836625, 2.303205291105499, 0.0), # 28
(4.4510735976977855, 9.228298247404602, 7.322948760175664, 3.884546225845411, 2.257148878205128, 0.0, 6.566654612016909, 9.028595512820512, 5.826819338768117, 4.881965840117109, 2.3070745618511506, 0.0), # 29
(4.4625, 9.24, 7.32375, 3.885, 2.2575000000000003, 0.0, 6.562500000000001, 9.030000000000001, 5.8275, 4.8825, 2.31, 0.0), # 30
(4.47319183983376, 9.249720255681815, 7.323149356884057, 3.884918047385621, 2.257372225177305, 0.0, 6.556726763701484, 9.02948890070922, 5.827377071078432, 4.882099571256038, 2.312430063920454, 0.0), # 31
(4.4836528452685425, 9.259312045454546, 7.3213644202898545, 3.884673790849673, 2.2569916312056737, 0.0, 6.547834661835751, 9.027966524822695, 5.82701068627451, 4.880909613526569, 2.3148280113636366, 0.0), # 32
(4.493887715792838, 9.268774176136363, 7.3184206793478275, 3.8842696323529413, 2.2563623138297872, 0.0, 6.535910757121439, 9.025449255319149, 5.826404448529412, 4.878947119565218, 2.3171935440340907, 0.0), # 33
(4.503901150895141, 9.278105454545454, 7.314343623188405, 3.8837079738562093, 2.2554883687943263, 0.0, 6.521042112277196, 9.021953475177305, 5.825561960784314, 4.876229082125604, 2.3195263636363634, 0.0), # 34
(4.513697850063939, 9.287304687499997, 7.3091587409420296, 3.882991217320261, 2.2543738918439717, 0.0, 6.503315790021656, 9.017495567375887, 5.824486825980392, 4.872772493961353, 2.3218261718749993, 0.0), # 35
(4.523282512787724, 9.296370681818182, 7.302891521739131, 3.8821217647058828, 2.253022978723404, 0.0, 6.482818853073463, 9.012091914893617, 5.823182647058824, 4.868594347826087, 2.3240926704545455, 0.0), # 36
(4.532659838554988, 9.305302244318183, 7.295567454710145, 3.881102017973856, 2.2514397251773044, 0.0, 6.4596383641512585, 9.005758900709218, 5.821653026960784, 4.86371163647343, 2.3263255610795457, 0.0), # 37
(4.5418345268542195, 9.314098181818181, 7.287212028985508, 3.8799343790849674, 2.249628226950355, 0.0, 6.433861385973679, 8.99851290780142, 5.819901568627452, 4.858141352657005, 2.3285245454545453, 0.0), # 38
(4.5508112771739135, 9.322757301136363, 7.277850733695652, 3.87862125, 2.247592579787234, 0.0, 6.40557498125937, 8.990370319148935, 5.817931875, 4.8519004891304345, 2.330689325284091, 0.0), # 39
(4.559594789002558, 9.33127840909091, 7.267509057971015, 3.8771650326797387, 2.245336879432624, 0.0, 6.37486621272697, 8.981347517730496, 5.815747549019608, 4.845006038647344, 2.3328196022727274, 0.0), # 40
(4.568189761828645, 9.3396603125, 7.256212490942029, 3.8755681290849675, 2.2428652216312055, 0.0, 6.34182214309512, 8.971460886524822, 5.813352193627452, 4.837474993961353, 2.334915078125, 0.0), # 41
(4.576600895140665, 9.34790181818182, 7.2439865217391315, 3.8738329411764707, 2.2401817021276598, 0.0, 6.3065298350824595, 8.960726808510639, 5.810749411764706, 4.829324347826088, 2.336975454545455, 0.0), # 42
(4.584832888427111, 9.356001732954544, 7.230856639492753, 3.8719618709150327, 2.2372904166666667, 0.0, 6.26907635140763, 8.949161666666667, 5.80794280637255, 4.820571092995169, 2.339000433238636, 0.0), # 43
(4.592890441176471, 9.363958863636363, 7.216848333333333, 3.8699573202614377, 2.2341954609929076, 0.0, 6.229548754789272, 8.93678184397163, 5.804935980392157, 4.811232222222222, 2.3409897159090907, 0.0), # 44
(4.600778252877237, 9.371772017045453, 7.201987092391306, 3.8678216911764705, 2.230900930851064, 0.0, 6.188034107946028, 8.923603723404256, 5.801732536764706, 4.80132472826087, 2.3429430042613633, 0.0), # 45
(4.6085010230179035, 9.379440000000002, 7.186298405797103, 3.8655573856209147, 2.2274109219858156, 0.0, 6.144619473596536, 8.909643687943262, 5.798336078431372, 4.790865603864735, 2.3448600000000006, 0.0), # 46
(4.616063451086957, 9.386961619318182, 7.16980776268116, 3.8631668055555552, 2.223729530141844, 0.0, 6.099391914459438, 8.894918120567375, 5.794750208333333, 4.77987184178744, 2.3467404048295455, 0.0), # 47
(4.623470236572891, 9.394335681818182, 7.152540652173913, 3.8606523529411763, 2.21986085106383, 0.0, 6.052438493253375, 8.87944340425532, 5.790978529411765, 4.7683604347826085, 2.3485839204545456, 0.0), # 48
(4.630726078964194, 9.401560994318181, 7.134522563405797, 3.8580164297385626, 2.2158089804964543, 0.0, 6.003846272696985, 8.863235921985817, 5.787024644607844, 4.7563483756038645, 2.3503902485795454, 0.0), # 49
(4.6378356777493615, 9.408636363636361, 7.115778985507247, 3.8552614379084966, 2.211578014184397, 0.0, 5.953702315508913, 8.846312056737588, 5.782892156862745, 4.743852657004831, 2.3521590909090904, 0.0), # 50
(4.6448037324168805, 9.415560596590907, 7.096335407608696, 3.852389779411765, 2.2071720478723407, 0.0, 5.902093684407797, 8.828688191489363, 5.778584669117648, 4.73089027173913, 2.353890149147727, 0.0), # 51
(4.651634942455243, 9.4223325, 7.0762173188405795, 3.84940385620915, 2.2025951773049646, 0.0, 5.849107442112278, 8.810380709219858, 5.774105784313726, 4.717478212560386, 2.355583125, 0.0), # 52
(4.658334007352941, 9.428950880681818, 7.055450208333333, 3.8463060702614382, 2.1978514982269504, 0.0, 5.794830651340996, 8.791405992907801, 5.769459105392158, 4.703633472222222, 2.3572377201704544, 0.0), # 53
(4.6649056265984665, 9.435414545454544, 7.034059565217391, 3.843098823529412, 2.192945106382979, 0.0, 5.739350374812594, 8.771780425531915, 5.764648235294119, 4.689373043478261, 2.358853636363636, 0.0), # 54
(4.671354499680307, 9.441722301136364, 7.012070878623187, 3.8397845179738566, 2.1878800975177306, 0.0, 5.682753675245711, 8.751520390070922, 5.759676776960785, 4.674713919082125, 2.360430575284091, 0.0), # 55
(4.677685326086957, 9.447872954545453, 6.989509637681159, 3.8363655555555556, 2.1826605673758865, 0.0, 5.625127615358988, 8.730642269503546, 5.754548333333334, 4.65967309178744, 2.361968238636363, 0.0), # 56
(4.683902805306906, 9.453865312500001, 6.966401331521738, 3.832844338235294, 2.1772906117021273, 0.0, 5.566559257871065, 8.70916244680851, 5.749266507352941, 4.644267554347826, 2.3634663281250003, 0.0), # 57
(4.690011636828645, 9.459698181818181, 6.942771449275362, 3.8292232679738563, 2.1717743262411346, 0.0, 5.507135665500583, 8.687097304964539, 5.743834901960785, 4.628514299516908, 2.3649245454545453, 0.0), # 58
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 59
)
passenger_allighting_rate = (
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 0
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 1
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 2
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 3
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 4
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 5
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 6
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 7
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 8
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 9
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 10
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 11
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 12
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 13
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 14
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 15
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 16
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 17
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 18
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 19
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 20
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 21
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 22
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 23
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 24
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 25
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 26
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 27
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 28
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 29
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 30
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 31
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 32
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 33
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 34
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 35
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 36
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 37
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 38
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 39
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 40
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 41
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 42
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 43
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 44
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 45
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 46
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 47
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 48
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 49
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 50
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 51
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 52
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 53
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 54
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 55
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 56
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 57
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 58
(0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1, 0, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 0.16666666666666666, 1), # 59
)
"""
parameters for reproducibility. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
# Initial entropy used to construct the NumPy SeedSequence
# (see https://numpy.org/doc/stable/reference/random/parallel.html).
entropy = 258194110137029475889902652135037600173
# Indices of the SeedSequence children used to seed this run's generators —
# presumably one child stream per generator; TODO confirm against the consumer.
child_seed_index = (
1, # 0
49, # 1
)
| 113.113433
| 212
| 0.729132
| 5,147
| 37,893
| 5.365844
| 0.227317
| 0.312839
| 0.247665
| 0.469259
| 0.328481
| 0.327757
| 0.327757
| 0.327757
| 0.327757
| 0.327757
| 0
| 0.819048
| 0.119125
| 37,893
| 334
| 213
| 113.452096
| 0.008359
| 0.031958
| 0
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.015823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
63b0f0020c6539a99eaf950ff9965373e2f30d38
| 7,062
|
py
|
Python
|
apps/pretrained_protein/tape/data_process.py
|
Noisyntrain/PaddleHelix
|
f9e62eec9e8e5220de2c633ce0b97acc0230df1a
|
[
"Apache-2.0"
] | 2
|
2020-12-08T09:20:16.000Z
|
2020-12-18T07:50:56.000Z
|
apps/pretrained_protein/tape/data_process.py
|
Noisyntrain/PaddleHelix
|
f9e62eec9e8e5220de2c633ce0b97acc0230df1a
|
[
"Apache-2.0"
] | null | null | null |
apps/pretrained_protein/tape/data_process.py
|
Noisyntrain/PaddleHelix
|
f9e62eec9e8e5220de2c633ce0b97acc0230df1a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Multiple protein datasets.
"""
import json
import numpy as np
import sys
from pahelix.utils.protein_tools import ProteinTokenizer
class Pfam(object):
    """Container for the Pfam pretraining dataset.

    For details on the dataset, see the paper
    "Evaluating Protein Transfer Learning with TAPE".
    """

    def __init__(self):
        # Tokenizer mapping amino-acid strings to integer token ids.
        self.tokenizer = ProteinTokenizer()
        self.clear()

    def gen_sequence_data(self, data):
        """Tokenize the amino-acid sequence of a single record."""
        return self.tokenizer.gen_token_ids(data['amino_acids'])

    def append(self, data):
        """Tokenize one record and accumulate it into the buffers."""
        ids = self.gen_sequence_data(data)
        self.lengths.append(len(ids))
        self.token_ids.extend(ids)

    def clear(self):
        """Reset all accumulated buffers."""
        self.token_ids, self.lengths = [], []

    def save_npz(self, filename):
        """Write the accumulated buffers to ``filename`` in npz format."""
        np.savez('%s' % filename,
                 token_ids=np.array(self.token_ids, dtype='int8'),
                 lengths=np.array(self.lengths, dtype='int64'))
class SecondaryStructure(object):
    """Container for the secondary-structure dataset.

    For details on the dataset, see the paper
    "Evaluating Protein Transfer Learning with TAPE".
    """

    def __init__(self):
        # Tokenizer mapping amino-acid strings to integer token ids.
        self.tokenizer = ProteinTokenizer()
        self.clear()

    def gen_sequence_data(self, data):
        """Tokenize one record and build its per-residue label sequences.

        Both label lists are padded with a 0 at each end — presumably to
        align with special start/end tokens added by the tokenizer;
        TODO confirm against ProteinTokenizer.gen_token_ids.
        """
        token_ids = self.tokenizer.gen_token_ids(data['amino_acids'])
        pad = [0]
        labels3 = pad + data['ss3'] + pad
        labels8 = pad + data['ss8'] + pad
        return token_ids, labels3, labels8

    def append(self, data):
        """Accumulate one tokenized record into the buffers."""
        token_ids, labels3, labels8 = self.gen_sequence_data(data)
        self.lengths.append(len(token_ids))
        self.token_ids.extend(token_ids)
        self.labels3.extend(labels3)
        self.labels8.extend(labels8)

    def clear(self):
        """Reset all accumulated buffers."""
        self.token_ids, self.lengths = [], []
        self.labels3, self.labels8 = [], []

    def save_npz(self, filename):
        """Write the accumulated buffers to ``filename`` in npz format."""
        np.savez('%s' % filename,
                 token_ids=np.array(self.token_ids, dtype='int8'),
                 labels3=np.array(self.labels3, dtype='int8'),
                 labels8=np.array(self.labels8, dtype='int8'),
                 lengths=np.array(self.lengths, dtype='int64'))
class RemoteHomology(object):
    """Container for the remote-homology (fold classification) dataset.

    For details on the dataset, see the paper
    "Evaluating Protein Transfer Learning with TAPE".
    """

    def __init__(self):
        # Tokenizer mapping amino-acid strings to integer token ids.
        self.tokenizer = ProteinTokenizer()
        self.clear()

    def gen_sequence_data(self, data):
        """Tokenize one record and fetch its fold label."""
        token_ids = self.tokenizer.gen_token_ids(data['amino_acids'])
        return token_ids, data['fold_label']

    def append(self, data):
        """Accumulate one tokenized record into the buffers.

        NOTE(review): the label is accumulated via ``extend``, which
        assumes ``data['fold_label']`` is iterable even though the name is
        singular; the labels are also saved as int8 later, which would
        overflow for fold ids above 127 — confirm against the raw data.
        """
        token_ids, labels = self.gen_sequence_data(data)
        self.lengths.append(len(token_ids))
        self.token_ids.extend(token_ids)
        self.labels.extend(labels)

    def clear(self):
        """Reset all accumulated buffers."""
        self.token_ids, self.labels, self.lengths = [], [], []

    def save_npz(self, filename):
        """Write the accumulated buffers to ``filename`` in npz format."""
        np.savez('%s' % filename,
                 token_ids=np.array(self.token_ids, dtype='int8'),
                 labels=np.array(self.labels, dtype='int8'),
                 lengths=np.array(self.lengths, dtype='int64'))
class Fluorescence(object):
    """Container for the fluorescence dataset.

    For details on the dataset, see the paper
    "Evaluating Protein Transfer Learning with TAPE".
    """

    def __init__(self):
        # Tokenizer mapping amino-acid strings to integer token ids.
        self.tokenizer = ProteinTokenizer()
        self.clear()

    def gen_sequence_data(self, data):
        """Tokenize one record and fetch its fluorescence label."""
        token_ids = self.tokenizer.gen_token_ids(data['amino_acids'])
        return token_ids, data['log_fluorescence']

    def append(self, data):
        """Accumulate one tokenized record into the buffers.

        NOTE(review): the label is accumulated via ``extend``, so
        ``data['log_fluorescence']`` is assumed to be iterable; it is also
        saved as int8 later, which truncates if the scores are floats —
        confirm against the raw data.
        """
        token_ids, labels = self.gen_sequence_data(data)
        self.lengths.append(len(token_ids))
        self.token_ids.extend(token_ids)
        self.labels.extend(labels)

    def clear(self):
        """Reset all accumulated buffers."""
        self.token_ids, self.labels, self.lengths = [], [], []

    def save_npz(self, filename):
        """Write the accumulated buffers to ``filename`` in npz format."""
        np.savez('%s' % filename,
                 token_ids=np.array(self.token_ids, dtype='int8'),
                 labels=np.array(self.labels, dtype='int8'),
                 lengths=np.array(self.lengths, dtype='int64'))
class Stability(object):
    """Container for the stability dataset.

    For details on the dataset, see the paper
    "Evaluating Protein Transfer Learning with TAPE".
    """

    def __init__(self):
        # Tokenizer mapping amino-acid strings to integer token ids.
        self.tokenizer = ProteinTokenizer()
        self.clear()

    def gen_sequence_data(self, data):
        """Tokenize one record and fetch its stability label."""
        token_ids = self.tokenizer.gen_token_ids(data['amino_acids'])
        return token_ids, data['stability_score']

    def append(self, data):
        """Accumulate one tokenized record into the buffers.

        NOTE(review): the label is accumulated via ``extend``, so
        ``data['stability_score']`` is assumed to be iterable; it is also
        saved as int8 later, which truncates if the scores are floats —
        confirm against the raw data.
        """
        token_ids, labels = self.gen_sequence_data(data)
        self.lengths.append(len(token_ids))
        self.token_ids.extend(token_ids)
        self.labels.extend(labels)

    def clear(self):
        """Reset all accumulated buffers."""
        self.token_ids, self.labels, self.lengths = [], [], []

    def save_npz(self, filename):
        """Write the accumulated buffers to ``filename`` in npz format."""
        np.savez('%s' % filename,
                 token_ids=np.array(self.token_ids, dtype='int8'),
                 labels=np.array(self.labels, dtype='int8'),
                 lengths=np.array(self.lengths, dtype='int64'))
if __name__ == '__main__':
    # Build the secondary-structure dataset from one-JSON-object-per-line
    # input and dump it to 'data.npz'.
    dataset = SecondaryStructure()
    with open('raw_data', 'r') as fin:
        for line in fin:
            dataset.append(json.loads(line))
    dataset.save_npz('data')
| 30.179487
| 90
| 0.597564
| 833
| 7,062
| 4.911164
| 0.177671
| 0.097776
| 0.046932
| 0.03911
| 0.726228
| 0.726228
| 0.726228
| 0.726228
| 0.709118
| 0.709118
| 0
| 0.00987
| 0.282639
| 7,062
| 233
| 91
| 30.309013
| 0.797671
| 0.25177
| 0
| 0.753968
| 0
| 0
| 0.038869
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.198413
| false
| 0
| 0.031746
| 0
| 0.309524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
894b62c4df130fab2d0b6e18ab30eaedcd59b417
| 43
|
py
|
Python
|
jupyterlabpymolpysnips/Programming/synch.py
|
MooersLab/pymolpysnips
|
50a89c85adf8006d85c1d6cd3f8aad7e440a0b92
|
[
"MIT"
] | null | null | null |
jupyterlabpymolpysnips/Programming/synch.py
|
MooersLab/pymolpysnips
|
50a89c85adf8006d85c1d6cd3f8aad7e440a0b92
|
[
"MIT"
] | null | null | null |
jupyterlabpymolpysnips/Programming/synch.py
|
MooersLab/pymolpysnips
|
50a89c85adf8006d85c1d6cd3f8aad7e440a0b92
|
[
"MIT"
] | null | null | null |
cmd.do('cmd.sync(timeout=1.0,poll=0.05);')
| 21.5
| 42
| 0.651163
| 10
| 43
| 2.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 0.023256
| 43
| 1
| 43
| 43
| 0.547619
| 0
| 0
| 0
| 0
| 0
| 0.744186
| 0.744186
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
894d014d0ca5ff4dd0c47ad690b39505de82c19c
| 4,056
|
py
|
Python
|
app/controllers/vote.py
|
Axtell-io/Axtell
|
2f660450ca2eb70cc0520ad970c9aabdc65a3bb7
|
[
"MIT"
] | 15
|
2018-04-20T14:56:59.000Z
|
2021-03-31T20:16:29.000Z
|
app/controllers/vote.py
|
Axtell/Axtell
|
2f660450ca2eb70cc0520ad970c9aabdc65a3bb7
|
[
"MIT"
] | 148
|
2018-04-17T01:47:44.000Z
|
2020-05-14T13:24:03.000Z
|
app/controllers/vote.py
|
Axtell-io/Axtell
|
2f660450ca2eb70cc0520ad970c9aabdc65a3bb7
|
[
"MIT"
] | 7
|
2018-06-01T11:15:18.000Z
|
2020-08-14T04:24:50.000Z
|
from flask import g, abort, redirect, url_for
from app.instances import db
from app.models.Post import Post
from app.models.Answer import Answer
from app.models.PostVote import PostVote
from app.models.AnswerVote import AnswerVote
# noinspection PyUnresolvedReferences
import app.routes.post
# noinspection PyUnresolvedReferences
import app.routes.user_settings
# noinspection PyUnresolvedReferences
import app.routes.auth
def get_post_vote_breakdown(post_id):
    """Return {"upvote": n, "downvote": m} for a post; 404 if it does not exist."""
    if Post.query.filter_by(id=post_id).first() is None:
        return abort(404)
    cast = [pv.vote for pv in PostVote.query.filter_by(post_id=post_id).all()]
    return {"upvote": cast.count(1), "downvote": cast.count(-1)}
def get_answer_vote_breakdown(answer_id):
    """Return {"upvote": n, "downvote": m} for an answer; 404 if it does not exist."""
    if Answer.query.filter_by(id=answer_id).first() is None:
        return abort(404)
    cast = [av.vote for av in AnswerVote.query.filter_by(answer_id=answer_id).all()]
    return {"upvote": cast.count(1), "downvote": cast.count(-1)}
def get_post_vote(post_id):
    """Return the current user's vote on a post plus the overall breakdown.

    Anonymous users get vote 0.
    """
    user = g.user
    if user is None:
        return {"vote": 0, "breakdown": get_post_vote_breakdown(post_id)}
    record = PostVote.query.filter_by(post_id=post_id, user_id=user.id).first()
    vote = 0 if record is None else record.vote
    return {"vote": vote, "breakdown": get_post_vote_breakdown(post_id)}
def get_answer_vote(answer_id):
    """Return the current user's vote on an answer plus the overall breakdown.

    Anonymous users get vote 0.
    """
    user = g.user
    if user is None:
        return {"vote": 0, "breakdown": get_answer_vote_breakdown(answer_id)}
    record = AnswerVote.query.filter_by(answer_id=answer_id, user_id=user.id).first()
    vote = 0 if record is None else record.vote
    return {"vote": vote, "breakdown": get_answer_vote_breakdown(answer_id)}
def do_post_vote(post_id, vote):
    """Cast or update the current user's vote on a post.

    Args:
        post_id: id of the post being voted on.
        vote: -1, 0 or 1 (may arrive as a string; coerced below).

    Returns:
        {"vote": vote, "breakdown": {...}} on success. Aborts with 401 when
        not logged in, 400 on a bad vote value, 404 when the post does not
        exist, 403 when voting on one's own content.
    """
    current_user = g.user
    if current_user is None:
        return abort(401)
    # ensure that vote is a valid value
    try:
        vote = int(vote)
    except ValueError:
        return abort(400)
    if vote not in (-1, 0, 1):
        return abort(400)
    post = Post.query.filter_by(id=post_id).first()
    # Bugfix: a missing post previously raised AttributeError (HTTP 500)
    # on the ownership check below; return a proper 404 instead.
    if post is None:
        return abort(404)
    # ensure that user is not voting on own content
    if post.user_id == current_user.id:
        return abort(403)
    # handle changing existing vote
    prev_vote = PostVote.query.filter_by(post_id=post_id, user_id=current_user.id).first()
    if prev_vote is not None:
        prev_vote.vote = vote
        db.session.commit()
    else:
        new_vote = PostVote(post_id=post_id, vote=vote, user_id=current_user.id)
        current_user.post_votes.append(new_vote)
        # reuse the post fetched above (the original re-queried it redundantly)
        post.votes.append(new_vote)
        db.session.add(new_vote)
        db.session.commit()
    return {"vote": vote, "breakdown": get_post_vote_breakdown(post_id)}
def do_answer_vote(answer_id, vote):
    """Cast or update the current user's vote on an answer.

    Args:
        answer_id: id of the answer being voted on.
        vote: -1, 0 or 1 (may arrive as a string; coerced below).

    Returns:
        {"vote": vote, "breakdown": {...}} on success. Aborts with 401 when
        not logged in, 400 on a bad vote value, 404 when the answer does not
        exist, 403 when voting on one's own content.
    """
    current_user = g.user
    if current_user is None:
        return abort(401)
    # ensure that vote is a valid value
    try:
        vote = int(vote)
    except ValueError:
        return abort(400)
    if vote not in (-1, 0, 1):
        return abort(400)
    answer = Answer.query.filter_by(id=answer_id).first()
    # Bugfix: a missing answer previously raised AttributeError (HTTP 500)
    # on the ownership check below; return a proper 404 instead.
    if answer is None:
        return abort(404)
    # ensure that user is not voting on own content
    if answer.user_id == current_user.id:
        return abort(403)
    # handle changing existing vote
    prev_vote = AnswerVote.query.filter_by(answer_id=answer_id, user_id=current_user.id).first()
    if prev_vote is not None:
        prev_vote.vote = vote
        db.session.commit()
    else:
        new_vote = AnswerVote(answer_id=answer_id, vote=vote, user_id=current_user.id)
        current_user.answer_votes.append(new_vote)
        answer.votes.append(new_vote)
        db.session.add(new_vote)
        db.session.commit()
    return {"vote": vote, "breakdown": get_answer_vote_breakdown(answer_id)}
| 30.496241
| 100
| 0.68787
| 597
| 4,056
| 4.465662
| 0.130653
| 0.03826
| 0.053638
| 0.051013
| 0.858965
| 0.782821
| 0.782821
| 0.715679
| 0.715679
| 0.627157
| 0
| 0.013716
| 0.209073
| 4,056
| 132
| 101
| 30.727273
| 0.817332
| 0.080621
| 0
| 0.606383
| 0
| 0
| 0.028502
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06383
| false
| 0
| 0.095745
| 0
| 0.351064
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9832420e9b1efa37189cef6984e6d4e746c07bab
| 22
|
py
|
Python
|
plugins/photos/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 13
|
2020-01-27T09:02:25.000Z
|
2022-01-20T07:45:26.000Z
|
plugins/photos/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 29
|
2020-03-22T06:57:57.000Z
|
2022-01-24T22:46:42.000Z
|
plugins/photos/__init__.py
|
mohnjahoney/website_source
|
edc86a869b90ae604f32e736d9d5ecd918088e6a
|
[
"MIT"
] | 6
|
2020-07-10T00:13:30.000Z
|
2022-01-26T08:22:33.000Z
|
from .photos import *
| 11
| 21
| 0.727273
| 3
| 22
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
98927ebd75c178bd40da3ce84bab511170412f75
| 4,993
|
py
|
Python
|
src/app/main/form.py
|
YiNNx/OAuth2.0
|
0b13de55aea594c7265f6d82783f4836746b7ea0
|
[
"MIT"
] | null | null | null |
src/app/main/form.py
|
YiNNx/OAuth2.0
|
0b13de55aea594c7265f6d82783f4836746b7ea0
|
[
"MIT"
] | null | null | null |
src/app/main/form.py
|
YiNNx/OAuth2.0
|
0b13de55aea594c7265f6d82783f4836746b7ea0
|
[
"MIT"
] | null | null | null |
' wtf表单 '
__author__ = 'YiNN'
from wtforms.fields import simple,RadioField,IntegerField
from wtforms import Form,validators,widgets
class LoginForm(Form):
    """Login form: e-mail address plus password."""
    email = simple.StringField(
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            validators.Email(message="请输入正确的Email格式(゚Д゚*)ノ"),
        ],
        widget=widgets.TextInput(),
        render_kw={"class": "form-control"},  # emitted as the HTML class attribute
    )
    pword = simple.PasswordField(
        validators=[
            validators.DataRequired(message="请输入密码(゚Д゚*)ノ"),
        ],
        widget=widgets.PasswordInput(),
        render_kw={"class": "form-control"},
    )
class SignUpForm(Form):
    """Sign-up form: e-mail, password (with confirmation) and nickname."""
    email = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            validators.Email(message="请输入正确的Email格式(゚Д゚*)ノ"),
        ],
        render_kw={"class": "form-control"},  # emitted as the HTML class attribute
    )
    pword = simple.PasswordField(
        validators=[
            validators.DataRequired(message="请输入密码(゚Д゚*)ノ"),
            # Bugfix: %(max)d / %(min)d were swapped, rendering
            # "greater than 20 and less than 6".
            validators.Length(max=20, min=6, message="密码长度须大于%(min)d字且小于%(max)d字(゚Д゚*)ノ"),
        ],
        widget=widgets.PasswordInput(),
        render_kw={"class": "form-control"},
    )
    pword_re = simple.PasswordField(
        validators=[
            validators.DataRequired(message="请输入密码(゚Д゚*)ノ"),
            validators.EqualTo('pword', message="两次密码输入不同哦(゚Д゚*)ノ"),
        ],
        widget=widgets.PasswordInput(),
        render_kw={"class": "form-control"},
    )
    nickname = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            # Bugfix: same swapped placeholders as the password field.
            validators.Length(max=8, min=3, message="昵称须大于%(min)d字且小于%(max)d字(゚Д゚*)ノ")
        ],
        render_kw={"class": "form-control"},
    )
class InfoForm(Form):
    """Profile form: e-mail, nickname, avatar URL and self-introduction."""
    email = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            validators.Email(message="请输入正确的Email格式(゚Д゚*)ノ"),
        ],
        render_kw={"class": "form-control"},  # emitted as the HTML class attribute
    )
    nickname = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            # Bugfix: %(max)d / %(min)d were swapped, rendering
            # "greater than 8 and less than 3".
            validators.Length(max=8, min=3, message="昵称须大于%(min)d字且小于%(max)d字(゚Д゚*)ノ")
        ],
        render_kw={"class": "form-control"},
    )
    avator = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ")
        ],
        render_kw={"class": "form-control"},
    )
    intro = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
        ],
        render_kw={"class": "form-control"},
    )
class OAuthSignForm(Form):
    """OAuth application registration form.

    All five fields are required plain-text inputs rendered with the
    Bootstrap "form-control" class.
    """
    appName = simple.StringField(
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
        ],
        widget=widgets.TextInput(),
        render_kw={"class": "form-control"},
    )
    homeURL = simple.StringField(
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
        ],
        widget=widgets.TextInput(),
        render_kw={"class": "form-control"},
    )
    appDesc = simple.StringField(
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
        ],
        widget=widgets.TextInput(),
        render_kw={"class": "form-control"},
    )
    backURL = simple.StringField(
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
        ],
        widget=widgets.TextInput(),
        render_kw={"class": "form-control"},
    )
    secrets = simple.StringField(
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
        ],
        widget=widgets.TextInput(),
        render_kw={"class": "form-control"},
    )
class CollectForm(Form):
    """Collection form: watch status, 1-10 score and a short comment."""
    statu = RadioField(
        choices=[ ('想看', '想看'),('在看', '在看'), ('看过', '看过'), ('搁置', '搁置'), ('抛弃', '抛弃')],
        validators=[validators.DataRequired(message="不能为空(゚Д゚*)ノ")]
    )
    score = IntegerField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            validators.NumberRange(min=1, max=10, message="超出范围了(゚Д゚*)ノ")
        ],
        render_kw={"class": "form-control"},  # emitted as the HTML class attribute
    )
    comment = simple.StringField(
        widget=widgets.TextInput(),
        validators=[
            validators.DataRequired(message="不能为空(゚Д゚*)ノ"),
            # Bugfix: only max=200 is set, but the message interpolated
            # %(min)d (unset); use %(max)d so "at most 200" renders correctly.
            validators.Length(max=200, message="最多%(max)d字(゚Д゚*)ノ")
        ],
        render_kw={"class": "form-control"},
    )
| 30.078313
| 88
| 0.582415
| 516
| 4,993
| 5.593023
| 0.145349
| 0.018711
| 0.028067
| 0.037422
| 0.865558
| 0.865558
| 0.865558
| 0.862439
| 0.857935
| 0.841649
| 0
| 0.003514
| 0.259163
| 4,993
| 166
| 89
| 30.078313
| 0.776696
| 0.041859
| 0
| 0.680851
| 0
| 0
| 0.152585
| 0.019966
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.042553
| 0.014184
| 0
| 0.177305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7f8b73432e4ace3ffed041d6c8bcbe2dc5de50ac
| 42
|
py
|
Python
|
core/modules/models/cls/__init__.py
|
FelixFu520/DAO
|
ac30bad4503408e771bc28c77dd8a20c18c15a05
|
[
"MIT"
] | null | null | null |
core/modules/models/cls/__init__.py
|
FelixFu520/DAO
|
ac30bad4503408e771bc28c77dd8a20c18c15a05
|
[
"MIT"
] | null | null | null |
core/modules/models/cls/__init__.py
|
FelixFu520/DAO
|
ac30bad4503408e771bc28c77dd8a20c18c15a05
|
[
"MIT"
] | null | null | null |
# classification
from .TIMMC import TIMMC
| 14
| 24
| 0.809524
| 5
| 42
| 6.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 42
| 2
| 25
| 21
| 0.944444
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f6aad77ea16b7b8ca9beb20c89ac827cfcb05a37
| 2,048
|
py
|
Python
|
tests/test_color.py
|
underwatergrasshopper/PyUnderGUI
|
9a3107bbcf04168eb131a6dae5d50ff35b00ea7f
|
[
"MIT"
] | null | null | null |
tests/test_color.py
|
underwatergrasshopper/PyUnderGUI
|
9a3107bbcf04168eb131a6dae5d50ff35b00ea7f
|
[
"MIT"
] | null | null | null |
tests/test_color.py
|
underwatergrasshopper/PyUnderGUI
|
9a3107bbcf04168eb131a6dae5d50ff35b00ea7f
|
[
"MIT"
] | null | null | null |
import math
from TestKit import *
from UnderGUI.Color import *
__all__ = ['test_color']
def test_color():
    """Check that ColorF/ColorI/ColorB all convert one reference colour
    (1.0, 0.5, 0.75, 0.25) consistently to the int, float and byte forms."""
    def check_i(c):
        assert c.r == 255 and c.g == 127 and c.b == 191 and c.a == 63

    def check_f(c):
        assert math.isclose(c.r, 1.0, abs_tol=0.01) and math.isclose(c.g, 0.5, abs_tol=0.01) and math.isclose(c.b, 0.75, abs_tol=0.01) and math.isclose(c.a, 0.25, abs_tol=0.01)

    def check_b(c):
        assert c.r == b'\xFF' and c.g == b'\x7F' and c.b == b'\xBF' and c.a == b'\x3F'

    ### ColorF ###
    check_i(ColorF(1, 0.5, 0.75, 0.25).to_color_i())
    check_f(ColorF(1, 0.5, 0.75, 0.25).to_color_f())
    check_b(ColorF(1, 0.5, 0.75, 0.25).to_color_b())

    ### ColorI ###
    check_i(ColorI(255, 127, 191, 63).to_color_i())
    check_f(ColorI(255, 127, 191, 63).to_color_f())
    check_b(ColorI(255, 127, 191, 63).to_color_b())

    ### ColorB ###
    check_i(ColorB(b'\xFF', b'\x7F', b'\xBF', b'\x3F').to_color_i())
    check_f(ColorB(b'\xFF', b'\x7F', b'\xBF', b'\x3F').to_color_f())
    check_b(ColorB(b'\xFF', b'\x7F', b'\xBF', b'\x3F').to_color_b())

    # byte-string constructors: rgb blob + separate alpha, then one rgba blob
    check_b(ColorB(b'\xFF\x7F\xBF', a=b'\x3F'))
    check_b(ColorB(b'\xFF\x7F\xBF\x3F'))
# Allow running this test module directly via the project's TestKit runner.
if __name__ == "__main__":
    run_test(test_color)
| 44.521739
| 172
| 0.565918
| 439
| 2,048
| 2.530752
| 0.113895
| 0.086409
| 0.129613
| 0.09721
| 0.827183
| 0.827183
| 0.827183
| 0.827183
| 0.780378
| 0.780378
| 0
| 0.112132
| 0.203125
| 2,048
| 45
| 173
| 45.511111
| 0.568627
| 0.060059
| 0
| 0.37931
| 0
| 0
| 0.093537
| 0
| 0
| 0
| 0
| 0
| 0.37931
| 1
| 0.034483
| false
| 0
| 0.103448
| 0
| 0.137931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f6eeaf7483937a033a00a8542cd47a1fd289c645
| 3,977
|
py
|
Python
|
examples/pbc/22-k_points_mp2_stagger.py
|
xinxing02/pyscf
|
30aeafc408aa87ac1fae6aaa6a42e195b5a1dc0a
|
[
"Apache-2.0"
] | null | null | null |
examples/pbc/22-k_points_mp2_stagger.py
|
xinxing02/pyscf
|
30aeafc408aa87ac1fae6aaa6a42e195b5a1dc0a
|
[
"Apache-2.0"
] | null | null | null |
examples/pbc/22-k_points_mp2_stagger.py
|
xinxing02/pyscf
|
30aeafc408aa87ac1fae6aaa6a42e195b5a1dc0a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
'''
Example code for
k-point spin-restricted periodic MP2 calculation using the staggered mesh method
Author: Xin Xing (xxing@berkeley.edu)
Reference: Staggered Mesh Method for Correlation Energy Calculations of Solids: Second-Order
Møller–Plesset Perturbation Theory, J. Chem. Theory Comput. 2021, 17, 8, 4733-4745
'''
from pyscf.pbc.mp.kmp2_stagger import KMP2_stagger
from pyscf.pbc import df, gto, scf, mp
'''
Hydrogen dimer
'''
# Minimal H2 cell in a 6x6x6 Bohr box; GTH pseudopotential + small szv basis
# keeps this example cheap.
cell = gto.Cell()
cell.pseudo = 'gth-pade'
cell.basis = 'gth-szv'
cell.ke_cutoff=100
cell.atom='''
H 3.00 3.00 2.10
H 3.00 3.00 3.90
'''
cell.a = '''
6.0 0.0 0.0
0.0 6.0 0.0
0.0 0.0 6.0
'''
cell.unit = 'B'
cell.verbose = 4
cell.build()
# HF calculation using FFTDF
nks_mf = [2,2,2]
kpts = cell.make_kpts(nks_mf, with_gamma_point=True)
kmf = scf.KRHF(cell, kpts, exxdiv='ewald')
ehf = kmf.kernel()
# staggered mesh KMP2 calculation using two submeshes of size [1,1,1] in kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=True)
emp2 = kmp.kernel()
# Hard-coded reference energies below act as regression checks for this
# exact cell/basis/mesh setup; they will fail if any parameter is changed.
assert((abs(emp2 - -0.0160902544091997))<1e-5)
# staggered mesh KMP2 calculation using two meshes of size [2,2,2], one of them is kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=False)
emp2 = kmp.kernel()
assert((abs(emp2 - -0.0140289970302513))<1e-5)
# standard KMP2 calculation
kmp = mp.KMP2(kmf)
emp2, _ = kmp.kernel()
assert((abs(emp2 - -0.0143904878990777))<1e-5)
# HF calculation using GDF
nks_mf = [2,2,2]
kpts = cell.make_kpts(nks_mf, with_gamma_point=True)
kmf = scf.KRHF(cell, kpts, exxdiv='ewald')
gdf = df.GDF(cell, kpts).build()
kmf.with_df = gdf
ehf = kmf.kernel()
# staggered mesh KMP2 calculation using two submeshes of size [1,1,1] in kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=True)
emp2 = kmp.kernel()
assert((abs(emp2 - -0.0158364523431071))<1e-5)
# staggered mesh KMP2 calculation using two meshes of size [2,2,2], one of them is kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=False)
emp2 = kmp.kernel()
assert((abs(emp2 - -0.0140280303691396))<1e-5)
# standard KMP2 calculation
kmp = mp.KMP2(kmf)
emp2, _ = kmp.kernel()
assert((abs(emp2 - -0.0141829343769316))<1e-5)
'''
Diamond system
'''
# Two-atom diamond primitive cell (lattice vectors in Angstrom below).
cell = gto.Cell()
cell.pseudo = 'gth-pade'
cell.basis = 'gth-szv'
cell.ke_cutoff=100
cell.atom='''
C 0. 0. 0.
C 1.26349729, 0.7294805 , 0.51582061
'''
cell.a = '''
2.52699457, 0. , 0.
1.26349729, 2.18844149, 0.
1.26349729, 0.7294805 , 2.06328243
'''
cell.unit = 'angstrom'
cell.verbose = 4
cell.build()
# HF calculation using FFTDF
nks_mf = [2,2,2]
kpts = cell.make_kpts(nks_mf, with_gamma_point=True)
kmf = scf.KRHF(cell, kpts, exxdiv='ewald')
ehf = kmf.kernel()
# staggered mesh KMP2 calculation using two submeshes of size [1,1,1] in kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=True)
emp2 = kmp.kernel()
# Hard-coded reference energies below act as regression checks for this
# exact cell/basis/mesh setup; they will fail if any parameter is changed.
assert((abs(emp2 - -0.156289981810986))<1e-5)
# staggered mesh KMP2 calculation using two meshes of size [2,2,2], one of them is kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=False)
emp2 = kmp.kernel()
assert((abs(emp2 - -0.105454107635884))<1e-5)
# standard KMP2 calculation
kmp = mp.KMP2(kmf)
emp2, _ = kmp.kernel()
assert((abs(emp2 - -0.095517731535516))<1e-5)
# HF calculation using GDF
nks_mf = [2,2,2]
kpts = cell.make_kpts(nks_mf, with_gamma_point=True)
kmf = scf.KRHF(cell, kpts, exxdiv='ewald')
gdf = df.GDF(cell, kpts).build()
kmf.with_df = gdf
ehf = kmf.kernel()
# staggered mesh KMP2 calculation using two submeshes of size [1,1,1] in kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=True)
emp2 = kmp.kernel()
assert((abs(emp2 - -0.154923152683604))<1e-5)
# staggered mesh KMP2 calculation using two meshes of size [2,2,2], one of them is kmf.kpts
kmp = KMP2_stagger(kmf, flag_submesh=False)
emp2 = kmp.kernel()
assert((abs(emp2 - -0.105421948003715))<1e-5)
# standard KMP2 calculation
kmp = mp.KMP2(kmf)
emp2, _ = kmp.kernel()
assert((abs(emp2 - -0.0952009565805345))<1e-5)
| 26.164474
| 93
| 0.685441
| 656
| 3,977
| 4.08689
| 0.21189
| 0.011936
| 0.058187
| 0.085043
| 0.743752
| 0.73853
| 0.73853
| 0.73853
| 0.73853
| 0.73853
| 0
| 0.137119
| 0.167463
| 3,977
| 151
| 94
| 26.337748
| 0.672304
| 0.316822
| 0
| 0.711111
| 0
| 0
| 0.13741
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 1
| 0
| false
| 0
| 0.022222
| 0
| 0.022222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
63e437aaf9a032fc8bc3a23a143c254c49727872
| 9,799
|
py
|
Python
|
extra_models.py
|
vincnt/tcn-audio-fx
|
c3ca38a7975ca99f7aebebf310a016e1cbcfdf0c
|
[
"MIT"
] | null | null | null |
extra_models.py
|
vincnt/tcn-audio-fx
|
c3ca38a7975ca99f7aebebf310a016e1cbcfdf0c
|
[
"MIT"
] | null | null | null |
extra_models.py
|
vincnt/tcn-audio-fx
|
c3ca38a7975ca99f7aebebf310a016e1cbcfdf0c
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from models import BaseTCN, TCN, CausalConv1d, _conv_stack
from model_utilities import GatedActivation
class OutputcoderTCN(TCN):
    """TCN whose output projection is replaced by a gated 1x1-conv "funnel".

    The stack narrows 64 -> 32 -> 16 -> 8 -> 1 channels.
    NOTE(review): each GatedActivation is declared with half the preceding
    conv's output channels (64->32, 32->16, 16->8) — presumably gating
    splits channels in half; confirm against GatedActivation's code.
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film',force_local_residual=False,conditioning_structure='shallow'):
        super(OutputcoderTCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual,conditioning_structure)
        # Override the base class's output layer only; forward() is inherited.
        self.output_layer = torch.nn.Sequential(
            CausalConv1d(in_channels=num_channels,out_channels=64,kernel_size=1,bias=bias),
            GatedActivation(num_channels=32),
            CausalConv1d(in_channels=32,out_channels=32,kernel_size=1,bias=bias),
            GatedActivation(num_channels=16),
            CausalConv1d(in_channels=16,out_channels=16,kernel_size=1,bias=bias),
            GatedActivation(num_channels=8),
            CausalConv1d(in_channels=8,out_channels=1,kernel_size=1,bias=bias)
        )
class DeepOutputcoderTCN(TCN):
    """TCN whose output projection is a deep ReLU 1x1-conv stack.

    Channel path: num_channels -> 64 -> 128 -> 64 -> 32 -> 8 -> 4 ->
    num_channels -> 1, with ReLU between every pair of convs.
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film',force_local_residual=False,conditioning_structure='shallow'):
        super(DeepOutputcoderTCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual,conditioning_structure)
        # Override the base class's output layer only; forward() is inherited.
        self.output_layer = torch.nn.Sequential(
            CausalConv1d(in_channels=num_channels,out_channels=64,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=64,out_channels=128,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=128,out_channels=64,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=64,out_channels=32,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=32,out_channels=8,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=8,out_channels=4,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=4,out_channels=num_channels,kernel_size=1,bias=bias),
            torch.nn.ReLU(),
            CausalConv1d(in_channels=num_channels,out_channels=1,kernel_size=1,bias=bias)
        )
class NormedTCN(BaseTCN):
    """TCN variant that normalizes activations along the last dimension
    after every stage (hidden activation, residual, their sum, and the
    final output).
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film',force_local_residual=False,conditioning_structure='shallow'):
        super(NormedTCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual,conditioning_structure)

    def forward(self, x, cond_params):
        out = x
        out = self.input_layer(out)  # this wasnt there before I think
        for hidden, residual in zip(self.hidden, self.residuals):
            skip_x = out
            out_hidden = hidden(skip_x)
            if self.conditioning:
                # Bugfix: FiLM was applied to the stale `out` and its result
                # immediately overwritten by activ(out_hidden), making the
                # conditioning a no-op; condition the hidden activations as
                # the sibling TCN variants (ParallelTCN, OctopusTCN) do.
                out_hidden = self.film(out_hidden, cond_params)
            out = self.activ(out_hidden)
            out = torch.nn.functional.normalize(out, dim=-1)
            res = residual(skip_x)
            res = torch.nn.functional.normalize(res, dim=-1)
            # Bugfix: the normalized residual `res` was computed but never
            # used — the raw residual was added instead (and residual() ran
            # twice). Add the normalized residual.
            out = out + res
            out = torch.nn.functional.normalize(out, dim=-1)
        output = self.output_layer(out)
        output = torch.nn.functional.normalize(output, dim=-1)
        return output, None
class ParallelTCN(BaseTCN):
    """TCN with a second, non-residual stack of dilated convs run in
    parallel with the base residual stack; both outputs are concatenated
    along the channel axis before the final 1x1 projection.
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film',force_local_residual=False,conditioning_structure='shallow'):
        super(ParallelTCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual,conditioning_structure)
        self.num_channels = num_channels
        self.conditioning = conditioning
        dilations = [dilation_factor ** d for d in range(dilation_depth)] * num_repeat
        # Gated activations consume twice the channels (split into value/gate).
        internal_channels = int(num_channels * 2) if 'gated' in activation else int(num_channels)
        # NOTE(review): the original had an if/elif choosing internal_channels
        # for grouping == 'local_out', but it was immediately shadowed by this
        # unconditional assignment (dead code, removed); the effective
        # behavior is preserved exactly.
        groups = 1 if grouping == 'all' else num_channels
        self.hidden_parallel = _conv_stack(dilations, num_channels, internal_channels, kernel_size, groups=groups, bias=bias)
        # Concatenation doubles the channels feeding the output projection.
        self.output_layer = CausalConv1d(in_channels=num_channels*2,out_channels=1,kernel_size=1,bias=bias)

    def forward(self, x, cond_params):
        out = x
        out = self.input_layer(out)  # this wasnt there before I think
        # Residual branch: standard cumulative-residual TCN pass.
        for hidden, residual in zip(self.hidden, self.residuals):
            skip_x = out
            out_hidden = hidden(skip_x)
            if self.conditioning:
                out_hidden = self.film(out_hidden, cond_params)
            out = self.activ(out_hidden)
            out = out + residual(skip_x)
        # Parallel branch: plain feed-forward pass, no residual connections.
        parallel_out = self.input_layer(x)
        for hidden in self.hidden_parallel:
            parallel_out = hidden(parallel_out)
            if self.conditioning:
                parallel_out = self.film(parallel_out, cond_params)
            parallel_out = self.activ(parallel_out)
        combined_out = torch.cat([out, parallel_out],dim=1)
        data = {'pre_out':combined_out}
        output = self.output_layer(combined_out)
        return torch.tanh(output), data
# input into each layer rather than cumulative residual
# input into each layer rather than cumulative residual
class Parallel2TCN(BaseTCN):
    """TCN variant where every hidden layer reads the same input features.

    `skip_x` is bound once to the input-layer output and never updated, so
    each layer sees the original features rather than the previous layer's
    output; since `out` is overwritten each iteration, only the last
    layer's activation (plus its residual) reaches the output layer.
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film',force_local_residual=False):
        super(Parallel2TCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual)

    def forward(self, x, cond_params):
        out = x
        out = self.input_layer(out)  # this wasnt there before I think
        skip_x = out  # fixed for all layers — see class docstring
        for hidden, residual in zip(self.hidden, self.residuals):
            out_hidden = hidden(skip_x)
            out = self.activ(out_hidden)
            if self.conditioning:
                # NOTE(review): FiLM is applied after activation here,
                # unlike ParallelTCN which films pre-activation — confirm
                # this ordering is intentional.
                out = self.film(out, cond_params)
            out = out + residual(skip_x)
        pre_out_data = out
        output = self.output_layer(out)
        data = {'pre_out':pre_out_data}
        return torch.tanh(output), data
class NotanhTCN(BaseTCN):
    """Standard cumulative-residual TCN forward pass, but the final output
    is L2-normalized along the last dimension instead of passed through
    tanh.
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film',force_local_residual=False):
        super(NotanhTCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual)

    def forward(self, x, cond_params):
        out = x
        out = self.input_layer(out)  # this wasnt there before I think
        for hidden, residual in zip(self.hidden, self.residuals):
            skip_x = out
            out_hidden = hidden(skip_x)
            out = self.activ(out_hidden)
            if self.conditioning:
                # NOTE(review): FiLM applied post-activation here (pre-
                # activation in ParallelTCN/OctopusTCN) — confirm intent.
                out = self.film(out, cond_params)
            out = out + residual(skip_x)
        pre_out_data = out
        output = self.output_layer(out)
        data = {'pre_out':pre_out_data}
        # Normalize instead of tanh-squashing — hence the class name.
        output = torch.nn.functional.normalize(output, dim=-1)
        return output, data
class OctopusTCN(BaseTCN):
    """TCN that concatenates every layer's output ("octopus arms") along the
    channel axis, mixes them with a grouped 1x1 conv, then projects to one
    channel. Like Parallel2TCN, every layer reads the same input features
    (`skip_x` is fixed before the loop).
    """
    def __init__(self, num_channels, dilation_depth, num_repeat, kernel_size=2, dilation_factor=2, conditioning=False, num_conditioning=3, activation='gated', grouping='all', bias=True, conditioning_type='basic_film', force_local_residual=False):
        super(OctopusTCN, self).__init__(num_channels, dilation_depth, num_repeat, kernel_size, dilation_factor, conditioning, num_conditioning, activation, grouping, bias, conditioning_type,force_local_residual)
        # One num_channels-wide slab per layer: dilation_depth * num_repeat layers.
        mix_channels = num_channels * dilation_depth * num_repeat
        groups = 1 if grouping == 'all' else num_channels
        self.output_mixer = nn.Conv1d(
            in_channels= mix_channels,
            out_channels=mix_channels,
            kernel_size=1,
            groups=groups
        )
        self.output_layer = nn.Conv1d(
            in_channels= mix_channels,
            out_channels=1,
            kernel_size=1,
        )

    def forward(self, x, cond_params):
        out = x
        out = self.input_layer(out)  # this wasnt there before I think
        skip_x = out  # fixed for all layers — see class docstring
        layer_outs = []
        for hidden, residual in zip(self.hidden, self.residuals):
            out_hidden = hidden(skip_x)
            if self.conditioning:
                out_hidden = self.film(out_hidden, cond_params)
            out = self.activ(out_hidden)
            layer_out = out + residual(skip_x)
            layer_outs.append(layer_out)
        # Stack every layer's output channel-wise, mix, then project to 1 channel.
        out = torch.cat(layer_outs, dim=1)
        out = self.output_mixer(out)
        out = self.output_layer(out)
        return out, None
| 52.40107
| 278
| 0.708338
| 1,271
| 9,799
| 5.173879
| 0.088906
| 0.051855
| 0.038929
| 0.053528
| 0.806569
| 0.778437
| 0.76247
| 0.758972
| 0.708029
| 0.685219
| 0
| 0.013645
| 0.192265
| 9,799
| 187
| 279
| 52.40107
| 0.817183
| 0.021737
| 0
| 0.54717
| 0
| 0
| 0.020668
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075472
| false
| 0
| 0.025157
| 0
| 0.176101
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
63ed0f687247a0bb87e544a10ae5b7f40c1dc270
| 147,338
|
py
|
Python
|
src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py
|
doraskayo/buildstream
|
1c72d4342ae7df360808de22c5e49f55dbb6bec6
|
[
"Apache-2.0"
] | null | null | null |
src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py
|
doraskayo/buildstream
|
1c72d4342ae7df360808de22c5e49f55dbb6bec6
|
[
"Apache-2.0"
] | null | null | null |
src/buildstream/_protos/build/bazel/remote/execution/v2/remote_execution_pb2.py
|
doraskayo/buildstream
|
1c72d4342ae7df360808de22c5e49f55dbb6bec6
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: build/bazel/remote/execution/v2/remote_execution.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from buildstream._protos.build.bazel.semver import semver_pb2 as build_dot_bazel_dot_semver_dot_semver__pb2
from buildstream._protos.google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from buildstream._protos.google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from buildstream._protos.google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='build/bazel/remote/execution/v2/remote_execution.proto',
package='build.bazel.remote.execution.v2',
syntax='proto3',
serialized_options=b'\n\037build.bazel.remote.execution.v2B\024RemoteExecutionProtoP\001Z\017remoteexecution\242\002\003REX\252\002\037Build.Bazel.Remote.Execution.V2',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n6build/bazel/remote/execution/v2/remote_execution.proto\x12\x1f\x62uild.bazel.remote.execution.v2\x1a\x1f\x62uild/bazel/semver/semver.proto\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"\xdb\x01\n\x06\x41\x63tion\x12?\n\x0e\x63ommand_digest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x42\n\x11input_root_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12*\n\x07timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x64o_not_cache\x18\x07 \x01(\x08J\x04\x08\x03\x10\x06J\x04\x08\x08\x10\t\"\xed\x02\n\x07\x43ommand\x12\x11\n\targuments\x18\x01 \x03(\t\x12[\n\x15\x65nvironment_variables\x18\x02 \x03(\x0b\x32<.build.bazel.remote.execution.v2.Command.EnvironmentVariable\x12\x14\n\x0coutput_files\x18\x03 \x03(\t\x12\x1a\n\x12output_directories\x18\x04 \x03(\t\x12\x14\n\x0coutput_paths\x18\x07 \x03(\t\x12;\n\x08platform\x18\x05 \x01(\x0b\x32).build.bazel.remote.execution.v2.Platform\x12\x19\n\x11working_directory\x18\x06 \x01(\t\x12\x1e\n\x16output_node_properties\x18\x08 \x03(\t\x1a\x32\n\x13\x45nvironmentVariable\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"{\n\x08Platform\x12\x46\n\nproperties\x18\x01 \x03(\x0b\x32\x32.build.bazel.remote.execution.v2.Platform.Property\x1a\'\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\x9a\x02\n\tDirectory\x12\x38\n\x05\x66iles\x18\x01 \x03(\x0b\x32).build.bazel.remote.execution.v2.FileNode\x12\x43\n\x0b\x64irectories\x18\x02 \x03(\x0b\x32..build.bazel.remote.execution.v2.DirectoryNode\x12>\n\x08symlinks\x18\x03 \x03(\x0b\x32,.build.bazel.remote.execution.v2.SymlinkNode\x12H\n\x0fnode_properties\x18\x05 
\x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x04\x10\x05\"+\n\x0cNodeProperty\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xaf\x01\n\x0eNodeProperties\x12\x41\n\nproperties\x18\x01 \x03(\x0b\x32-.build.bazel.remote.execution.v2.NodeProperty\x12)\n\x05mtime\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\tunix_mode\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\"\xbe\x01\n\x08\x46ileNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12H\n\x0fnode_properties\x18\x06 \x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x03\x10\x04J\x04\x08\x05\x10\x06\"V\n\rDirectoryNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"{\n\x0bSymlinkNode\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06target\x18\x02 \x01(\t\x12H\n\x0fnode_properties\x18\x04 \x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x03\x10\x04\"*\n\x06\x44igest\x12\x0c\n\x04hash\x18\x01 \x01(\t\x12\x12\n\nsize_bytes\x18\x02 \x01(\x03\"\xec\x04\n\x16\x45xecutedActionMetadata\x12\x0e\n\x06worker\x18\x01 \x01(\t\x12\x34\n\x10queued_timestamp\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\x16worker_start_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12>\n\x1aworker_completed_timestamp\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12?\n\x1binput_fetch_start_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x1finput_fetch_completed_timestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x19\x65xecution_start_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x1d\x65xecution_completed_timestamp\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x1doutput_upload_start_timestamp\x18\t 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n!output_upload_completed_timestamp\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x9f\x05\n\x0c\x41\x63tionResult\x12\x41\n\x0coutput_files\x18\x02 \x03(\x0b\x32+.build.bazel.remote.execution.v2.OutputFile\x12L\n\x14output_file_symlinks\x18\n \x03(\x0b\x32..build.bazel.remote.execution.v2.OutputSymlink\x12G\n\x0foutput_symlinks\x18\x0c \x03(\x0b\x32..build.bazel.remote.execution.v2.OutputSymlink\x12L\n\x12output_directories\x18\x03 \x03(\x0b\x32\x30.build.bazel.remote.execution.v2.OutputDirectory\x12Q\n\x19output_directory_symlinks\x18\x0b \x03(\x0b\x32..build.bazel.remote.execution.v2.OutputSymlink\x12\x11\n\texit_code\x18\x04 \x01(\x05\x12\x12\n\nstdout_raw\x18\x05 \x01(\x0c\x12>\n\rstdout_digest\x18\x06 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x12\n\nstderr_raw\x18\x07 \x01(\x0c\x12>\n\rstderr_digest\x18\x08 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12S\n\x12\x65xecution_metadata\x18\t \x01(\x0b\x32\x37.build.bazel.remote.execution.v2.ExecutedActionMetadataJ\x04\x08\x01\x10\x02\"\xd2\x01\n\nOutputFile\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x37\n\x06\x64igest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\ris_executable\x18\x04 \x01(\x08\x12\x10\n\x08\x63ontents\x18\x05 \x01(\x0c\x12H\n\x0fnode_properties\x18\x07 \x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x03\x10\x04J\x04\x08\x06\x10\x07\"~\n\x04Tree\x12\x38\n\x04root\x18\x01 \x01(\x0b\x32*.build.bazel.remote.execution.v2.Directory\x12<\n\x08\x63hildren\x18\x02 \x03(\x0b\x32*.build.bazel.remote.execution.v2.Directory\"c\n\x0fOutputDirectory\x12\x0c\n\x04path\x18\x01 \x01(\t\x12<\n\x0btree_digest\x18\x03 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.DigestJ\x04\x08\x02\x10\x03\"}\n\rOutputSymlink\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0e\n\x06target\x18\x02 \x01(\t\x12H\n\x0fnode_properties\x18\x04 
\x01(\x0b\x32/.build.bazel.remote.execution.v2.NodePropertiesJ\x04\x08\x03\x10\x04\"#\n\x0f\x45xecutionPolicy\x12\x10\n\x08priority\x18\x01 \x01(\x05\"&\n\x12ResultsCachePolicy\x12\x10\n\x08priority\x18\x01 \x01(\x05\"\xb3\x02\n\x0e\x45xecuteRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x19\n\x11skip_cache_lookup\x18\x03 \x01(\x08\x12>\n\raction_digest\x18\x06 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12J\n\x10\x65xecution_policy\x18\x07 \x01(\x0b\x32\x30.build.bazel.remote.execution.v2.ExecutionPolicy\x12Q\n\x14results_cache_policy\x18\x08 \x01(\x0b\x32\x33.build.bazel.remote.execution.v2.ResultsCachePolicyJ\x04\x08\x02\x10\x03J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\"Z\n\x07LogFile\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x16\n\x0ehuman_readable\x18\x02 \x01(\x08\"\xd0\x02\n\x0f\x45xecuteResponse\x12=\n\x06result\x18\x01 \x01(\x0b\x32-.build.bazel.remote.execution.v2.ActionResult\x12\x15\n\rcached_result\x18\x02 \x01(\x08\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12U\n\x0bserver_logs\x18\x04 \x03(\x0b\x32@.build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry\x12\x0f\n\x07message\x18\x05 \x01(\t\x1a[\n\x0fServerLogsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.build.bazel.remote.execution.v2.LogFile:\x02\x38\x01\"a\n\x0e\x45xecutionStage\"O\n\x05Value\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0f\n\x0b\x43\x41\x43HE_CHECK\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\r\n\tEXECUTING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\"\xd8\x01\n\x18\x45xecuteOperationMetadata\x12\x44\n\x05stage\x18\x01 \x01(\x0e\x32\x35.build.bazel.remote.execution.v2.ExecutionStage.Value\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x1a\n\x12stdout_stream_name\x18\x03 \x01(\t\x12\x1a\n\x12stderr_stream_name\x18\x04 \x01(\t\"$\n\x14WaitExecutionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t\"\xba\x01\n\x16GetActionResultRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x15\n\rinline_stdout\x18\x03 \x01(\x08\x12\x15\n\rinline_stderr\x18\x04 \x01(\x08\x12\x1b\n\x13inline_output_files\x18\x05 \x03(\t\"\x8b\x02\n\x19UpdateActionResultRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12>\n\raction_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x44\n\raction_result\x18\x03 \x01(\x0b\x32-.build.bazel.remote.execution.v2.ActionResult\x12Q\n\x14results_cache_policy\x18\x04 \x01(\x0b\x32\x33.build.bazel.remote.execution.v2.ResultsCachePolicy\"o\n\x17\x46indMissingBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12=\n\x0c\x62lob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"a\n\x18\x46indMissingBlobsResponse\x12\x45\n\x14missing_blob_digests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xd6\x01\n\x17\x42\x61tchUpdateBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12R\n\x08requests\x18\x02 \x03(\x0b\x32@.build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request\x1aP\n\x07Request\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\xda\x01\n\x18\x42\x61tchUpdateBlobsResponse\x12U\n\tresponses\x18\x01 \x03(\x0b\x32\x42.build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response\x1ag\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\"h\n\x15\x42\x61tchReadBlobsRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x38\n\x07\x64igests\x18\x02 \x03(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\"\xe4\x01\n\x16\x42\x61tchReadBlobsResponse\x12S\n\tresponses\x18\x01 \x03(\x0b\x32@.build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response\x1au\n\x08Response\x12\x37\n\x06\x64igest\x18\x01 
\x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\"\n\x06status\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\"\x8c\x01\n\x0eGetTreeRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12<\n\x0broot_digest\x18\x02 \x01(\x0b\x32\'.build.bazel.remote.execution.v2.Digest\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"k\n\x0fGetTreeResponse\x12?\n\x0b\x64irectories\x18\x01 \x03(\x0b\x32*.build.bazel.remote.execution.v2.Directory\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"/\n\x16GetCapabilitiesRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\"\xe3\x02\n\x12ServerCapabilities\x12N\n\x12\x63\x61\x63he_capabilities\x18\x01 \x01(\x0b\x32\x32.build.bazel.remote.execution.v2.CacheCapabilities\x12V\n\x16\x65xecution_capabilities\x18\x02 \x01(\x0b\x32\x36.build.bazel.remote.execution.v2.ExecutionCapabilities\x12:\n\x16\x64\x65precated_api_version\x18\x03 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\x12\x33\n\x0flow_api_version\x18\x04 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\x12\x34\n\x10high_api_version\x18\x05 \x01(\x0b\x32\x1a.build.bazel.semver.SemVer\"f\n\x0e\x44igestFunction\"T\n\x05Value\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06SHA256\x10\x01\x12\x08\n\x04SHA1\x10\x02\x12\x07\n\x03MD5\x10\x03\x12\x07\n\x03VSO\x10\x04\x12\n\n\x06SHA384\x10\x05\x12\n\n\x06SHA512\x10\x06\"7\n\x1d\x41\x63tionCacheUpdateCapabilities\x12\x16\n\x0eupdate_enabled\x18\x01 \x01(\x08\"\xac\x01\n\x14PriorityCapabilities\x12W\n\npriorities\x18\x01 \x03(\x0b\x32\x43.build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange\x1a;\n\rPriorityRange\x12\x14\n\x0cmin_priority\x18\x01 \x01(\x05\x12\x14\n\x0cmax_priority\x18\x02 \x01(\x05\"P\n\x1bSymlinkAbsolutePathStrategy\"1\n\x05Value\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0e\n\nDISALLOWED\x10\x01\x12\x0b\n\x07\x41LLOWED\x10\x02\"\xb9\x03\n\x11\x43\x61\x63heCapabilities\x12N\n\x0f\x64igest_function\x18\x01 
\x03(\x0e\x32\x35.build.bazel.remote.execution.v2.DigestFunction.Value\x12h\n action_cache_update_capabilities\x18\x02 \x01(\x0b\x32>.build.bazel.remote.execution.v2.ActionCacheUpdateCapabilities\x12Z\n\x1b\x63\x61\x63he_priority_capabilities\x18\x03 \x01(\x0b\x32\x35.build.bazel.remote.execution.v2.PriorityCapabilities\x12\"\n\x1amax_batch_total_size_bytes\x18\x04 \x01(\x03\x12j\n\x1esymlink_absolute_path_strategy\x18\x05 \x01(\x0e\x32\x42.build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy.Value\"\x80\x02\n\x15\x45xecutionCapabilities\x12N\n\x0f\x64igest_function\x18\x01 \x01(\x0e\x32\x35.build.bazel.remote.execution.v2.DigestFunction.Value\x12\x14\n\x0c\x65xec_enabled\x18\x02 \x01(\x08\x12^\n\x1f\x65xecution_priority_capabilities\x18\x03 \x01(\x0b\x32\x35.build.bazel.remote.execution.v2.PriorityCapabilities\x12!\n\x19supported_node_properties\x18\x04 \x03(\t\"6\n\x0bToolDetails\x12\x11\n\ttool_name\x18\x01 \x01(\t\x12\x14\n\x0ctool_version\x18\x02 \x01(\t\"\xa7\x01\n\x0fRequestMetadata\x12\x42\n\x0ctool_details\x18\x01 \x01(\x0b\x32,.build.bazel.remote.execution.v2.ToolDetails\x12\x11\n\taction_id\x18\x02 \x01(\t\x12\x1a\n\x12tool_invocation_id\x18\x03 \x01(\t\x12!\n\x19\x63orrelated_invocations_id\x18\x04 
\x01(\t2\xb9\x02\n\tExecution\x12\x8e\x01\n\x07\x45xecute\x12/.build.bazel.remote.execution.v2.ExecuteRequest\x1a\x1d.google.longrunning.Operation\"1\x82\xd3\xe4\x93\x02+\"&/v2/{instance_name=**}/actions:execute:\x01*0\x01\x12\x9a\x01\n\rWaitExecution\x12\x35.build.bazel.remote.execution.v2.WaitExecutionRequest\x1a\x1d.google.longrunning.Operation\"1\x82\xd3\xe4\x93\x02+\"&/v2/{name=operations/**}:waitExecution:\x01*0\x01\x32\xd6\x03\n\x0b\x41\x63tionCache\x12\xd7\x01\n\x0fGetActionResult\x12\x37.build.bazel.remote.execution.v2.GetActionResultRequest\x1a-.build.bazel.remote.execution.v2.ActionResult\"\\\x82\xd3\xe4\x93\x02V\x12T/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}\x12\xec\x01\n\x12UpdateActionResult\x12:.build.bazel.remote.execution.v2.UpdateActionResultRequest\x1a-.build.bazel.remote.execution.v2.ActionResult\"k\x82\xd3\xe4\x93\x02\x65\x1aT/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}:\raction_result2\x9b\x06\n\x19\x43ontentAddressableStorage\x12\xbc\x01\n\x10\x46indMissingBlobs\x12\x38.build.bazel.remote.execution.v2.FindMissingBlobsRequest\x1a\x39.build.bazel.remote.execution.v2.FindMissingBlobsResponse\"3\x82\xd3\xe4\x93\x02-\"(/v2/{instance_name=**}/blobs:findMissing:\x01*\x12\xbc\x01\n\x10\x42\x61tchUpdateBlobs\x12\x38.build.bazel.remote.execution.v2.BatchUpdateBlobsRequest\x1a\x39.build.bazel.remote.execution.v2.BatchUpdateBlobsResponse\"3\x82\xd3\xe4\x93\x02-\"(/v2/{instance_name=**}/blobs:batchUpdate:\x01*\x12\xb4\x01\n\x0e\x42\x61tchReadBlobs\x12\x36.build.bazel.remote.execution.v2.BatchReadBlobsRequest\x1a\x37.build.bazel.remote.execution.v2.BatchReadBlobsResponse\"1\x82\xd3\xe4\x93\x02+\"&/v2/{instance_name=**}/blobs:batchRead:\x01*\x12\xc8\x01\n\x07GetTree\x12/.build.bazel.remote.execution.v2.GetTreeRequest\x1a\x30.build.bazel.remote.execution.v2.GetTreeResponse\"X\x82\xd3\xe4\x93\x02R\x12P/v2/{instance_name=**}/blobs/{root_digest.hash}/{root_digest.size_bytes}:getTre
e0\x01\x32\xbd\x01\n\x0c\x43\x61pabilities\x12\xac\x01\n\x0fGetCapabilities\x12\x37.build.bazel.remote.execution.v2.GetCapabilitiesRequest\x1a\x33.build.bazel.remote.execution.v2.ServerCapabilities\"+\x82\xd3\xe4\x93\x02%\x12#/v2/{instance_name=**}/capabilitiesBr\n\x1f\x62uild.bazel.remote.execution.v2B\x14RemoteExecutionProtoP\x01Z\x0fremoteexecution\xa2\x02\x03REX\xaa\x02\x1f\x42uild.Bazel.Remote.Execution.V2b\x06proto3'
,
dependencies=[build_dot_bazel_dot_semver_dot_semver__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
# EnumDescriptor for build.bazel.remote.execution.v2.ExecutionStage.Value.
# NOTE(review): protoc-generated ("DO NOT EDIT" header above). The
# serialized_start/serialized_end values are byte offsets into
# DESCRIPTOR.serialized_pb and must stay in sync with it.
_EXECUTIONSTAGE_VALUE = _descriptor.EnumDescriptor(
name='Value',
full_name='build.bazel.remote.execution.v2.ExecutionStage.Value',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CACHE_CHECK', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='QUEUED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXECUTING', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COMPLETED', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=4688,
serialized_end=4767,
)
# Register with the default symbol database so the enum is resolvable by full name.
_sym_db.RegisterEnumDescriptor(_EXECUTIONSTAGE_VALUE)
# EnumDescriptor for build.bazel.remote.execution.v2.DigestFunction.Value
# (supported content-digest algorithms). Generated code; offsets index into
# DESCRIPTOR.serialized_pb.
_DIGESTFUNCTION_VALUE = _descriptor.EnumDescriptor(
name='Value',
full_name='build.bazel.remote.execution.v2.DigestFunction.Value',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHA256', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHA1', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MD5', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='VSO', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHA384', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHA512', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7149,
serialized_end=7233,
)
# Register with the default symbol database so the enum is resolvable by full name.
_sym_db.RegisterEnumDescriptor(_DIGESTFUNCTION_VALUE)
# EnumDescriptor for build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy.Value.
# Generated code; offsets index into DESCRIPTOR.serialized_pb.
_SYMLINKABSOLUTEPATHSTRATEGY_VALUE = _descriptor.EnumDescriptor(
name='Value',
full_name='build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy.Value',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DISALLOWED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ALLOWED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7498,
serialized_end=7547,
)
# Register with the default symbol database so the enum is resolvable by full name.
_sym_db.RegisterEnumDescriptor(_SYMLINKABSOLUTEPATHSTRATEGY_VALUE)
# Descriptor for the Action message (fields: command_digest, input_root_digest,
# timeout, do_not_cache). Generated code; the numeric type/cpp_type/label codes
# are google.protobuf.descriptor.FieldDescriptor constants.
_ACTION = _descriptor.Descriptor(
name='Action',
full_name='build.bazel.remote.execution.v2.Action',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='command_digest', full_name='build.bazel.remote.execution.v2.Action.command_digest', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='input_root_digest', full_name='build.bazel.remote.execution.v2.Action.input_root_digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timeout', full_name='build.bazel.remote.execution.v2.Action.timeout', index=2,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='do_not_cache', full_name='build.bazel.remote.execution.v2.Action.do_not_cache', index=3,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=314,
serialized_end=533,
)
# Descriptor for the Command.EnvironmentVariable nested message
# (a simple name/value string pair). Generated code.
_COMMAND_ENVIRONMENTVARIABLE = _descriptor.Descriptor(
name='EnvironmentVariable',
full_name='build.bazel.remote.execution.v2.Command.EnvironmentVariable',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.Command.EnvironmentVariable.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='build.bazel.remote.execution.v2.Command.EnvironmentVariable.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=851,
serialized_end=901,
)
# Descriptor for the Command message (arguments, environment_variables,
# output paths/files/directories, platform, working_directory,
# output_node_properties). Nests _COMMAND_ENVIRONMENTVARIABLE. Generated code.
_COMMAND = _descriptor.Descriptor(
name='Command',
full_name='build.bazel.remote.execution.v2.Command',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='arguments', full_name='build.bazel.remote.execution.v2.Command.arguments', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='environment_variables', full_name='build.bazel.remote.execution.v2.Command.environment_variables', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_files', full_name='build.bazel.remote.execution.v2.Command.output_files', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_directories', full_name='build.bazel.remote.execution.v2.Command.output_directories', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_paths', full_name='build.bazel.remote.execution.v2.Command.output_paths', index=4,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='platform', full_name='build.bazel.remote.execution.v2.Command.platform', index=5,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='working_directory', full_name='build.bazel.remote.execution.v2.Command.working_directory', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_node_properties', full_name='build.bazel.remote.execution.v2.Command.output_node_properties', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_COMMAND_ENVIRONMENTVARIABLE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=536,
serialized_end=901,
)
# Descriptor for the Platform.Property nested message
# (a simple name/value string pair). Generated code.
_PLATFORM_PROPERTY = _descriptor.Descriptor(
name='Property',
full_name='build.bazel.remote.execution.v2.Platform.Property',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.Platform.Property.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='build.bazel.remote.execution.v2.Platform.Property.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=987,
serialized_end=1026,
)
# Descriptor for the Platform message (repeated Property entries).
# Nests _PLATFORM_PROPERTY. Generated code.
_PLATFORM = _descriptor.Descriptor(
name='Platform',
full_name='build.bazel.remote.execution.v2.Platform',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='properties', full_name='build.bazel.remote.execution.v2.Platform.properties', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_PLATFORM_PROPERTY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=903,
serialized_end=1026,
)
# Descriptor for the Directory message (repeated files/directories/symlinks
# plus optional node_properties). Generated code.
_DIRECTORY = _descriptor.Descriptor(
name='Directory',
full_name='build.bazel.remote.execution.v2.Directory',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='files', full_name='build.bazel.remote.execution.v2.Directory.files', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='directories', full_name='build.bazel.remote.execution.v2.Directory.directories', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='symlinks', full_name='build.bazel.remote.execution.v2.Directory.symlinks', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='node_properties', full_name='build.bazel.remote.execution.v2.Directory.node_properties', index=3,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1029,
serialized_end=1311,
)
# Descriptor for the NodeProperty message (a name/value string pair).
# Generated code.
_NODEPROPERTY = _descriptor.Descriptor(
name='NodeProperty',
full_name='build.bazel.remote.execution.v2.NodeProperty',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.NodeProperty.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='build.bazel.remote.execution.v2.NodeProperty.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1313,
serialized_end=1356,
)
# Descriptor for the NodeProperties message (repeated properties,
# optional mtime and unix_mode). Generated code.
_NODEPROPERTIES = _descriptor.Descriptor(
name='NodeProperties',
full_name='build.bazel.remote.execution.v2.NodeProperties',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='properties', full_name='build.bazel.remote.execution.v2.NodeProperties.properties', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mtime', full_name='build.bazel.remote.execution.v2.NodeProperties.mtime', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unix_mode', full_name='build.bazel.remote.execution.v2.NodeProperties.unix_mode', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1359,
serialized_end=1534,
)
# Auto-generated (protoc) Descriptor for FileNode: a named file entry in a
# Directory, identified by a content digest. Do not edit by hand.
_FILENODE = _descriptor.Descriptor(
name='FileNode',
full_name='build.bazel.remote.execution.v2.FileNode',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `name` (string).
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.FileNode.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `digest` (message).
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.FileNode.digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: `is_executable` (bool, type 8). Field number 3 is unused/reserved
# in the .proto this was generated from.
_descriptor.FieldDescriptor(
name='is_executable', full_name='build.bazel.remote.execution.v2.FileNode.is_executable', index=2,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 6: `node_properties` (message).
_descriptor.FieldDescriptor(
name='node_properties', full_name='build.bazel.remote.execution.v2.FileNode.node_properties', index=3,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1537,
serialized_end=1727,
)
# Auto-generated (protoc) Descriptor for DirectoryNode: a named subdirectory
# entry referencing its Directory message by digest. Do not edit by hand.
_DIRECTORYNODE = _descriptor.Descriptor(
name='DirectoryNode',
full_name='build.bazel.remote.execution.v2.DirectoryNode',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `name` (string).
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.DirectoryNode.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `digest` (message).
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.DirectoryNode.digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1729,
serialized_end=1815,
)
# Auto-generated (protoc) Descriptor for SymlinkNode: a named symbolic link
# with its target path. Do not edit by hand.
_SYMLINKNODE = _descriptor.Descriptor(
name='SymlinkNode',
full_name='build.bazel.remote.execution.v2.SymlinkNode',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `name` (string).
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.SymlinkNode.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `target` (string).
_descriptor.FieldDescriptor(
name='target', full_name='build.bazel.remote.execution.v2.SymlinkNode.target', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: `node_properties` (message); field number 3 is skipped in the .proto.
_descriptor.FieldDescriptor(
name='node_properties', full_name='build.bazel.remote.execution.v2.SymlinkNode.node_properties', index=2,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1817,
serialized_end=1940,
)
# Auto-generated (protoc) Descriptor for Digest: a content-addressed blob
# identifier (hash string + size in bytes). Do not edit by hand.
_DIGEST = _descriptor.Descriptor(
name='Digest',
full_name='build.bazel.remote.execution.v2.Digest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `hash` (string).
_descriptor.FieldDescriptor(
name='hash', full_name='build.bazel.remote.execution.v2.Digest.hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `size_bytes` (proto type 3 = int64).
_descriptor.FieldDescriptor(
name='size_bytes', full_name='build.bazel.remote.execution.v2.Digest.size_bytes', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1942,
serialized_end=1984,
)
# Auto-generated (protoc) Descriptor for ExecutedActionMetadata: the worker
# identity plus a series of timestamp messages (fields 2-10) marking each
# phase of an action's execution (queue, fetch, execute, upload).
# Do not edit by hand.
_EXECUTEDACTIONMETADATA = _descriptor.Descriptor(
name='ExecutedActionMetadata',
full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `worker` (string) — the only non-timestamp field.
_descriptor.FieldDescriptor(
name='worker', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.worker', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Fields 2-10: singular message fields (timestamps; message_type is linked
# later by the generated linking code).
_descriptor.FieldDescriptor(
name='queued_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.queued_timestamp', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='worker_start_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.worker_start_timestamp', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='worker_completed_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.worker_completed_timestamp', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='input_fetch_start_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.input_fetch_start_timestamp', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='input_fetch_completed_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.input_fetch_completed_timestamp', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='execution_start_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.execution_start_timestamp', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='execution_completed_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.execution_completed_timestamp', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_upload_start_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.output_upload_start_timestamp', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='output_upload_completed_timestamp', full_name='build.bazel.remote.execution.v2.ExecutedActionMetadata.output_upload_completed_timestamp', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1987,
serialized_end=2607,
)
# Auto-generated (protoc) Descriptor for ActionResult: the outputs of an
# executed action — output files/symlinks/directories (repeated message
# fields), the exit code, stdout/stderr as raw bytes or digests, and the
# execution metadata. Do not edit by hand.
_ACTIONRESULT = _descriptor.Descriptor(
name='ActionResult',
full_name='build.bazel.remote.execution.v2.ActionResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 2: `output_files` — repeated message.
_descriptor.FieldDescriptor(
name='output_files', full_name='build.bazel.remote.execution.v2.ActionResult.output_files', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 10: `output_file_symlinks` — repeated message.
_descriptor.FieldDescriptor(
name='output_file_symlinks', full_name='build.bazel.remote.execution.v2.ActionResult.output_file_symlinks', index=1,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 12: `output_symlinks` — repeated message.
_descriptor.FieldDescriptor(
name='output_symlinks', full_name='build.bazel.remote.execution.v2.ActionResult.output_symlinks', index=2,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: `output_directories` — repeated message.
_descriptor.FieldDescriptor(
name='output_directories', full_name='build.bazel.remote.execution.v2.ActionResult.output_directories', index=3,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 11: `output_directory_symlinks` — repeated message.
_descriptor.FieldDescriptor(
name='output_directory_symlinks', full_name='build.bazel.remote.execution.v2.ActionResult.output_directory_symlinks', index=4,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: `exit_code` (proto type 5 = int32).
_descriptor.FieldDescriptor(
name='exit_code', full_name='build.bazel.remote.execution.v2.ActionResult.exit_code', index=5,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 5: `stdout_raw` (proto type 12 = bytes).
_descriptor.FieldDescriptor(
name='stdout_raw', full_name='build.bazel.remote.execution.v2.ActionResult.stdout_raw', index=6,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 6: `stdout_digest` (message).
_descriptor.FieldDescriptor(
name='stdout_digest', full_name='build.bazel.remote.execution.v2.ActionResult.stdout_digest', index=7,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 7: `stderr_raw` (bytes).
_descriptor.FieldDescriptor(
name='stderr_raw', full_name='build.bazel.remote.execution.v2.ActionResult.stderr_raw', index=8,
number=7, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 8: `stderr_digest` (message).
_descriptor.FieldDescriptor(
name='stderr_digest', full_name='build.bazel.remote.execution.v2.ActionResult.stderr_digest', index=9,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 9: `execution_metadata` (message).
_descriptor.FieldDescriptor(
name='execution_metadata', full_name='build.bazel.remote.execution.v2.ActionResult.execution_metadata', index=10,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2610,
serialized_end=3281,
)
# Auto-generated (protoc) Descriptor for OutputFile: an output file of an
# action, addressed by path, with digest or inline contents. Do not edit by hand.
_OUTPUTFILE = _descriptor.Descriptor(
name='OutputFile',
full_name='build.bazel.remote.execution.v2.OutputFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `path` (string).
_descriptor.FieldDescriptor(
name='path', full_name='build.bazel.remote.execution.v2.OutputFile.path', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `digest` (message).
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.OutputFile.digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: `is_executable` (bool); field number 3 is skipped in the .proto.
_descriptor.FieldDescriptor(
name='is_executable', full_name='build.bazel.remote.execution.v2.OutputFile.is_executable', index=2,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 5: `contents` (bytes).
_descriptor.FieldDescriptor(
name='contents', full_name='build.bazel.remote.execution.v2.OutputFile.contents', index=3,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 7: `node_properties` (message).
_descriptor.FieldDescriptor(
name='node_properties', full_name='build.bazel.remote.execution.v2.OutputFile.node_properties', index=4,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3284,
serialized_end=3494,
)
# Auto-generated (protoc) Descriptor for Tree: a root Directory message plus
# the repeated child Directory messages it transitively references.
# Do not edit by hand.
_TREE = _descriptor.Descriptor(
name='Tree',
full_name='build.bazel.remote.execution.v2.Tree',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `root` (message).
_descriptor.FieldDescriptor(
name='root', full_name='build.bazel.remote.execution.v2.Tree.root', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `children` — repeated message.
_descriptor.FieldDescriptor(
name='children', full_name='build.bazel.remote.execution.v2.Tree.children', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3496,
serialized_end=3622,
)
# Auto-generated (protoc) Descriptor for OutputDirectory: an output directory
# of an action, referenced by the digest of its serialized Tree.
# Do not edit by hand.
_OUTPUTDIRECTORY = _descriptor.Descriptor(
name='OutputDirectory',
full_name='build.bazel.remote.execution.v2.OutputDirectory',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `path` (string).
_descriptor.FieldDescriptor(
name='path', full_name='build.bazel.remote.execution.v2.OutputDirectory.path', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: `tree_digest` (message); field number 2 is skipped in the .proto.
_descriptor.FieldDescriptor(
name='tree_digest', full_name='build.bazel.remote.execution.v2.OutputDirectory.tree_digest', index=1,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3624,
serialized_end=3723,
)
# Auto-generated (protoc) Descriptor for OutputSymlink: an output symbolic
# link of an action (path + target). Do not edit by hand.
_OUTPUTSYMLINK = _descriptor.Descriptor(
name='OutputSymlink',
full_name='build.bazel.remote.execution.v2.OutputSymlink',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `path` (string).
_descriptor.FieldDescriptor(
name='path', full_name='build.bazel.remote.execution.v2.OutputSymlink.path', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `target` (string).
_descriptor.FieldDescriptor(
name='target', full_name='build.bazel.remote.execution.v2.OutputSymlink.target', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: `node_properties` (message); field number 3 is skipped in the .proto.
_descriptor.FieldDescriptor(
name='node_properties', full_name='build.bazel.remote.execution.v2.OutputSymlink.node_properties', index=2,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3725,
serialized_end=3850,
)
# Auto-generated (protoc) Descriptor for ExecutionPolicy: a single int32
# `priority` knob for scheduling execution. Do not edit by hand.
_EXECUTIONPOLICY = _descriptor.Descriptor(
name='ExecutionPolicy',
full_name='build.bazel.remote.execution.v2.ExecutionPolicy',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `priority` (int32).
_descriptor.FieldDescriptor(
name='priority', full_name='build.bazel.remote.execution.v2.ExecutionPolicy.priority', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3852,
serialized_end=3887,
)
# Auto-generated (protoc) Descriptor for ResultsCachePolicy: a single int32
# `priority` knob for results caching; structurally parallel to
# ExecutionPolicy above. Do not edit by hand.
_RESULTSCACHEPOLICY = _descriptor.Descriptor(
name='ResultsCachePolicy',
full_name='build.bazel.remote.execution.v2.ResultsCachePolicy',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `priority` (int32).
_descriptor.FieldDescriptor(
name='priority', full_name='build.bazel.remote.execution.v2.ResultsCachePolicy.priority', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3889,
serialized_end=3927,
)
# Auto-generated (protoc) Descriptor for ExecuteRequest: the request message
# for the Execute RPC — instance name, cache-lookup flag, action digest, and
# optional execution/caching policies. Do not edit by hand.
_EXECUTEREQUEST = _descriptor.Descriptor(
name='ExecuteRequest',
full_name='build.bazel.remote.execution.v2.ExecuteRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `instance_name` (string).
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.ExecuteRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: `skip_cache_lookup` (bool); field number 2 is skipped in the .proto.
_descriptor.FieldDescriptor(
name='skip_cache_lookup', full_name='build.bazel.remote.execution.v2.ExecuteRequest.skip_cache_lookup', index=1,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 6: `action_digest` (message).
_descriptor.FieldDescriptor(
name='action_digest', full_name='build.bazel.remote.execution.v2.ExecuteRequest.action_digest', index=2,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 7: `execution_policy` (message).
_descriptor.FieldDescriptor(
name='execution_policy', full_name='build.bazel.remote.execution.v2.ExecuteRequest.execution_policy', index=3,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 8: `results_cache_policy` (message).
_descriptor.FieldDescriptor(
name='results_cache_policy', full_name='build.bazel.remote.execution.v2.ExecuteRequest.results_cache_policy', index=4,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3930,
serialized_end=4237,
)
# Auto-generated (protoc) Descriptor for LogFile: a server log blob digest
# plus a flag marking it human-readable. Do not edit by hand.
_LOGFILE = _descriptor.Descriptor(
name='LogFile',
full_name='build.bazel.remote.execution.v2.LogFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `digest` (message).
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.LogFile.digest', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `human_readable` (bool).
_descriptor.FieldDescriptor(
name='human_readable', full_name='build.bazel.remote.execution.v2.LogFile.human_readable', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4239,
serialized_end=4329,
)
# Auto-generated (protoc) Descriptor for the synthetic ServerLogsEntry
# message backing ExecuteResponse's `server_logs` map<string, ...> field
# (serialized_options b'8\001' is the map_entry=true option).
# Do not edit by hand.
_EXECUTERESPONSE_SERVERLOGSENTRY = _descriptor.Descriptor(
name='ServerLogsEntry',
full_name='build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: map key (string).
_descriptor.FieldDescriptor(
name='key', full_name='build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: map value (message).
_descriptor.FieldDescriptor(
name='value', full_name='build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4577,
serialized_end=4668,
)
# Auto-generated (protoc) Descriptor for ExecuteResponse: the Execute RPC
# result — the ActionResult, a cached-hit flag, a status message, the
# server_logs map (nested ServerLogsEntry), and a free-form message string.
# Do not edit by hand.
_EXECUTERESPONSE = _descriptor.Descriptor(
name='ExecuteResponse',
full_name='build.bazel.remote.execution.v2.ExecuteResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: `result` (message).
_descriptor.FieldDescriptor(
name='result', full_name='build.bazel.remote.execution.v2.ExecuteResponse.result', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: `cached_result` (bool).
_descriptor.FieldDescriptor(
name='cached_result', full_name='build.bazel.remote.execution.v2.ExecuteResponse.cached_result', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: `status` (message).
_descriptor.FieldDescriptor(
name='status', full_name='build.bazel.remote.execution.v2.ExecuteResponse.status', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: `server_logs` — repeated map entry (see ServerLogsEntry nested type).
_descriptor.FieldDescriptor(
name='server_logs', full_name='build.bazel.remote.execution.v2.ExecuteResponse.server_logs', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 5: `message` (string).
_descriptor.FieldDescriptor(
name='message', full_name='build.bazel.remote.execution.v2.ExecuteResponse.message', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_EXECUTERESPONSE_SERVERLOGSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4332,
serialized_end=4668,
)
_EXECUTIONSTAGE = _descriptor.Descriptor(
name='ExecutionStage',
full_name='build.bazel.remote.execution.v2.ExecutionStage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_EXECUTIONSTAGE_VALUE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4670,
serialized_end=4767,
)
_EXECUTEOPERATIONMETADATA = _descriptor.Descriptor(
name='ExecuteOperationMetadata',
full_name='build.bazel.remote.execution.v2.ExecuteOperationMetadata',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='stage', full_name='build.bazel.remote.execution.v2.ExecuteOperationMetadata.stage', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action_digest', full_name='build.bazel.remote.execution.v2.ExecuteOperationMetadata.action_digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stdout_stream_name', full_name='build.bazel.remote.execution.v2.ExecuteOperationMetadata.stdout_stream_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stderr_stream_name', full_name='build.bazel.remote.execution.v2.ExecuteOperationMetadata.stderr_stream_name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4770,
serialized_end=4986,
)
_WAITEXECUTIONREQUEST = _descriptor.Descriptor(
name='WaitExecutionRequest',
full_name='build.bazel.remote.execution.v2.WaitExecutionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='build.bazel.remote.execution.v2.WaitExecutionRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4988,
serialized_end=5024,
)
_GETACTIONRESULTREQUEST = _descriptor.Descriptor(
name='GetActionResultRequest',
full_name='build.bazel.remote.execution.v2.GetActionResultRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action_digest', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.action_digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='inline_stdout', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.inline_stdout', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='inline_stderr', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.inline_stderr', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='inline_output_files', full_name='build.bazel.remote.execution.v2.GetActionResultRequest.inline_output_files', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5027,
serialized_end=5213,
)
_UPDATEACTIONRESULTREQUEST = _descriptor.Descriptor(
name='UpdateActionResultRequest',
full_name='build.bazel.remote.execution.v2.UpdateActionResultRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.UpdateActionResultRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action_digest', full_name='build.bazel.remote.execution.v2.UpdateActionResultRequest.action_digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='action_result', full_name='build.bazel.remote.execution.v2.UpdateActionResultRequest.action_result', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='results_cache_policy', full_name='build.bazel.remote.execution.v2.UpdateActionResultRequest.results_cache_policy', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5216,
serialized_end=5483,
)
_FINDMISSINGBLOBSREQUEST = _descriptor.Descriptor(
name='FindMissingBlobsRequest',
full_name='build.bazel.remote.execution.v2.FindMissingBlobsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.FindMissingBlobsRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='blob_digests', full_name='build.bazel.remote.execution.v2.FindMissingBlobsRequest.blob_digests', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5485,
serialized_end=5596,
)
_FINDMISSINGBLOBSRESPONSE = _descriptor.Descriptor(
name='FindMissingBlobsResponse',
full_name='build.bazel.remote.execution.v2.FindMissingBlobsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='missing_blob_digests', full_name='build.bazel.remote.execution.v2.FindMissingBlobsResponse.missing_blob_digests', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5598,
serialized_end=5695,
)
_BATCHUPDATEBLOBSREQUEST_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request.digest', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5832,
serialized_end=5912,
)
_BATCHUPDATEBLOBSREQUEST = _descriptor.Descriptor(
name='BatchUpdateBlobsRequest',
full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='requests', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.requests', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_BATCHUPDATEBLOBSREQUEST_REQUEST, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5698,
serialized_end=5912,
)
_BATCHUPDATEBLOBSRESPONSE_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response.digest', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response.status', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6030,
serialized_end=6133,
)
_BATCHUPDATEBLOBSRESPONSE = _descriptor.Descriptor(
name='BatchUpdateBlobsResponse',
full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='responses', full_name='build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.responses', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_BATCHUPDATEBLOBSRESPONSE_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5915,
serialized_end=6133,
)
_BATCHREADBLOBSREQUEST = _descriptor.Descriptor(
name='BatchReadBlobsRequest',
full_name='build.bazel.remote.execution.v2.BatchReadBlobsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.BatchReadBlobsRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='digests', full_name='build.bazel.remote.execution.v2.BatchReadBlobsRequest.digests', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6135,
serialized_end=6239,
)
_BATCHREADBLOBSRESPONSE_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='digest', full_name='build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response.digest', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='data', full_name='build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response.status', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6353,
serialized_end=6470,
)
_BATCHREADBLOBSRESPONSE = _descriptor.Descriptor(
name='BatchReadBlobsResponse',
full_name='build.bazel.remote.execution.v2.BatchReadBlobsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='responses', full_name='build.bazel.remote.execution.v2.BatchReadBlobsResponse.responses', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_BATCHREADBLOBSRESPONSE_RESPONSE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6242,
serialized_end=6470,
)
_GETTREEREQUEST = _descriptor.Descriptor(
name='GetTreeRequest',
full_name='build.bazel.remote.execution.v2.GetTreeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.GetTreeRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='root_digest', full_name='build.bazel.remote.execution.v2.GetTreeRequest.root_digest', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='page_size', full_name='build.bazel.remote.execution.v2.GetTreeRequest.page_size', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='page_token', full_name='build.bazel.remote.execution.v2.GetTreeRequest.page_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6473,
serialized_end=6613,
)
_GETTREERESPONSE = _descriptor.Descriptor(
name='GetTreeResponse',
full_name='build.bazel.remote.execution.v2.GetTreeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='directories', full_name='build.bazel.remote.execution.v2.GetTreeResponse.directories', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='next_page_token', full_name='build.bazel.remote.execution.v2.GetTreeResponse.next_page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6615,
serialized_end=6722,
)
_GETCAPABILITIESREQUEST = _descriptor.Descriptor(
name='GetCapabilitiesRequest',
full_name='build.bazel.remote.execution.v2.GetCapabilitiesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instance_name', full_name='build.bazel.remote.execution.v2.GetCapabilitiesRequest.instance_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6724,
serialized_end=6771,
)
_SERVERCAPABILITIES = _descriptor.Descriptor(
name='ServerCapabilities',
full_name='build.bazel.remote.execution.v2.ServerCapabilities',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='cache_capabilities', full_name='build.bazel.remote.execution.v2.ServerCapabilities.cache_capabilities', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='execution_capabilities', full_name='build.bazel.remote.execution.v2.ServerCapabilities.execution_capabilities', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deprecated_api_version', full_name='build.bazel.remote.execution.v2.ServerCapabilities.deprecated_api_version', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='low_api_version', full_name='build.bazel.remote.execution.v2.ServerCapabilities.low_api_version', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='high_api_version', full_name='build.bazel.remote.execution.v2.ServerCapabilities.high_api_version', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6774,
serialized_end=7129,
)
_DIGESTFUNCTION = _descriptor.Descriptor(
name='DigestFunction',
full_name='build.bazel.remote.execution.v2.DigestFunction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_DIGESTFUNCTION_VALUE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7131,
serialized_end=7233,
)
_ACTIONCACHEUPDATECAPABILITIES = _descriptor.Descriptor(
name='ActionCacheUpdateCapabilities',
full_name='build.bazel.remote.execution.v2.ActionCacheUpdateCapabilities',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='update_enabled', full_name='build.bazel.remote.execution.v2.ActionCacheUpdateCapabilities.update_enabled', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7235,
serialized_end=7290,
)
_PRIORITYCAPABILITIES_PRIORITYRANGE = _descriptor.Descriptor(
name='PriorityRange',
full_name='build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='min_priority', full_name='build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange.min_priority', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_priority', full_name='build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange.max_priority', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7406,
serialized_end=7465,
)
_PRIORITYCAPABILITIES = _descriptor.Descriptor(
name='PriorityCapabilities',
full_name='build.bazel.remote.execution.v2.PriorityCapabilities',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='priorities', full_name='build.bazel.remote.execution.v2.PriorityCapabilities.priorities', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_PRIORITYCAPABILITIES_PRIORITYRANGE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7293,
serialized_end=7465,
)
_SYMLINKABSOLUTEPATHSTRATEGY = _descriptor.Descriptor(
name='SymlinkAbsolutePathStrategy',
full_name='build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_SYMLINKABSOLUTEPATHSTRATEGY_VALUE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7467,
serialized_end=7547,
)
# Descriptor for the CacheCapabilities message (capabilities of the remote
# cache endpoint).  Auto-generated by the protocol buffer compiler -- do not
# edit by hand.  message_type/enum_type cross-references are deliberately
# None here; they are patched in later, once every descriptor object exists.
_CACHECAPABILITIES = _descriptor.Descriptor(
name='CacheCapabilities',
full_name='build.bazel.remote.execution.v2.CacheCapabilities',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: repeated enum digest_function (type=14 TYPE_ENUM, label=3 repeated).
_descriptor.FieldDescriptor(
name='digest_function', full_name='build.bazel.remote.execution.v2.CacheCapabilities.digest_function', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: singular message action_cache_update_capabilities (type=11 TYPE_MESSAGE).
_descriptor.FieldDescriptor(
name='action_cache_update_capabilities', full_name='build.bazel.remote.execution.v2.CacheCapabilities.action_cache_update_capabilities', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: singular message cache_priority_capabilities.
_descriptor.FieldDescriptor(
name='cache_priority_capabilities', full_name='build.bazel.remote.execution.v2.CacheCapabilities.cache_priority_capabilities', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: singular int64 max_batch_total_size_bytes (type=3 TYPE_INT64).
_descriptor.FieldDescriptor(
name='max_batch_total_size_bytes', full_name='build.bazel.remote.execution.v2.CacheCapabilities.max_batch_total_size_bytes', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 5: singular enum symlink_absolute_path_strategy.
_descriptor.FieldDescriptor(
name='symlink_absolute_path_strategy', full_name='build.bazel.remote.execution.v2.CacheCapabilities.symlink_absolute_path_strategy', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7550,
serialized_end=7991,
)
# Descriptor for the ExecutionCapabilities message (capabilities of the
# remote execution endpoint).  Auto-generated by the protocol buffer
# compiler -- do not edit by hand.  message_type/enum_type references are
# patched in after all descriptors have been constructed.
_EXECUTIONCAPABILITIES = _descriptor.Descriptor(
name='ExecutionCapabilities',
full_name='build.bazel.remote.execution.v2.ExecutionCapabilities',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: singular enum digest_function (type=14 TYPE_ENUM).
_descriptor.FieldDescriptor(
name='digest_function', full_name='build.bazel.remote.execution.v2.ExecutionCapabilities.digest_function', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: singular bool exec_enabled (type=8 TYPE_BOOL).
_descriptor.FieldDescriptor(
name='exec_enabled', full_name='build.bazel.remote.execution.v2.ExecutionCapabilities.exec_enabled', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: singular message execution_priority_capabilities (type=11 TYPE_MESSAGE).
_descriptor.FieldDescriptor(
name='execution_priority_capabilities', full_name='build.bazel.remote.execution.v2.ExecutionCapabilities.execution_priority_capabilities', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: repeated string supported_node_properties (type=9 TYPE_STRING, label=3 repeated).
_descriptor.FieldDescriptor(
name='supported_node_properties', full_name='build.bazel.remote.execution.v2.ExecutionCapabilities.supported_node_properties', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7994,
serialized_end=8250,
)
# Descriptor for the ToolDetails message (identifies the client tool).
# Auto-generated by the protocol buffer compiler -- do not edit by hand.
# Both fields are singular strings; the b"".decode('utf-8') default is the
# generator's way of producing an empty unicode string on Python 2 and 3.
_TOOLDETAILS = _descriptor.Descriptor(
name='ToolDetails',
full_name='build.bazel.remote.execution.v2.ToolDetails',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: singular string tool_name (type=9 TYPE_STRING).
_descriptor.FieldDescriptor(
name='tool_name', full_name='build.bazel.remote.execution.v2.ToolDetails.tool_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: singular string tool_version.
_descriptor.FieldDescriptor(
name='tool_version', full_name='build.bazel.remote.execution.v2.ToolDetails.tool_version', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8252,
serialized_end=8306,
)
# Descriptor for the RequestMetadata message (per-request metadata carrying
# tool identity and invocation/action correlation ids).  Auto-generated by
# the protocol buffer compiler -- do not edit by hand.  The tool_details
# message_type reference is patched in later.
_REQUESTMETADATA = _descriptor.Descriptor(
name='RequestMetadata',
full_name='build.bazel.remote.execution.v2.RequestMetadata',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
# Field 1: singular message tool_details (type=11 TYPE_MESSAGE -> ToolDetails).
_descriptor.FieldDescriptor(
name='tool_details', full_name='build.bazel.remote.execution.v2.RequestMetadata.tool_details', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 2: singular string action_id.
_descriptor.FieldDescriptor(
name='action_id', full_name='build.bazel.remote.execution.v2.RequestMetadata.action_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 3: singular string tool_invocation_id.
_descriptor.FieldDescriptor(
name='tool_invocation_id', full_name='build.bazel.remote.execution.v2.RequestMetadata.tool_invocation_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
# Field 4: singular string correlated_invocations_id.
_descriptor.FieldDescriptor(
name='correlated_invocations_id', full_name='build.bazel.remote.execution.v2.RequestMetadata.correlated_invocations_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8309,
serialized_end=8476,
)
# ---------------------------------------------------------------------------
# Cross-reference patching (auto-generated; do not edit by hand).
# The Descriptor objects above were built with message_type/enum_type/
# containing_type set to None because referenced descriptors might not have
# existed yet.  Now that they all do, wire up every message-typed and
# enum-typed field, and attach each nested type to its containing type.
# This must run before the descriptors are registered below.
# ---------------------------------------------------------------------------
_ACTION.fields_by_name['command_digest'].message_type = _DIGEST
_ACTION.fields_by_name['input_root_digest'].message_type = _DIGEST
_ACTION.fields_by_name['timeout'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_COMMAND_ENVIRONMENTVARIABLE.containing_type = _COMMAND
_COMMAND.fields_by_name['environment_variables'].message_type = _COMMAND_ENVIRONMENTVARIABLE
_COMMAND.fields_by_name['platform'].message_type = _PLATFORM
_PLATFORM_PROPERTY.containing_type = _PLATFORM
_PLATFORM.fields_by_name['properties'].message_type = _PLATFORM_PROPERTY
# Directory-tree structure messages (Directory, FileNode, DirectoryNode, ...).
_DIRECTORY.fields_by_name['files'].message_type = _FILENODE
_DIRECTORY.fields_by_name['directories'].message_type = _DIRECTORYNODE
_DIRECTORY.fields_by_name['symlinks'].message_type = _SYMLINKNODE
_DIRECTORY.fields_by_name['node_properties'].message_type = _NODEPROPERTIES
_NODEPROPERTIES.fields_by_name['properties'].message_type = _NODEPROPERTY
_NODEPROPERTIES.fields_by_name['mtime'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_NODEPROPERTIES.fields_by_name['unix_mode'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE
_FILENODE.fields_by_name['digest'].message_type = _DIGEST
_FILENODE.fields_by_name['node_properties'].message_type = _NODEPROPERTIES
_DIRECTORYNODE.fields_by_name['digest'].message_type = _DIGEST
_SYMLINKNODE.fields_by_name['node_properties'].message_type = _NODEPROPERTIES
# Execution metadata: every timestamp field uses google.protobuf.Timestamp.
_EXECUTEDACTIONMETADATA.fields_by_name['queued_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['worker_start_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['worker_completed_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['input_fetch_start_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['input_fetch_completed_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['execution_start_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['execution_completed_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['output_upload_start_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_EXECUTEDACTIONMETADATA.fields_by_name['output_upload_completed_timestamp'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_ACTIONRESULT.fields_by_name['output_files'].message_type = _OUTPUTFILE
_ACTIONRESULT.fields_by_name['output_file_symlinks'].message_type = _OUTPUTSYMLINK
_ACTIONRESULT.fields_by_name['output_symlinks'].message_type = _OUTPUTSYMLINK
_ACTIONRESULT.fields_by_name['output_directories'].message_type = _OUTPUTDIRECTORY
_ACTIONRESULT.fields_by_name['output_directory_symlinks'].message_type = _OUTPUTSYMLINK
_ACTIONRESULT.fields_by_name['stdout_digest'].message_type = _DIGEST
_ACTIONRESULT.fields_by_name['stderr_digest'].message_type = _DIGEST
_ACTIONRESULT.fields_by_name['execution_metadata'].message_type = _EXECUTEDACTIONMETADATA
_OUTPUTFILE.fields_by_name['digest'].message_type = _DIGEST
_OUTPUTFILE.fields_by_name['node_properties'].message_type = _NODEPROPERTIES
_TREE.fields_by_name['root'].message_type = _DIRECTORY
_TREE.fields_by_name['children'].message_type = _DIRECTORY
_OUTPUTDIRECTORY.fields_by_name['tree_digest'].message_type = _DIGEST
_OUTPUTSYMLINK.fields_by_name['node_properties'].message_type = _NODEPROPERTIES
# Execution request/response messages.
_EXECUTEREQUEST.fields_by_name['action_digest'].message_type = _DIGEST
_EXECUTEREQUEST.fields_by_name['execution_policy'].message_type = _EXECUTIONPOLICY
_EXECUTEREQUEST.fields_by_name['results_cache_policy'].message_type = _RESULTSCACHEPOLICY
_LOGFILE.fields_by_name['digest'].message_type = _DIGEST
_EXECUTERESPONSE_SERVERLOGSENTRY.fields_by_name['value'].message_type = _LOGFILE
_EXECUTERESPONSE_SERVERLOGSENTRY.containing_type = _EXECUTERESPONSE
_EXECUTERESPONSE.fields_by_name['result'].message_type = _ACTIONRESULT
_EXECUTERESPONSE.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_EXECUTERESPONSE.fields_by_name['server_logs'].message_type = _EXECUTERESPONSE_SERVERLOGSENTRY
_EXECUTIONSTAGE_VALUE.containing_type = _EXECUTIONSTAGE
_EXECUTEOPERATIONMETADATA.fields_by_name['stage'].enum_type = _EXECUTIONSTAGE_VALUE
_EXECUTEOPERATIONMETADATA.fields_by_name['action_digest'].message_type = _DIGEST
# Action-cache RPC messages.
_GETACTIONRESULTREQUEST.fields_by_name['action_digest'].message_type = _DIGEST
_UPDATEACTIONRESULTREQUEST.fields_by_name['action_digest'].message_type = _DIGEST
_UPDATEACTIONRESULTREQUEST.fields_by_name['action_result'].message_type = _ACTIONRESULT
_UPDATEACTIONRESULTREQUEST.fields_by_name['results_cache_policy'].message_type = _RESULTSCACHEPOLICY
# CAS (content-addressable storage) RPC messages.
_FINDMISSINGBLOBSREQUEST.fields_by_name['blob_digests'].message_type = _DIGEST
_FINDMISSINGBLOBSRESPONSE.fields_by_name['missing_blob_digests'].message_type = _DIGEST
_BATCHUPDATEBLOBSREQUEST_REQUEST.fields_by_name['digest'].message_type = _DIGEST
_BATCHUPDATEBLOBSREQUEST_REQUEST.containing_type = _BATCHUPDATEBLOBSREQUEST
_BATCHUPDATEBLOBSREQUEST.fields_by_name['requests'].message_type = _BATCHUPDATEBLOBSREQUEST_REQUEST
_BATCHUPDATEBLOBSRESPONSE_RESPONSE.fields_by_name['digest'].message_type = _DIGEST
_BATCHUPDATEBLOBSRESPONSE_RESPONSE.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_BATCHUPDATEBLOBSRESPONSE_RESPONSE.containing_type = _BATCHUPDATEBLOBSRESPONSE
_BATCHUPDATEBLOBSRESPONSE.fields_by_name['responses'].message_type = _BATCHUPDATEBLOBSRESPONSE_RESPONSE
_BATCHREADBLOBSREQUEST.fields_by_name['digests'].message_type = _DIGEST
_BATCHREADBLOBSRESPONSE_RESPONSE.fields_by_name['digest'].message_type = _DIGEST
_BATCHREADBLOBSRESPONSE_RESPONSE.fields_by_name['status'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_BATCHREADBLOBSRESPONSE_RESPONSE.containing_type = _BATCHREADBLOBSRESPONSE
_BATCHREADBLOBSRESPONSE.fields_by_name['responses'].message_type = _BATCHREADBLOBSRESPONSE_RESPONSE
_GETTREEREQUEST.fields_by_name['root_digest'].message_type = _DIGEST
_GETTREERESPONSE.fields_by_name['directories'].message_type = _DIRECTORY
# Capabilities RPC messages; API version fields come from build.bazel.semver.
_SERVERCAPABILITIES.fields_by_name['cache_capabilities'].message_type = _CACHECAPABILITIES
_SERVERCAPABILITIES.fields_by_name['execution_capabilities'].message_type = _EXECUTIONCAPABILITIES
_SERVERCAPABILITIES.fields_by_name['deprecated_api_version'].message_type = build_dot_bazel_dot_semver_dot_semver__pb2._SEMVER
_SERVERCAPABILITIES.fields_by_name['low_api_version'].message_type = build_dot_bazel_dot_semver_dot_semver__pb2._SEMVER
_SERVERCAPABILITIES.fields_by_name['high_api_version'].message_type = build_dot_bazel_dot_semver_dot_semver__pb2._SEMVER
_DIGESTFUNCTION_VALUE.containing_type = _DIGESTFUNCTION
_PRIORITYCAPABILITIES_PRIORITYRANGE.containing_type = _PRIORITYCAPABILITIES
_PRIORITYCAPABILITIES.fields_by_name['priorities'].message_type = _PRIORITYCAPABILITIES_PRIORITYRANGE
_SYMLINKABSOLUTEPATHSTRATEGY_VALUE.containing_type = _SYMLINKABSOLUTEPATHSTRATEGY
_CACHECAPABILITIES.fields_by_name['digest_function'].enum_type = _DIGESTFUNCTION_VALUE
_CACHECAPABILITIES.fields_by_name['action_cache_update_capabilities'].message_type = _ACTIONCACHEUPDATECAPABILITIES
_CACHECAPABILITIES.fields_by_name['cache_priority_capabilities'].message_type = _PRIORITYCAPABILITIES
_CACHECAPABILITIES.fields_by_name['symlink_absolute_path_strategy'].enum_type = _SYMLINKABSOLUTEPATHSTRATEGY_VALUE
_EXECUTIONCAPABILITIES.fields_by_name['digest_function'].enum_type = _DIGESTFUNCTION_VALUE
_EXECUTIONCAPABILITIES.fields_by_name['execution_priority_capabilities'].message_type = _PRIORITYCAPABILITIES
_REQUESTMETADATA.fields_by_name['tool_details'].message_type = _TOOLDETAILS
# ---------------------------------------------------------------------------
# Registration (auto-generated; do not edit by hand).
# Expose every top-level message descriptor on the file descriptor under its
# short proto name, then register the fully wired file descriptor with the
# default symbol database so other generated modules can resolve these types.
# ---------------------------------------------------------------------------
DESCRIPTOR.message_types_by_name['Action'] = _ACTION
DESCRIPTOR.message_types_by_name['Command'] = _COMMAND
DESCRIPTOR.message_types_by_name['Platform'] = _PLATFORM
DESCRIPTOR.message_types_by_name['Directory'] = _DIRECTORY
DESCRIPTOR.message_types_by_name['NodeProperty'] = _NODEPROPERTY
DESCRIPTOR.message_types_by_name['NodeProperties'] = _NODEPROPERTIES
DESCRIPTOR.message_types_by_name['FileNode'] = _FILENODE
DESCRIPTOR.message_types_by_name['DirectoryNode'] = _DIRECTORYNODE
DESCRIPTOR.message_types_by_name['SymlinkNode'] = _SYMLINKNODE
DESCRIPTOR.message_types_by_name['Digest'] = _DIGEST
DESCRIPTOR.message_types_by_name['ExecutedActionMetadata'] = _EXECUTEDACTIONMETADATA
DESCRIPTOR.message_types_by_name['ActionResult'] = _ACTIONRESULT
DESCRIPTOR.message_types_by_name['OutputFile'] = _OUTPUTFILE
DESCRIPTOR.message_types_by_name['Tree'] = _TREE
DESCRIPTOR.message_types_by_name['OutputDirectory'] = _OUTPUTDIRECTORY
DESCRIPTOR.message_types_by_name['OutputSymlink'] = _OUTPUTSYMLINK
DESCRIPTOR.message_types_by_name['ExecutionPolicy'] = _EXECUTIONPOLICY
DESCRIPTOR.message_types_by_name['ResultsCachePolicy'] = _RESULTSCACHEPOLICY
DESCRIPTOR.message_types_by_name['ExecuteRequest'] = _EXECUTEREQUEST
DESCRIPTOR.message_types_by_name['LogFile'] = _LOGFILE
DESCRIPTOR.message_types_by_name['ExecuteResponse'] = _EXECUTERESPONSE
DESCRIPTOR.message_types_by_name['ExecutionStage'] = _EXECUTIONSTAGE
DESCRIPTOR.message_types_by_name['ExecuteOperationMetadata'] = _EXECUTEOPERATIONMETADATA
DESCRIPTOR.message_types_by_name['WaitExecutionRequest'] = _WAITEXECUTIONREQUEST
DESCRIPTOR.message_types_by_name['GetActionResultRequest'] = _GETACTIONRESULTREQUEST
DESCRIPTOR.message_types_by_name['UpdateActionResultRequest'] = _UPDATEACTIONRESULTREQUEST
DESCRIPTOR.message_types_by_name['FindMissingBlobsRequest'] = _FINDMISSINGBLOBSREQUEST
DESCRIPTOR.message_types_by_name['FindMissingBlobsResponse'] = _FINDMISSINGBLOBSRESPONSE
DESCRIPTOR.message_types_by_name['BatchUpdateBlobsRequest'] = _BATCHUPDATEBLOBSREQUEST
DESCRIPTOR.message_types_by_name['BatchUpdateBlobsResponse'] = _BATCHUPDATEBLOBSRESPONSE
DESCRIPTOR.message_types_by_name['BatchReadBlobsRequest'] = _BATCHREADBLOBSREQUEST
DESCRIPTOR.message_types_by_name['BatchReadBlobsResponse'] = _BATCHREADBLOBSRESPONSE
DESCRIPTOR.message_types_by_name['GetTreeRequest'] = _GETTREEREQUEST
DESCRIPTOR.message_types_by_name['GetTreeResponse'] = _GETTREERESPONSE
DESCRIPTOR.message_types_by_name['GetCapabilitiesRequest'] = _GETCAPABILITIESREQUEST
DESCRIPTOR.message_types_by_name['ServerCapabilities'] = _SERVERCAPABILITIES
DESCRIPTOR.message_types_by_name['DigestFunction'] = _DIGESTFUNCTION
DESCRIPTOR.message_types_by_name['ActionCacheUpdateCapabilities'] = _ACTIONCACHEUPDATECAPABILITIES
DESCRIPTOR.message_types_by_name['PriorityCapabilities'] = _PRIORITYCAPABILITIES
DESCRIPTOR.message_types_by_name['SymlinkAbsolutePathStrategy'] = _SYMLINKABSOLUTEPATHSTRATEGY
DESCRIPTOR.message_types_by_name['CacheCapabilities'] = _CACHECAPABILITIES
DESCRIPTOR.message_types_by_name['ExecutionCapabilities'] = _EXECUTIONCAPABILITIES
DESCRIPTOR.message_types_by_name['ToolDetails'] = _TOOLDETAILS
DESCRIPTOR.message_types_by_name['RequestMetadata'] = _REQUESTMETADATA
# Register the file descriptor itself once all types are attached.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Concrete Python message classes (auto-generated; do not edit by hand).
# Each class is synthesized at import time from its Descriptor by the
# reflection machinery and then registered with the default symbol database.
# Nested message types (e.g. Command.EnvironmentVariable, map entries such
# as ExecuteResponse.ServerLogsEntry) are built inline as entries of the
# class dict.  The @@protoc_insertion_point comments are markers used by
# protoc plugins; leave them in place.
# ---------------------------------------------------------------------------
Action = _reflection.GeneratedProtocolMessageType('Action', (_message.Message,), {
'DESCRIPTOR' : _ACTION,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Action)
})
_sym_db.RegisterMessage(Action)
Command = _reflection.GeneratedProtocolMessageType('Command', (_message.Message,), {
'EnvironmentVariable' : _reflection.GeneratedProtocolMessageType('EnvironmentVariable', (_message.Message,), {
'DESCRIPTOR' : _COMMAND_ENVIRONMENTVARIABLE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Command.EnvironmentVariable)
})
,
'DESCRIPTOR' : _COMMAND,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Command)
})
_sym_db.RegisterMessage(Command)
_sym_db.RegisterMessage(Command.EnvironmentVariable)
Platform = _reflection.GeneratedProtocolMessageType('Platform', (_message.Message,), {
'Property' : _reflection.GeneratedProtocolMessageType('Property', (_message.Message,), {
'DESCRIPTOR' : _PLATFORM_PROPERTY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Platform.Property)
})
,
'DESCRIPTOR' : _PLATFORM,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Platform)
})
_sym_db.RegisterMessage(Platform)
_sym_db.RegisterMessage(Platform.Property)
Directory = _reflection.GeneratedProtocolMessageType('Directory', (_message.Message,), {
'DESCRIPTOR' : _DIRECTORY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Directory)
})
_sym_db.RegisterMessage(Directory)
NodeProperty = _reflection.GeneratedProtocolMessageType('NodeProperty', (_message.Message,), {
'DESCRIPTOR' : _NODEPROPERTY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.NodeProperty)
})
_sym_db.RegisterMessage(NodeProperty)
NodeProperties = _reflection.GeneratedProtocolMessageType('NodeProperties', (_message.Message,), {
'DESCRIPTOR' : _NODEPROPERTIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.NodeProperties)
})
_sym_db.RegisterMessage(NodeProperties)
FileNode = _reflection.GeneratedProtocolMessageType('FileNode', (_message.Message,), {
'DESCRIPTOR' : _FILENODE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.FileNode)
})
_sym_db.RegisterMessage(FileNode)
DirectoryNode = _reflection.GeneratedProtocolMessageType('DirectoryNode', (_message.Message,), {
'DESCRIPTOR' : _DIRECTORYNODE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.DirectoryNode)
})
_sym_db.RegisterMessage(DirectoryNode)
SymlinkNode = _reflection.GeneratedProtocolMessageType('SymlinkNode', (_message.Message,), {
'DESCRIPTOR' : _SYMLINKNODE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.SymlinkNode)
})
_sym_db.RegisterMessage(SymlinkNode)
Digest = _reflection.GeneratedProtocolMessageType('Digest', (_message.Message,), {
'DESCRIPTOR' : _DIGEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Digest)
})
_sym_db.RegisterMessage(Digest)
ExecutedActionMetadata = _reflection.GeneratedProtocolMessageType('ExecutedActionMetadata', (_message.Message,), {
'DESCRIPTOR' : _EXECUTEDACTIONMETADATA,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecutedActionMetadata)
})
_sym_db.RegisterMessage(ExecutedActionMetadata)
ActionResult = _reflection.GeneratedProtocolMessageType('ActionResult', (_message.Message,), {
'DESCRIPTOR' : _ACTIONRESULT,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ActionResult)
})
_sym_db.RegisterMessage(ActionResult)
OutputFile = _reflection.GeneratedProtocolMessageType('OutputFile', (_message.Message,), {
'DESCRIPTOR' : _OUTPUTFILE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.OutputFile)
})
_sym_db.RegisterMessage(OutputFile)
Tree = _reflection.GeneratedProtocolMessageType('Tree', (_message.Message,), {
'DESCRIPTOR' : _TREE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.Tree)
})
_sym_db.RegisterMessage(Tree)
OutputDirectory = _reflection.GeneratedProtocolMessageType('OutputDirectory', (_message.Message,), {
'DESCRIPTOR' : _OUTPUTDIRECTORY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.OutputDirectory)
})
_sym_db.RegisterMessage(OutputDirectory)
OutputSymlink = _reflection.GeneratedProtocolMessageType('OutputSymlink', (_message.Message,), {
'DESCRIPTOR' : _OUTPUTSYMLINK,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.OutputSymlink)
})
_sym_db.RegisterMessage(OutputSymlink)
ExecutionPolicy = _reflection.GeneratedProtocolMessageType('ExecutionPolicy', (_message.Message,), {
'DESCRIPTOR' : _EXECUTIONPOLICY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecutionPolicy)
})
_sym_db.RegisterMessage(ExecutionPolicy)
ResultsCachePolicy = _reflection.GeneratedProtocolMessageType('ResultsCachePolicy', (_message.Message,), {
'DESCRIPTOR' : _RESULTSCACHEPOLICY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ResultsCachePolicy)
})
_sym_db.RegisterMessage(ResultsCachePolicy)
ExecuteRequest = _reflection.GeneratedProtocolMessageType('ExecuteRequest', (_message.Message,), {
'DESCRIPTOR' : _EXECUTEREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecuteRequest)
})
_sym_db.RegisterMessage(ExecuteRequest)
LogFile = _reflection.GeneratedProtocolMessageType('LogFile', (_message.Message,), {
'DESCRIPTOR' : _LOGFILE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.LogFile)
})
_sym_db.RegisterMessage(LogFile)
ExecuteResponse = _reflection.GeneratedProtocolMessageType('ExecuteResponse', (_message.Message,), {
'ServerLogsEntry' : _reflection.GeneratedProtocolMessageType('ServerLogsEntry', (_message.Message,), {
'DESCRIPTOR' : _EXECUTERESPONSE_SERVERLOGSENTRY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecuteResponse.ServerLogsEntry)
})
,
'DESCRIPTOR' : _EXECUTERESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecuteResponse)
})
_sym_db.RegisterMessage(ExecuteResponse)
_sym_db.RegisterMessage(ExecuteResponse.ServerLogsEntry)
ExecutionStage = _reflection.GeneratedProtocolMessageType('ExecutionStage', (_message.Message,), {
'DESCRIPTOR' : _EXECUTIONSTAGE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecutionStage)
})
_sym_db.RegisterMessage(ExecutionStage)
ExecuteOperationMetadata = _reflection.GeneratedProtocolMessageType('ExecuteOperationMetadata', (_message.Message,), {
'DESCRIPTOR' : _EXECUTEOPERATIONMETADATA,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecuteOperationMetadata)
})
_sym_db.RegisterMessage(ExecuteOperationMetadata)
WaitExecutionRequest = _reflection.GeneratedProtocolMessageType('WaitExecutionRequest', (_message.Message,), {
'DESCRIPTOR' : _WAITEXECUTIONREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.WaitExecutionRequest)
})
_sym_db.RegisterMessage(WaitExecutionRequest)
GetActionResultRequest = _reflection.GeneratedProtocolMessageType('GetActionResultRequest', (_message.Message,), {
'DESCRIPTOR' : _GETACTIONRESULTREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.GetActionResultRequest)
})
_sym_db.RegisterMessage(GetActionResultRequest)
UpdateActionResultRequest = _reflection.GeneratedProtocolMessageType('UpdateActionResultRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEACTIONRESULTREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.UpdateActionResultRequest)
})
_sym_db.RegisterMessage(UpdateActionResultRequest)
FindMissingBlobsRequest = _reflection.GeneratedProtocolMessageType('FindMissingBlobsRequest', (_message.Message,), {
'DESCRIPTOR' : _FINDMISSINGBLOBSREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.FindMissingBlobsRequest)
})
_sym_db.RegisterMessage(FindMissingBlobsRequest)
FindMissingBlobsResponse = _reflection.GeneratedProtocolMessageType('FindMissingBlobsResponse', (_message.Message,), {
'DESCRIPTOR' : _FINDMISSINGBLOBSRESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.FindMissingBlobsResponse)
})
_sym_db.RegisterMessage(FindMissingBlobsResponse)
BatchUpdateBlobsRequest = _reflection.GeneratedProtocolMessageType('BatchUpdateBlobsRequest', (_message.Message,), {
'Request' : _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), {
'DESCRIPTOR' : _BATCHUPDATEBLOBSREQUEST_REQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchUpdateBlobsRequest.Request)
})
,
'DESCRIPTOR' : _BATCHUPDATEBLOBSREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchUpdateBlobsRequest)
})
_sym_db.RegisterMessage(BatchUpdateBlobsRequest)
_sym_db.RegisterMessage(BatchUpdateBlobsRequest.Request)
BatchUpdateBlobsResponse = _reflection.GeneratedProtocolMessageType('BatchUpdateBlobsResponse', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _BATCHUPDATEBLOBSRESPONSE_RESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchUpdateBlobsResponse.Response)
})
,
'DESCRIPTOR' : _BATCHUPDATEBLOBSRESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchUpdateBlobsResponse)
})
_sym_db.RegisterMessage(BatchUpdateBlobsResponse)
_sym_db.RegisterMessage(BatchUpdateBlobsResponse.Response)
BatchReadBlobsRequest = _reflection.GeneratedProtocolMessageType('BatchReadBlobsRequest', (_message.Message,), {
'DESCRIPTOR' : _BATCHREADBLOBSREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchReadBlobsRequest)
})
_sym_db.RegisterMessage(BatchReadBlobsRequest)
BatchReadBlobsResponse = _reflection.GeneratedProtocolMessageType('BatchReadBlobsResponse', (_message.Message,), {
'Response' : _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _BATCHREADBLOBSRESPONSE_RESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchReadBlobsResponse.Response)
})
,
'DESCRIPTOR' : _BATCHREADBLOBSRESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.BatchReadBlobsResponse)
})
_sym_db.RegisterMessage(BatchReadBlobsResponse)
_sym_db.RegisterMessage(BatchReadBlobsResponse.Response)
GetTreeRequest = _reflection.GeneratedProtocolMessageType('GetTreeRequest', (_message.Message,), {
'DESCRIPTOR' : _GETTREEREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.GetTreeRequest)
})
_sym_db.RegisterMessage(GetTreeRequest)
GetTreeResponse = _reflection.GeneratedProtocolMessageType('GetTreeResponse', (_message.Message,), {
'DESCRIPTOR' : _GETTREERESPONSE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.GetTreeResponse)
})
_sym_db.RegisterMessage(GetTreeResponse)
GetCapabilitiesRequest = _reflection.GeneratedProtocolMessageType('GetCapabilitiesRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCAPABILITIESREQUEST,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.GetCapabilitiesRequest)
})
_sym_db.RegisterMessage(GetCapabilitiesRequest)
ServerCapabilities = _reflection.GeneratedProtocolMessageType('ServerCapabilities', (_message.Message,), {
'DESCRIPTOR' : _SERVERCAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ServerCapabilities)
})
_sym_db.RegisterMessage(ServerCapabilities)
DigestFunction = _reflection.GeneratedProtocolMessageType('DigestFunction', (_message.Message,), {
'DESCRIPTOR' : _DIGESTFUNCTION,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.DigestFunction)
})
_sym_db.RegisterMessage(DigestFunction)
ActionCacheUpdateCapabilities = _reflection.GeneratedProtocolMessageType('ActionCacheUpdateCapabilities', (_message.Message,), {
'DESCRIPTOR' : _ACTIONCACHEUPDATECAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ActionCacheUpdateCapabilities)
})
_sym_db.RegisterMessage(ActionCacheUpdateCapabilities)
PriorityCapabilities = _reflection.GeneratedProtocolMessageType('PriorityCapabilities', (_message.Message,), {
'PriorityRange' : _reflection.GeneratedProtocolMessageType('PriorityRange', (_message.Message,), {
'DESCRIPTOR' : _PRIORITYCAPABILITIES_PRIORITYRANGE,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.PriorityCapabilities.PriorityRange)
})
,
'DESCRIPTOR' : _PRIORITYCAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.PriorityCapabilities)
})
_sym_db.RegisterMessage(PriorityCapabilities)
_sym_db.RegisterMessage(PriorityCapabilities.PriorityRange)
SymlinkAbsolutePathStrategy = _reflection.GeneratedProtocolMessageType('SymlinkAbsolutePathStrategy', (_message.Message,), {
'DESCRIPTOR' : _SYMLINKABSOLUTEPATHSTRATEGY,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.SymlinkAbsolutePathStrategy)
})
_sym_db.RegisterMessage(SymlinkAbsolutePathStrategy)
CacheCapabilities = _reflection.GeneratedProtocolMessageType('CacheCapabilities', (_message.Message,), {
'DESCRIPTOR' : _CACHECAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.CacheCapabilities)
})
_sym_db.RegisterMessage(CacheCapabilities)
ExecutionCapabilities = _reflection.GeneratedProtocolMessageType('ExecutionCapabilities', (_message.Message,), {
'DESCRIPTOR' : _EXECUTIONCAPABILITIES,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ExecutionCapabilities)
})
_sym_db.RegisterMessage(ExecutionCapabilities)
ToolDetails = _reflection.GeneratedProtocolMessageType('ToolDetails', (_message.Message,), {
'DESCRIPTOR' : _TOOLDETAILS,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.ToolDetails)
})
_sym_db.RegisterMessage(ToolDetails)
RequestMetadata = _reflection.GeneratedProtocolMessageType('RequestMetadata', (_message.Message,), {
'DESCRIPTOR' : _REQUESTMETADATA,
'__module__' : 'build.bazel.remote.execution.v2.remote_execution_pb2'
# @@protoc_insertion_point(class_scope:build.bazel.remote.execution.v2.RequestMetadata)
})
_sym_db.RegisterMessage(RequestMetadata)
DESCRIPTOR._options = None
_EXECUTERESPONSE_SERVERLOGSENTRY._options = None
# Service descriptor for build.bazel.remote.execution.v2.Execution
# (Execute, WaitExecution). The serialized_options bytes on each method
# carry the google.api.http REST binding — the URL template is visible
# inside the bytes (e.g. "/v2/{instance_name=**}/actions:execute").
_EXECUTION = _descriptor.ServiceDescriptor(
  name='Execution',
  full_name='build.bazel.remote.execution.v2.Execution',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  # Byte offsets of this service inside the serialized FileDescriptorProto.
  serialized_start=8479,
  serialized_end=8792,
  methods=[
  _descriptor.MethodDescriptor(
    name='Execute',
    full_name='build.bazel.remote.execution.v2.Execution.Execute',
    index=0,
    containing_service=None,
    input_type=_EXECUTEREQUEST,
    # Returns a long-running Operation (execution is asynchronous).
    output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
    serialized_options=b'\202\323\344\223\002+\"&/v2/{instance_name=**}/actions:execute:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='WaitExecution',
    full_name='build.bazel.remote.execution.v2.Execution.WaitExecution',
    index=1,
    containing_service=None,
    input_type=_WAITEXECUTIONREQUEST,
    output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
    serialized_options=b'\202\323\344\223\002+\"&/v2/{name=operations/**}:waitExecution:\001*',
    create_key=_descriptor._internal_create_key,
  ),
])
_sym_db.RegisterServiceDescriptor(_EXECUTION)
DESCRIPTOR.services_by_name['Execution'] = _EXECUTION
# Service descriptor for build.bazel.remote.execution.v2.ActionCache
# (GetActionResult, UpdateActionResult). serialized_options embed the
# google.api.http REST bindings keyed by the action digest.
_ACTIONCACHE = _descriptor.ServiceDescriptor(
  name='ActionCache',
  full_name='build.bazel.remote.execution.v2.ActionCache',
  file=DESCRIPTOR,
  index=1,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  # Byte offsets of this service inside the serialized FileDescriptorProto.
  serialized_start=8795,
  serialized_end=9265,
  methods=[
  _descriptor.MethodDescriptor(
    name='GetActionResult',
    full_name='build.bazel.remote.execution.v2.ActionCache.GetActionResult',
    index=0,
    containing_service=None,
    input_type=_GETACTIONRESULTREQUEST,
    output_type=_ACTIONRESULT,
    serialized_options=b'\202\323\344\223\002V\022T/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='UpdateActionResult',
    full_name='build.bazel.remote.execution.v2.ActionCache.UpdateActionResult',
    index=1,
    containing_service=None,
    input_type=_UPDATEACTIONRESULTREQUEST,
    output_type=_ACTIONRESULT,
    serialized_options=b'\202\323\344\223\002e\032T/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}:\raction_result',
    create_key=_descriptor._internal_create_key,
  ),
])
_sym_db.RegisterServiceDescriptor(_ACTIONCACHE)
DESCRIPTOR.services_by_name['ActionCache'] = _ACTIONCACHE
# Service descriptor for build.bazel.remote.execution.v2.ContentAddressableStorage
# (FindMissingBlobs, BatchUpdateBlobs, BatchReadBlobs, GetTree).
# serialized_options embed the google.api.http REST bindings.
_CONTENTADDRESSABLESTORAGE = _descriptor.ServiceDescriptor(
  name='ContentAddressableStorage',
  full_name='build.bazel.remote.execution.v2.ContentAddressableStorage',
  file=DESCRIPTOR,
  index=2,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  # Byte offsets of this service inside the serialized FileDescriptorProto.
  serialized_start=9268,
  serialized_end=10063,
  methods=[
  _descriptor.MethodDescriptor(
    name='FindMissingBlobs',
    full_name='build.bazel.remote.execution.v2.ContentAddressableStorage.FindMissingBlobs',
    index=0,
    containing_service=None,
    input_type=_FINDMISSINGBLOBSREQUEST,
    output_type=_FINDMISSINGBLOBSRESPONSE,
    serialized_options=b'\202\323\344\223\002-\"(/v2/{instance_name=**}/blobs:findMissing:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='BatchUpdateBlobs',
    full_name='build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs',
    index=1,
    containing_service=None,
    input_type=_BATCHUPDATEBLOBSREQUEST,
    output_type=_BATCHUPDATEBLOBSRESPONSE,
    serialized_options=b'\202\323\344\223\002-\"(/v2/{instance_name=**}/blobs:batchUpdate:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='BatchReadBlobs',
    full_name='build.bazel.remote.execution.v2.ContentAddressableStorage.BatchReadBlobs',
    index=2,
    containing_service=None,
    input_type=_BATCHREADBLOBSREQUEST,
    output_type=_BATCHREADBLOBSRESPONSE,
    serialized_options=b'\202\323\344\223\002+\"&/v2/{instance_name=**}/blobs:batchRead:\001*',
    create_key=_descriptor._internal_create_key,
  ),
  _descriptor.MethodDescriptor(
    name='GetTree',
    full_name='build.bazel.remote.execution.v2.ContentAddressableStorage.GetTree',
    index=3,
    containing_service=None,
    input_type=_GETTREEREQUEST,
    output_type=_GETTREERESPONSE,
    serialized_options=b'\202\323\344\223\002R\022P/v2/{instance_name=**}/blobs/{root_digest.hash}/{root_digest.size_bytes}:getTree',
    create_key=_descriptor._internal_create_key,
  ),
])
_sym_db.RegisterServiceDescriptor(_CONTENTADDRESSABLESTORAGE)
DESCRIPTOR.services_by_name['ContentAddressableStorage'] = _CONTENTADDRESSABLESTORAGE
# Service descriptor for build.bazel.remote.execution.v2.Capabilities
# (single GetCapabilities method). serialized_options embed the
# google.api.http REST binding.
_CAPABILITIES = _descriptor.ServiceDescriptor(
  name='Capabilities',
  full_name='build.bazel.remote.execution.v2.Capabilities',
  file=DESCRIPTOR,
  index=3,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  # Byte offsets of this service inside the serialized FileDescriptorProto.
  serialized_start=10066,
  serialized_end=10255,
  methods=[
  _descriptor.MethodDescriptor(
    name='GetCapabilities',
    full_name='build.bazel.remote.execution.v2.Capabilities.GetCapabilities',
    index=0,
    containing_service=None,
    input_type=_GETCAPABILITIESREQUEST,
    output_type=_SERVERCAPABILITIES,
    serialized_options=b'\202\323\344\223\002%\022#/v2/{instance_name=**}/capabilities',
    create_key=_descriptor._internal_create_key,
  ),
])
_sym_db.RegisterServiceDescriptor(_CAPABILITIES)
DESCRIPTOR.services_by_name['Capabilities'] = _CAPABILITIES
# @@protoc_insertion_point(module_scope)
| 47.329907
| 15,290
| 0.774729
| 18,068
| 147,338
| 5.996126
| 0.039628
| 0.03766
| 0.074397
| 0.081227
| 0.780123
| 0.749082
| 0.728129
| 0.714052
| 0.654701
| 0.610921
| 0
| 0.037448
| 0.104657
| 147,338
| 3,112
| 15,291
| 47.345116
| 0.783805
| 0.032415
| 0
| 0.695908
| 1
| 0.010749
| 0.230448
| 0.191914
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003814
| 0
| 0.003814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
63f299db707516b42cb72cbfc473356cfc799bd9
| 9,182
|
py
|
Python
|
tests/test_background_loader.py
|
pjeanjean/dakara-player
|
0251f42ab86a3ae8fdfc2bb61d156527807dedf2
|
[
"MIT"
] | null | null | null |
tests/test_background_loader.py
|
pjeanjean/dakara-player
|
0251f42ab86a3ae8fdfc2bb61d156527807dedf2
|
[
"MIT"
] | null | null | null |
tests/test_background_loader.py
|
pjeanjean/dakara-player
|
0251f42ab86a3ae8fdfc2bb61d156527807dedf2
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from unittest.mock import call, patch
from path import Path
from dakara_player_vlc.background_loader import (
BackgroundLoader,
BackgroundNotFoundError,
)
class BackgroundLoaderTestCase(TestCase):
    """Test the loader for backgrounds."""

    # Dotted path patched in every test so `exists` never touches the disk.
    EXISTS = "dakara_player_vlc.background_loader.exists"

    def _make_loader(self, **kwargs):
        """Build a BackgroundLoader with the shared default directory/filenames.

        Any keyword passed explicitly overrides the shared defaults.
        """
        kwargs.setdefault("default_directory", Path("default"))
        kwargs.setdefault(
            "default_background_filenames", {"background": "background.png"}
        )
        return BackgroundLoader(**kwargs)

    @patch(EXISTS, return_value=True, autospec=True)
    def test_load_default_name_default_directory(self, mocked_exists):
        """Test to load one default background from default directory."""
        loader = self._make_loader()

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        expected = Path("default/background.png").normpath()
        self.assertDictEqual(loader.backgrounds, {"background": expected})
        mocked_exists.assert_called_with(expected)

    @patch(EXISTS, return_value=True, autospec=True)
    def test_load_default_name_custom_directory(self, mocked_exists):
        """Test to load one default background from custom directory."""
        loader = self._make_loader(directory=Path("custom"))

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        expected = Path("custom/background.png").normpath()
        self.assertDictEqual(loader.backgrounds, {"background": expected})
        mocked_exists.assert_called_with(expected)

    @patch(EXISTS, return_value=True, autospec=True)
    def test_load_custom_name_custom_directory(self, mocked_exists):
        """Test to load one custom background from custom directory."""
        loader = self._make_loader(
            directory=Path("custom"),
            background_filenames={"background": "custom.png"},
        )

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        expected = Path("custom/custom.png").normpath()
        self.assertDictEqual(loader.backgrounds, {"background": expected})
        mocked_exists.assert_called_with(expected)

    @patch(EXISTS, return_value=True, autospec=True)
    def test_load_custom_name_default_directory(self, mocked_exists):
        """Test to load one custom background from default directory.

        Should load default background from default directory.
        """
        loader = self._make_loader(
            background_filenames={"background": "other.png"}
        )

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        expected = Path("default/background.png").normpath()
        self.assertDictEqual(loader.backgrounds, {"background": expected})
        mocked_exists.assert_called_with(expected)

    @patch("dakara_player_vlc.background_loader.exists", autospec=True)
    def test_load_fallback_default_name_custom_directory(self, mocked_exists):
        """Test to fallback to load one default background from custom directory.

        Was initially trying to load one custom background from custom directory.
        """
        loader = self._make_loader(
            directory=Path("custom"),
            background_filenames={"background": "custom.png"},
        )

        # first candidate is missing, second one exists
        mocked_exists.side_effect = [False, True]

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        self.assertDictEqual(
            loader.backgrounds,
            {"background": Path("custom/background.png").normpath()},
        )
        mocked_exists.assert_has_calls(
            [
                call(Path("custom/custom.png").normpath()),
                call(Path("custom/background.png").normpath()),
            ]
        )

    @patch("dakara_player_vlc.background_loader.exists", autospec=True)
    def test_load_fallback_default_name_default_directory(self, mocked_exists):
        """Test to fallback to load one default background from default directory.

        Was initially trying to load one custom background from custom directory.
        """
        loader = self._make_loader(
            directory=Path("custom"),
            background_filenames={"background": "custom.png"},
        )

        # only the third candidate (default dir, default name) exists
        mocked_exists.side_effect = [False, False, True]

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        self.assertDictEqual(
            loader.backgrounds,
            {"background": Path("default/background.png").normpath()},
        )
        mocked_exists.assert_has_calls(
            [
                call(Path("custom/custom.png").normpath()),
                call(Path("custom/background.png").normpath()),
                call(Path("default/background.png").normpath()),
            ]
        )

    @patch("dakara_player_vlc.background_loader.exists", autospec=True)
    def test_load_error(self, mocked_exists):
        """Test to load one unexisting background.

        Was initially trying to load one custom background from custom directory.
        """
        loader = self._make_loader(
            directory=Path("custom"),
            background_filenames={"background": "custom.png"},
        )

        # no candidate exists anywhere
        mocked_exists.side_effect = [False, False, False]

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        with self.assertRaises(BackgroundNotFoundError) as error:
            loader.load()

        self.assertEqual(
            str(error.exception), "Unable to find a background file for background"
        )

        # still nothing loaded after the failure
        self.assertDictEqual(loader.backgrounds, {})
        mocked_exists.assert_has_calls(
            [
                call(Path("custom/custom.png").normpath()),
                call(Path("custom/background.png").normpath()),
                call(Path("default/background.png").normpath()),
            ]
        )

    @patch(EXISTS, return_value=True, autospec=True)
    def test_load_none_filename(self, mocked_exists):
        """Test to load a None custom filename."""
        loader = self._make_loader(
            directory=Path("custom"),
            background_filenames={"background": None},
        )

        # nothing is loaded yet
        self.assertDictEqual(loader.backgrounds, {})

        loader.load()

        # a None custom name falls back to the default filename
        expected = Path("custom/background.png").normpath()
        self.assertDictEqual(loader.backgrounds, {"background": expected})
        mocked_exists.assert_called_with(expected)
| 34.133829
| 88
| 0.629928
| 913
| 9,182
| 6.185104
| 0.085433
| 0.052948
| 0.085001
| 0.102001
| 0.917478
| 0.903666
| 0.899061
| 0.884009
| 0.870551
| 0.868249
| 0
| 0
| 0.272707
| 9,182
| 268
| 89
| 34.261194
| 0.845612
| 0.214332
| 0
| 0.629139
| 0
| 0
| 0.177529
| 0.093228
| 0
| 0
| 0
| 0
| 0.172185
| 1
| 0.05298
| false
| 0
| 0.02649
| 0
| 0.086093
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
63fed1e95095df19d7226e31196eefc5e4b8f627
| 192
|
py
|
Python
|
flixed_django/flixedREST/admin.py
|
nilesh1168/flixed-movie-tracker
|
1ca1c9c74731596e386da001d393230fb86045af
|
[
"MIT"
] | null | null | null |
flixed_django/flixedREST/admin.py
|
nilesh1168/flixed-movie-tracker
|
1ca1c9c74731596e386da001d393230fb86045af
|
[
"MIT"
] | null | null | null |
flixed_django/flixedREST/admin.py
|
nilesh1168/flixed-movie-tracker
|
1ca1c9c74731596e386da001d393230fb86045af
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import WatchedMovie, WatchList

# Expose both models in the Django admin interface.
admin.site.register(WatchList)
admin.site.register(WatchedMovie)
| 21.333333
| 33
| 0.822917
| 25
| 192
| 6.32
| 0.48
| 0.126582
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109375
| 192
| 8
| 34
| 24
| 0.923977
| 0.135417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
124e3939cb7bb6bb93fa8b02b9751c3c18e7de31
| 234
|
py
|
Python
|
tools/update.py
|
HadesD/LaraCC
|
cb4e52bb87c777042e68e54a52be73a1a1e3abbd
|
[
"MIT"
] | null | null | null |
tools/update.py
|
HadesD/LaraCC
|
cb4e52bb87c777042e68e54a52be73a1a1e3abbd
|
[
"MIT"
] | null | null | null |
tools/update.py
|
HadesD/LaraCC
|
cb4e52bb87c777042e68e54a52be73a1a1e3abbd
|
[
"MIT"
] | null | null | null |
import os
import subprocess

# Shared prefix: run a git command inside every registered submodule.
GIT_FOREACH = ['git', 'submodule', 'foreach', 'git']

# Put each submodule on its master branch, then pull the latest commits.
subprocess.call(GIT_FOREACH + ['checkout', 'master'])
subprocess.call(GIT_FOREACH + ['pull', 'origin', 'master'])
| 10.173913
| 17
| 0.538462
| 21
| 234
| 6
| 0.52381
| 0.222222
| 0.269841
| 0.412698
| 0.571429
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0.260684
| 234
| 22
| 18
| 10.636364
| 0.728324
| 0
| 0
| 0.736842
| 0
| 0
| 0.317597
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.105263
| 0
| 0.105263
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d6212ab5ad1d4d22a48ee2e0feb4f8d830252b93
| 803
|
py
|
Python
|
services/docService/app/models.py
|
anaquin135/modularCPQ
|
af8575a407813c6ef3e3c0ca3258266f0bc6a4e7
|
[
"MIT"
] | null | null | null |
services/docService/app/models.py
|
anaquin135/modularCPQ
|
af8575a407813c6ef3e3c0ca3258266f0bc6a4e7
|
[
"MIT"
] | null | null | null |
services/docService/app/models.py
|
anaquin135/modularCPQ
|
af8575a407813c6ef3e3c0ca3258266f0bc6a4e7
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from app import db
class DOCUMENT(db.Model):
    """Flask-SQLAlchemy model for a document template."""

    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    # Human-readable label; a placeholder is stored when none is given.
    description = db.Column(
        db.String(300), default='Missing Description', nullable=False
    )
    # Soft-delete / visibility flag; new rows start active.
    isActive = db.Column(db.Boolean, default=True, nullable=False)
    # Path or identifier of the template — required.
    template = db.Column(db.String(300), nullable=False)
class TERMS(db.Model):
    """Flask-SQLAlchemy model for a terms-and-conditions template."""

    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True, nullable=False)
    # Human-readable label; a placeholder is stored when none is given.
    description = db.Column(
        db.String(300), default='Missing Description', nullable=False
    )
    # Soft-delete / visibility flag; new rows start active.
    isActive = db.Column(db.Boolean, default=True, nullable=False)
    # Path or identifier of the template — required.
    template = db.Column(db.String(300), nullable=False)
    # Part numbers this terms block applies to; 'ALL' means unrestricted.
    enabledPNs = db.Column(db.Text, default='ALL', nullable=False)
| 50.1875
| 95
| 0.663761
| 101
| 803
| 5.257426
| 0.277228
| 0.135593
| 0.169492
| 0.120527
| 0.79096
| 0.79096
| 0.79096
| 0.79096
| 0.79096
| 0.79096
| 0
| 0.018987
| 0.212951
| 803
| 15
| 96
| 53.533333
| 0.821203
| 0
| 0
| 0.615385
| 0
| 0
| 0.051059
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
d62f24fb23f92f07d9930e9a9e5cfad8f3404a18
| 1,040
|
py
|
Python
|
units/volume/u_s_tablespoons.py
|
putridparrot/PyUnits
|
4f1095c6fc0bee6ba936921c391913dbefd9307c
|
[
"MIT"
] | null | null | null |
units/volume/u_s_tablespoons.py
|
putridparrot/PyUnits
|
4f1095c6fc0bee6ba936921c391913dbefd9307c
|
[
"MIT"
] | null | null | null |
units/volume/u_s_tablespoons.py
|
putridparrot/PyUnits
|
4f1095c6fc0bee6ba936921c391913dbefd9307c
|
[
"MIT"
] | null | null | null |
# <auto-generated>
# This code was generated by the UnitCodeGenerator tool
#
# Changes to this file will be lost if the code is regenerated
# </auto-generated>
def to_millilitres(value):
    """Convert U.S. tablespoons to millilitres."""
    return 14.786764781249998848 * value

def to_litres(value):
    """Convert U.S. tablespoons to litres."""
    return 0.014786764781249998848 * value

def to_kilolitres(value):
    """Convert U.S. tablespoons to kilolitres."""
    return 0.000014786764781249998 * value

def to_teaspoons(value):
    """Convert U.S. tablespoons to (imperial) teaspoons."""
    return 2.4980215213991718912 * value

def to_tablespoons(value):
    """Convert U.S. tablespoons to (imperial) tablespoons."""
    return 0.83267384046639071232 * value

def to_quarts(value):
    """Convert U.S. tablespoons to (imperial) quarts."""
    return 0.013010528757287354368 * value

def to_pints(value):
    """Convert U.S. tablespoons to (imperial) pints."""
    return 0.026021057514574708736 * value

def to_gallons(value):
    """Convert U.S. tablespoons to (imperial) gallons."""
    return 0.003252632189321838592 * value

def to_fluid_ounces(value):
    """Convert U.S. tablespoons to (imperial) fluid ounces."""
    return 0.52042115029149417472 * value

def to_u_s_teaspoons(value):
    """Convert U.S. tablespoons to U.S. teaspoons (3 tsp per tbsp)."""
    return 3.0 * value

def to_u_s_quarts(value):
    """Convert U.S. tablespoons to U.S. quarts (64 tbsp per quart)."""
    return value / 64.0

def to_u_s_pints(value):
    """Convert U.S. tablespoons to U.S. pints (32 tbsp per pint)."""
    return value / 32.0

def to_u_s_gallons(value):
    """Convert U.S. tablespoons to U.S. gallons (256 tbsp per gallon)."""
    return value / 256.0

def to_u_s_fluid_ounces(value):
    """Convert U.S. tablespoons to U.S. fluid ounces (2 tbsp per fl oz)."""
    return value / 2.0

def to_u_s_cups(value):
    """Convert U.S. tablespoons to U.S. cups (16 tbsp per cup)."""
    return value / 16.0
| 27.368421
| 62
| 0.773077
| 158
| 1,040
| 4.905063
| 0.322785
| 0.096774
| 0.309677
| 0.153548
| 0.12129
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234568
| 0.143269
| 1,040
| 37
| 63
| 28.108108
| 0.635241
| 0.143269
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d638abe9246e84b142c0c7b3b89be6f19d7682be
| 2,310
|
py
|
Python
|
epytope/Data/pssms/smm/mat/A_02_03_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 7
|
2021-02-01T18:11:28.000Z
|
2022-01-31T19:14:07.000Z
|
epytope/Data/pssms/smm/mat/A_02_03_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 22
|
2021-01-02T15:25:23.000Z
|
2022-03-14T11:32:53.000Z
|
epytope/Data/pssms/smm/mat/A_02_03_9.py
|
christopher-mohr/epytope
|
8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd
|
[
"BSD-3-Clause"
] | 4
|
2021-05-28T08:50:38.000Z
|
2022-03-14T11:45:32.000Z
|
# SMM (stabilized matrix method) scoring matrix for HLA-A*02:03, 9-mer
# peptides. Outer keys 0-8 index the peptide position; each maps one-letter
# amino-acid codes to that position's score contribution. The key -1 holds
# the regression constant 'con' — presumably added to the summed positional
# scores to form the final prediction (confirm against the SMM predictor).
A_02_03_9 = {0: {'A': -0.168, 'C': -0.059, 'E': 0.895, 'D': 0.802, 'G': -0.046, 'F': -0.763, 'I': -0.133, 'H': 0.193, 'K': -0.118, 'M': -0.385, 'L': -0.198, 'N': 0.0, 'Q': 0.192, 'P': 0.615, 'S': -0.102, 'R': -0.076, 'T': 0.081, 'W': -0.063, 'V': -0.097, 'Y': -0.569}, 1: {'A': -0.131, 'C': 0.625, 'E': 0.554, 'D': 0.46, 'G': 0.435, 'F': -0.038, 'I': -0.889, 'H': 0.755, 'K': 0.371, 'M': -1.607, 'L': -1.559, 'N': 0.689, 'Q': -0.636, 'P': 0.84, 'S': -0.111, 'R': 0.506, 'T': -0.403, 'W': 0.407, 'V': -0.636, 'Y': 0.37}, 2: {'A': -0.384, 'C': 0.111, 'E': 0.728, 'D': 0.492, 'G': 0.288, 'F': -0.266, 'I': -0.301, 'H': -0.172, 'K': -0.008, 'M': -0.545, 'L': 0.246, 'N': -0.324, 'Q': 0.129, 'P': 0.056, 'S': -0.178, 'R': 0.102, 'T': 0.226, 'W': 0.321, 'V': -0.083, 'Y': -0.439}, 3: {'A': -0.09, 'C': -0.124, 'E': -0.19, 'D': -0.263, 'G': -0.031, 'F': -0.051, 'I': 0.129, 'H': -0.012, 'K': 0.111, 'M': -0.015, 'L': 0.166, 'N': -0.002, 'Q': 0.172, 'P': -0.059, 'S': -0.083, 'R': 0.124, 'T': 0.032, 'W': 0.031, 'V': 0.188, 'Y': -0.032}, 4: {'A': 0.017, 'C': 0.053, 'E': 0.109, 'D': 0.065, 'G': -0.065, 'F': -0.031, 'I': -0.07, 'H': -0.241, 'K': -0.003, 'M': 0.0, 'L': 0.036, 'N': 0.054, 'Q': 0.092, 'P': 0.226, 'S': -0.029, 'R': 0.056, 'T': 0.088, 'W': -0.168, 'V': -0.072, 'Y': -0.117}, 5: {'A': 0.044, 'C': 0.041, 'E': 0.349, 'D': 0.357, 'G': 0.065, 'F': -0.133, 'I': -0.341, 'H': 0.003, 'K': 0.456, 'M': -0.269, 'L': -0.431, 'N': 0.199, 'Q': 0.128, 'P': -0.105, 'S': -0.109, 'R': 0.278, 'T': -0.13, 'W': 0.128, 'V': -0.396, 'Y': -0.137}, 6: {'A': -0.227, 'C': 0.029, 'E': 0.176, 'D': 0.205, 'G': -0.114, 'F': 0.047, 'I': 0.055, 'H': -0.195, 'K': 0.28, 'M': -0.152, 'L': 0.053, 'N': 0.001, 'Q': 0.047, 'P': 0.081, 'S': -0.274, 'R': 0.258, 'T': -0.279, 'W': 0.021, 'V': 0.045, 'Y': -0.057}, 7: {'A': -0.099, 'C': 0.197, 'E': 0.003, 'D': 0.3, 'G': -0.083, 'F': -0.111, 'I': 0.118, 'H': -0.088, 'K': 0.111, 'M': 0.1, 'L': -0.061, 'N': -0.064, 'Q': 0.056, 'P': -0.178, 'S': -0.137, 'R': 0.068, 'T': -0.018, 
'W': -0.128, 'V': 0.174, 'Y': -0.16}, 8: {'A': -0.935, 'C': 0.387, 'E': 0.429, 'D': 0.059, 'G': -0.146, 'F': 0.318, 'I': -0.925, 'H': 0.795, 'K': 0.975, 'M': -0.255, 'L': -0.793, 'N': -0.19, 'Q': 0.783, 'P': -0.302, 'S': 0.173, 'R': 0.497, 'T': -0.292, 'W': 0.293, 'V': -1.498, 'Y': 0.628}, -1: {'con': 4.65395}}
| 2,310
| 2,310
| 0.393939
| 557
| 2,310
| 1.628366
| 0.310592
| 0.019846
| 0.011025
| 0.01323
| 0.046307
| 0
| 0
| 0
| 0
| 0
| 0
| 0.373258
| 0.161472
| 2,310
| 1
| 2,310
| 2,310
| 0.094992
| 0
| 0
| 0
| 0
| 0
| 0.079187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d64b8cb0f895a18a9241423722106d183038db90
| 100
|
py
|
Python
|
llvm/utils/lit/tests/Inputs/shtest-not/fail.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 2,338
|
2018-06-19T17:34:51.000Z
|
2022-03-31T11:00:37.000Z
|
llvm/utils/lit/tests/Inputs/shtest-not/fail.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 11,789
|
2015-01-05T04:50:15.000Z
|
2022-03-31T23:39:19.000Z
|
llvm/utils/lit/tests/Inputs/shtest-not/fail.py
|
medismailben/llvm-project
|
e334a839032fe500c3bba22bf976ab7af13ce1c1
|
[
"Apache-2.0"
] | 500
|
2019-01-23T07:49:22.000Z
|
2022-03-30T02:59:37.000Z
|
#!/usr/bin/env python
# lit shtest fixture: run the sibling helper, then exit non-zero so the
# surrounding test can observe a failing command.
# NOTE(review): print_environment is a sibling fixture module — assumed to
# dump the process environment; confirm against its source.
import print_environment
import sys

print_environment.execute()
# Always fail: this script's purpose is to produce a non-zero exit status.
sys.exit(1)
| 12.5
| 27
| 0.79
| 15
| 100
| 5.133333
| 0.733333
| 0.415584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011111
| 0.1
| 100
| 7
| 28
| 14.285714
| 0.844444
| 0.2
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
c389f962dad891fac1010c6fc5b54b86b52efdc4
| 49
|
py
|
Python
|
graphzoo/dataloader/__init__.py
|
AnoushkaVyas/GraphZoo
|
a0013ac3a5eed6b46810d20b75f51bb7be812b3d
|
[
"MIT"
] | 2
|
2022-03-30T01:11:39.000Z
|
2022-03-30T11:08:12.000Z
|
graphzoo/dataloader/__init__.py
|
AnoushkaVyas/GraphZoo
|
a0013ac3a5eed6b46810d20b75f51bb7be812b3d
|
[
"MIT"
] | null | null | null |
graphzoo/dataloader/__init__.py
|
AnoushkaVyas/GraphZoo
|
a0013ac3a5eed6b46810d20b75f51bb7be812b3d
|
[
"MIT"
] | 2
|
2022-01-27T21:03:40.000Z
|
2022-03-15T20:20:12.000Z
|
# Re-export the public names of the dataloader and download submodules so
# callers can import them directly from the graphzoo.dataloader package.
from .dataloader import *
from .download import *
| 24.5
| 25
| 0.77551
| 6
| 49
| 6.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 49
| 2
| 26
| 24.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
c3cbe7803244bbe8d1cad750386bbf0c7dd12518
| 289
|
py
|
Python
|
challenges/8.3.Function_Documentation_Strings/lesson_tests.py
|
pradeepsaiu/python-coding-challenges
|
b435ab650d85de267eeaa31a55ff77ef5dbff86b
|
[
"BSD-3-Clause"
] | 141
|
2017-05-07T00:38:22.000Z
|
2022-03-25T10:14:25.000Z
|
challenges/8.3.Function_Documentation_Strings/lesson_tests.py
|
pradeepsaiu/python-coding-challenges
|
b435ab650d85de267eeaa31a55ff77ef5dbff86b
|
[
"BSD-3-Clause"
] | 23
|
2017-05-06T23:57:37.000Z
|
2018-03-23T19:07:32.000Z
|
challenges/8.3.Function_Documentation_Strings/lesson_tests.py
|
pradeepsaiu/python-coding-challenges
|
b435ab650d85de267eeaa31a55ff77ef5dbff86b
|
[
"BSD-3-Clause"
] | 143
|
2017-05-07T09:33:35.000Z
|
2022-03-12T21:04:13.000Z
|
import unittest
from main import *
class FunctionDocumentationStringsTests(unittest.TestCase):
    """Check that docstring_function carries a documentation string."""

    def test_main(self):
        # The function itself is expected to return nothing.
        self.assertIsNone(docstring_function())
        # Its __doc__ attribute must be a real (non-None) string.
        doc = docstring_function.__doc__
        self.assertIsNotNone(doc)
        self.assertIsInstance(doc, str)
| 32.111111
| 62
| 0.775087
| 28
| 289
| 7.571429
| 0.607143
| 0.240566
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148789
| 289
| 8
| 63
| 36.125
| 0.861789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.142857
| false
| 0
| 0.285714
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
c3f7e6570cfddccaa1b153a44c093eedd8a3627c
| 7,151
|
py
|
Python
|
user_interface/test/test_dakota_class.py
|
ukaea/ALC_UQ
|
a2747c94036b04f1279abb5683c6a225a878aea3
|
[
"Apache-2.0"
] | 2
|
2021-11-24T10:43:50.000Z
|
2021-12-07T20:02:38.000Z
|
user_interface/test/test_dakota_class.py
|
ukaea/ALC_UQ
|
a2747c94036b04f1279abb5683c6a225a878aea3
|
[
"Apache-2.0"
] | null | null | null |
user_interface/test/test_dakota_class.py
|
ukaea/ALC_UQ
|
a2747c94036b04f1279abb5683c6a225a878aea3
|
[
"Apache-2.0"
] | null | null | null |
from dakota_class import DakotaClass
from exceptions import *
import unittest
import xarray as xr
import numpy as np
import os
class TestDakotaClass(unittest.TestCase):
    """Tests for DakotaClass: template defaults, run settings, variable
    handling, input-file writing, and rejection of malformed datasets."""

    # Try and create an instance of the dakota class
    def test_create_dakota_template(self):
        dak = DakotaClass()
        self.assertEqual(dak.dakota.get_attribute('evaluation_concurrency'), 1)
        self.assertEqual(dak.dakota.get_attribute('response_functions'), 1)

    def test_add_run_settings(self):
        new_settings = xr.Dataset(attrs={'sample_type': 'sampling', 'seed': 54})
        dak = DakotaClass()
        dak.update_settings(new_settings)
        # sample_type may be stored padded, hence the strip before comparing.
        self.assertEqual(dak.dakota.get_attribute('sample_type').strip(), 'sampling')
        self.assertEqual(dak.dakota.get_attribute('seed'), 54)

    def test_add_common_variable(self):
        mu = xr.DataArray(data=[1.0, 2.0, 3.0, 4.0], dims='T')
        sigma = xr.DataArray(data=[0.1, 0.2, 0.3, 0.4], dims='T')
        ds = xr.Dataset({'means': mu, 'std_deviations': sigma},
                        attrs={'type': 'normal'})
        dak = DakotaClass()
        dak.add_variable('test_var', ds)
        self.assertTrue(np.array_equal(dak.dakota.get_attribute('means'), mu))
        self.assertTrue(
            np.array_equal(dak.dakota.get_attribute('std_deviations'), sigma))

    def test_add_lognormal_variable(self):
        mu = xr.DataArray(data=[1.0, 2.0, 3.0, 4.0], dims='T')
        sigma = xr.DataArray(data=[0.1, 0.2, 0.3, 0.4], dims='T')
        ds = xr.Dataset({'means': mu, 'std_deviations': sigma},
                        attrs={'type': 'lognormal'})
        dak = DakotaClass()
        dak.add_variable('test_var', ds)
        self.assertTrue(np.array_equal(dak.dakota.get_attribute('means'), mu))
        self.assertTrue(
            np.array_equal(dak.dakota.get_attribute('std_deviations'), sigma))

    def test_add_scan_variable(self):
        lo = xr.DataArray(data=[0.1, 0.2, 0.3, 0.4], dims='T')
        hi = xr.DataArray(data=[1.0, 2.0, 3.0, 4.0], dims='T')
        parts = xr.DataArray(data=[2, 3, 4, 5], dims='T')
        ds = xr.Dataset(
            {'lower_bounds': lo, 'upper_bounds': hi, 'partitions': parts},
            attrs={'type': 'scan'})
        dak = DakotaClass()
        dak.add_variable('test_var', ds)
        self.assertTrue(np.array_equal(dak.dakota.get_attribute('lower_bounds'), lo))
        self.assertTrue(np.array_equal(dak.dakota.get_attribute('upper_bounds'), hi))

    def test_add_correlated_scan_variable(self):
        lo = xr.DataArray(data=[0.1, 0.2, 0.3, 0.4], dims='T')
        hi = xr.DataArray(data=[1.0, 2.0, 3.0, 4.0], dims='T')
        parts = xr.DataArray(data=[4, 4, 4, 4], dims='T')
        ds = xr.Dataset(
            {'lower_bounds': lo, 'upper_bounds': hi, 'partitions': parts},
            attrs={'type': 'scan_correlated'})
        dak = DakotaClass()
        dak.add_variable('test_var', ds)
        # Correlated scans collapse the bounds onto a single unit interval.
        self.assertTrue(
            np.array_equal(dak.dakota.get_attribute('lower_bounds'), [0.0]))
        self.assertTrue(
            np.array_equal(dak.dakota.get_attribute('upper_bounds'), [1.0]))

    def test_write_dakote_file(self):
        # NOTE(review): "dakote" looks like a typo for "dakota" in the test
        # name; kept unchanged to avoid renaming an existing test.
        dak = DakotaClass()
        dak.write_input_file('test_dakota.dat')
        self.assertTrue(os.path.isfile('test_dakota.dat'))
        os.remove('test_dakota.dat')

    ######################################################
    # FAILURE TESTS
    ######################################################
    def test_add_variable_not_dataset(self):
        # A plain dict is not an xarray Dataset and must be rejected.
        plain_dict = {'means': [1.0, 2.0, 3.0, 4.0],
                      'std_deviations': [0.1, 0.2, 0.3, 0.4]}
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', plain_dict)

    def test_add_variable_with_no_type(self):
        mu = xr.DataArray(data=[1.0, 2.0, 3.0, 4.0], dims='T')
        sigma = xr.DataArray(data=[0.1, 0.2, 0.3, 0.4], dims='T')
        ds = xr.Dataset({'means': mu, 'std_deviations': sigma})
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', ds)

    def test_add_variable_unknown_type(self):
        ds = xr.Dataset({'means': [1.0, 2.0, 3.0, 4.0],
                         'std_deviations': [0.1, 0.2, 0.3, 0.4]},
                        attrs={'type': 'unknown'})
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', ds)

    def test_add_variable_missing_data(self):
        # No std_deviations entry for a 'normal' variable.
        ds = xr.Dataset({'means': [1.0, 2.0, 3.0, 4.0]}, attrs={'type': 'normal'})
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', ds)

    def test_add_variable_incompatible_data(self):
        # Mismatched lengths: four means but five standard deviations.
        ds = xr.Dataset({'means': [1.0, 2.0, 3.0, 4.0],
                         'std_deviations': [0.1, 0.2, 0.3, 0.4, 0.5]},
                        attrs={'type': 'normal'})
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', ds)

    def test_add_variable_with_nans(self):
        ds = xr.Dataset({'means': [1.0, 2.0, np.nan, 4.0],
                         'std_deviations': [0.1, 0.2, 0.3, 0.4]},
                        attrs={'type': 'normal'})
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', ds)

    def test_add_correlated_scan_variable_with_inconsistent_partitions(self):
        lo = xr.DataArray(data=[0.1, 0.2, 0.3, 0.4], dims='T')
        hi = xr.DataArray(data=[1.0, 2.0, 3.0, 4.0], dims='T')
        # Correlated scans require identical partition counts; 5 breaks that.
        parts = xr.DataArray(data=[4, 5, 4, 4], dims='T')
        ds = xr.Dataset(
            {'lower_bounds': lo, 'upper_bounds': hi, 'partitions': parts},
            attrs={'type': 'scan_correlated'})
        dak = DakotaClass()
        with self.assertRaises(DatasetError):
            dak.add_variable('test_var', ds)
| 34.379808
| 116
| 0.583974
| 933
| 7,151
| 4.271168
| 0.102894
| 0.078294
| 0.015809
| 0.021079
| 0.818068
| 0.811292
| 0.78394
| 0.733501
| 0.73325
| 0.726223
| 0
| 0.03606
| 0.26318
| 7,151
| 207
| 117
| 34.545894
| 0.720251
| 0.00839
| 0
| 0.646154
| 0
| 0
| 0.095989
| 0.003152
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.107692
| false
| 0
| 0.046154
| 0
| 0.161538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7f1945310cda2125922872b0636e434f74923dba
| 8,413
|
py
|
Python
|
tests/_apis/test_BaseApi.py
|
physicsninja/Riot-Watcher
|
2cd51c2cade112d018dd515f282e248940429ea0
|
[
"MIT"
] | null | null | null |
tests/_apis/test_BaseApi.py
|
physicsninja/Riot-Watcher
|
2cd51c2cade112d018dd515f282e248940429ea0
|
[
"MIT"
] | null | null | null |
tests/_apis/test_BaseApi.py
|
physicsninja/Riot-Watcher
|
2cd51c2cade112d018dd515f282e248940429ea0
|
[
"MIT"
] | null | null | null |
import unittest
import sys
if sys.version_info > (3, 0):
from unittest.mock import MagicMock
from unittest import mock
else:
from mock import MagicMock
import mock
from riotwatcher._apis import BaseApi
class BaseApiTestCase(unittest.TestCase):
    """Tests for BaseApi: request/request_static/request_version must call the
    handler hooks with the built URL and honour the hooks' return values."""

    def setUp(self):
        self._expected_preview_return = object()
        self._expected_after_return = object()

        # Handler whose preview/after hooks return sentinel objects.
        hook_returns = {
            'preview_request': self._expected_preview_return,
            'after_request': self._expected_after_return,
            'preview_static_request': self._expected_preview_return,
            'after_static_request': self._expected_after_return,
        }
        self._request_handler_mock = MagicMock(name='request_handler')
        for hook, value in hook_returns.items():
            hook_mock = MagicMock(name=hook)
            hook_mock.return_value = value
            setattr(self._request_handler_mock, hook, hook_mock)

        # Handler whose hooks all return None (pass-through behaviour).
        self._request_handler_mock_no_return = MagicMock(name='request_handler')
        for hook in hook_returns:
            hook_mock = MagicMock(name=hook)
            hook_mock.return_value = None
            setattr(self._request_handler_mock_no_return, hook, hook_mock)

        self._api_key = 'sadf'
        self._mock_api_response = 'api_response_xx'

    def _make_api(self, handler):
        """Build a BaseApi instance backed by a single request handler."""
        return BaseApi(self._api_key, [handler])

    # BASE TESTS
    def test_base_api_request_preview_handler(self):
        api = self._make_api(self._request_handler_mock)
        api.request('endpoint_xx', 'method_xx', 'region_xx', 'url_xx',
                    extra='extra_xx')
        # preview gets region, endpoint, method, full URL and query kwargs.
        self._request_handler_mock.preview_request.assert_called_once_with(
            'region_xx',
            'endpoint_xx',
            'method_xx',
            'https://region_xx.api.riotgames.com%s' % 'url_xx',
            {'extra': 'extra_xx'}
        )

    def test_base_api_request_after_handler_with_preview_return(self):
        api = self._make_api(self._request_handler_mock)
        ret = api.request('endpoint_xx', 'method_xx', 'region_xx', 'url_xx',
                          extra='extra_xx')
        # after_request receives the preview hook's return value, and its own
        # return value is what the caller gets back.
        self._request_handler_mock.after_request.assert_called_once_with(
            'region_xx',
            'endpoint_xx',
            'method_xx',
            'https://region_xx.api.riotgames.com%s' % 'url_xx',
            self._expected_preview_return
        )
        self.assertEqual(ret, self._expected_after_return)

    @mock.patch('requests.get')
    def test_base_api_request_after_handler_with_preview_no_return(self, mock_get):
        mock_get.return_value = self._mock_api_response
        api = self._make_api(self._request_handler_mock_no_return)
        ret = api.request('endpoint_xx', 'method_xx', 'region_xx', 'url_xx',
                          extra='extra_xx')
        # With no preview return, the (mocked) HTTP response flows through.
        self._request_handler_mock_no_return.after_request.assert_called_once_with(
            'region_xx',
            'endpoint_xx',
            'method_xx',
            'https://region_xx.api.riotgames.com%s' % 'url_xx',
            self._mock_api_response
        )
        mock_get.assert_called_once_with(
            'https://region_xx.api.riotgames.com%s' % 'url_xx',
            headers={'X-Riot-Token': self._api_key},
            params={'extra': 'extra_xx'}
        )
        self.assertEqual(ret, self._mock_api_response)

    # STATIC TESTS
    def test_base_api_request_static_preview_handler(self):
        api = self._make_api(self._request_handler_mock)
        api.request_static('version_xx', 'locale_xx', 'url_xx')
        self._request_handler_mock.preview_static_request.assert_called_once_with(
            'version_xx',
            'locale_xx',
            'https://ddragon.leagueoflegends.com/cdn/%s/data/%s/%s.json'
            % ('version_xx', 'locale_xx', 'url_xx')
        )

    def test_base_api_request_static_after_handler_with_preview_return(self):
        api = self._make_api(self._request_handler_mock)
        ret = api.request_static('version_xx', 'locale_xx', 'url_xx')
        self._request_handler_mock.after_static_request.assert_called_once_with(
            'version_xx',
            'locale_xx',
            'https://ddragon.leagueoflegends.com/cdn/%s/data/%s/%s.json'
            % ('version_xx', 'locale_xx', 'url_xx'),
            self._expected_preview_return
        )
        self.assertEqual(ret, self._expected_after_return)

    @mock.patch('requests.get')
    def test_base_api_request_static_after_handler_with_preview_no_return(self, mock_get):
        mock_get.return_value = self._mock_api_response
        api = self._make_api(self._request_handler_mock_no_return)
        ret = api.request_static('version_xx', 'locale_xx', 'url_xx')
        self._request_handler_mock_no_return.after_static_request.assert_called_once_with(
            'version_xx',
            'locale_xx',
            'https://ddragon.leagueoflegends.com/cdn/%s/data/%s/%s.json'
            % ('version_xx', 'locale_xx', 'url_xx'),
            self._mock_api_response
        )
        # Static data is fetched with no API-key header and no query params.
        mock_get.assert_called_once_with(
            'https://ddragon.leagueoflegends.com/cdn/%s/data/%s/%s.json'
            % ('version_xx', 'locale_xx', 'url_xx'),
        )
        self.assertEqual(ret, self._mock_api_response)

    # VERSION TESTS
    def test_base_api_request_version_preview_handler(self):
        api = self._make_api(self._request_handler_mock)
        api.request_version('region_xx')
        # Version requests reuse the static hooks with empty version/locale.
        self._request_handler_mock.preview_static_request.assert_called_once_with(
            '',
            '',
            'https://ddragon.leagueoflegends.com/realms/%s.json' % 'region_xx'
        )

    def test_base_api_request_version_after_handler_with_preview_return(self):
        api = self._make_api(self._request_handler_mock)
        ret = api.request_version('region_xx')
        self._request_handler_mock.after_static_request.assert_called_once_with(
            '',
            '',
            'https://ddragon.leagueoflegends.com/realms/%s.json' % 'region_xx',
            self._expected_preview_return
        )
        self.assertEqual(ret, self._expected_after_return)

    @mock.patch('requests.get')
    def test_base_api_request_version_after_handler_with_preview_no_return(self, mock_get):
        mock_get.return_value = self._mock_api_response
        api = self._make_api(self._request_handler_mock_no_return)
        ret = api.request_version('region_xx')
        self._request_handler_mock_no_return.after_static_request.assert_called_once_with(
            '',
            '',
            'https://ddragon.leagueoflegends.com/realms/%s.json' % 'region_xx',
            self._mock_api_response
        )
        mock_get.assert_called_once_with(
            'https://ddragon.leagueoflegends.com/realms/%s.json' % 'region_xx',
        )
        self.assertEqual(ret, self._mock_api_response)
| 37.558036
| 110
| 0.687389
| 1,043
| 8,413
| 5.061361
| 0.073826
| 0.100777
| 0.122751
| 0.150028
| 0.924607
| 0.923849
| 0.899413
| 0.854139
| 0.812654
| 0.795037
| 0
| 0.000306
| 0.223939
| 8,413
| 223
| 111
| 37.726457
| 0.80824
| 0.0126
| 0
| 0.618182
| 0
| 0
| 0.127455
| 0.005301
| 0
| 0
| 0
| 0
| 0.109091
| 1
| 0.060606
| false
| 0
| 0.042424
| 0
| 0.109091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6132d8062e0edd99c41980d1a5cf42b15cfb1c1f
| 19
|
py
|
Python
|
code/__init__.py
|
deekay2310/SE21_HW2B_Group6
|
8348c9a225231d370d8f579f619896bd7aace3ad
|
[
"MIT"
] | 16
|
2022-01-11T00:32:20.000Z
|
2022-03-25T21:40:52.000Z
|
code/__init__.py
|
deekay2310/SE21_HW2B_Group6
|
8348c9a225231d370d8f579f619896bd7aace3ad
|
[
"MIT"
] | 12
|
2021-07-05T11:42:01.000Z
|
2021-12-23T07:57:54.000Z
|
code/__init__.py
|
deekay2310/SE21_HW2B_Group6
|
8348c9a225231d370d8f579f619896bd7aace3ad
|
[
"MIT"
] | 3
|
2021-09-10T13:21:54.000Z
|
2021-11-23T11:37:55.000Z
|
from math import *
| 9.5
| 18
| 0.736842
| 3
| 19
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 19
| 1
| 19
| 19
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6136db8809b3410249fd1549a8ef68188c141846
| 30
|
py
|
Python
|
singleton/print_version.py
|
Tomvictor/python-design-patterns
|
6b99607d721bbe03d26a0a451a10e88cd1c1d112
|
[
"MIT"
] | null | null | null |
singleton/print_version.py
|
Tomvictor/python-design-patterns
|
6b99607d721bbe03d26a0a451a10e88cd1c1d112
|
[
"MIT"
] | null | null | null |
singleton/print_version.py
|
Tomvictor/python-design-patterns
|
6b99607d721bbe03d26a0a451a10e88cd1c1d112
|
[
"MIT"
] | null | null | null |
import sys

# Report the running interpreter's version string on stdout (identical output
# to ``print(sys.version)``: the version text followed by a newline).
sys.stdout.write(sys.version + "\n")
| 15
| 18
| 0.8
| 5
| 30
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 2
| 18
| 15
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
4ef07e4a7ac6fb47cb6a90de342243f285e64310
| 33,702
|
py
|
Python
|
tests/data_generation.py
|
drabenoro/geomstats
|
4a758aea808a127ed3afef4e330733757fb07846
|
[
"MIT"
] | null | null | null |
tests/data_generation.py
|
drabenoro/geomstats
|
4a758aea808a127ed3afef4e330733757fb07846
|
[
"MIT"
] | null | null | null |
tests/data_generation.py
|
drabenoro/geomstats
|
4a758aea808a127ed3afef4e330733757fb07846
|
[
"MIT"
] | null | null | null |
import itertools
import pytest
import geomstats.backend as gs
def better_squeeze(array):
    """Delete possible singleton dimension on first axis."""
    # Only a length-1 leading axis is squeezed; everything else is returned
    # untouched so batched inputs keep their batch dimension.
    return gs.squeeze(array, axis=0) if len(array) == 1 else array
class TestData:
    """Class for TestData objects."""

    def generate_tests(self, smoke_test_data, random_test_data=[]):
        """Wrap test data with corresponding markers.

        Parameters
        ----------
        smoke_test_data : list
            Test data that will be marked as smoke.
        random_test_data : list
            Test data that will be marked as random.
            Optional, default: []

        Returns
        -------
        _ : list
            Tests.
        """
        # NOTE(review): the mutable default [] is shared across calls but is
        # never mutated here, so behaviour is unaffected.
        smoke_tests = []
        if smoke_test_data:
            smoke_tests = [
                pytest.param(*case.values(), marks=pytest.mark.smoke)
                for case in smoke_test_data
            ]
        random_tests = []
        if random_test_data:
            for case in random_test_data:
                # Random cases may be dicts (unpack values) or plain tuples.
                if isinstance(case, dict):
                    random_tests.append(
                        pytest.param(*case.values(), marks=pytest.mark.random)
                    )
                else:
                    random_tests.append(
                        pytest.param(*case, marks=pytest.mark.random)
                    )
        return smoke_tests + random_tests
class ManifoldTestData(TestData):
    """Class for ManifoldTestData: data to test manifold properties."""

    def _random_point_belongs_data(
        self,
        smoke_space_args_list,
        smoke_n_points_list,
        space_args_list,
        n_points_list,
        belongs_atol=gs.atol,
    ):
        """Generate data to check that a random point belongs to the manifold.

        Parameters
        ----------
        smoke_space_args_list : list
            List of spaces' args on which smoke tests will run.
        smoke_n_points_list : list
            Integers representing the numbers of points on which smoke tests will run.
        space_args_list : list
            List of spaces' (manifolds') args on which randomized tests will run.
        n_points_list : list
            List of integers as numbers of points on which randomized tests will run.
        belongs_atol : float
            Absolute tolerance for the belongs function.
        """
        # Smoke and random cases share one record layout; they differ only in
        # which pytest marker generate_tests attaches to them.
        smoke_data = [
            dict(space_args=space_args, n_points=n_points, belongs_atol=belongs_atol)
            for space_args, n_points in zip(smoke_space_args_list, smoke_n_points_list)
        ]
        random_data = [
            dict(space_args=space_args, n_points=n_points, belongs_atol=belongs_atol)
            for space_args, n_points in zip(space_args_list, n_points_list)
        ]
        return self.generate_tests(smoke_data, random_data)

    def _projection_belongs_data(
        self, space_args_list, shape_list, n_samples_list, belongs_atol=gs.atol
    ):
        """Generate data to check that a point projected on a manifold belongs to the manifold.

        Parameters
        ----------
        space_args_list : list
            List of spaces' args on which tests will run.
        shape_list : list
            List of shapes of the random data that is generated, and projected.
        n_samples_list : list
            List of integers for the number of random data is generated, and projected.
        belongs_atol : float
            Absolute tolerance for the belongs function.
        """
        random_data = [
            dict(
                space_args=space_args,
                # Ambient data to be projected: a batch of n_samples normal
                # draws, one batch per (space_args, shape) pairing.
                data=gs.random.normal(size=(n_samples,) + shape),
                belongs_atol=belongs_atol,
            )
            for space_args, shape, n_samples in zip(
                space_args_list, shape_list, n_samples_list
            )
        ]
        return self.generate_tests([], random_data)

    def _to_tangent_is_tangent_data(
        self,
        space_cls,
        space_args_list,
        shape_list,
        n_vecs_list,
        is_tangent_atol=gs.atol,
    ):
        """Generate data to check that to_tangent returns a tangent vector.

        Parameters
        ----------
        space_cls : Manifold
            Class of the space, i.e. a child class of Manifold.
        space_args_list : list
            List of spaces' args on which tests will run.
        shape_list : list
            List of shapes of the random vectors generated, and projected.
        n_vecs_list : list
            List of integers for the number of random vectors generated, and projected.
        is_tangent_atol : float
            Absolute tolerance for the is_tangent function.
        """
        random_data = []
        for space_args, shape, n_vecs in zip(space_args_list, shape_list, n_vecs_list):
            # Instantiate the space so a base point can be drawn on it; the raw
            # normal draws are the candidate vectors to pass to to_tangent.
            space = space_cls(*space_args)
            vec = gs.random.normal(size=(n_vecs,) + shape)
            base_point = space.random_point()
            random_data.append(
                dict(
                    space_args=space_args,
                    vec=vec,
                    base_point=base_point,
                    is_tangent_atol=is_tangent_atol,
                )
            )
        return self.generate_tests([], random_data)
class OpenSetTestData(ManifoldTestData):
    """Test data for open-set manifolds embedded in an ambient space."""

    def _to_tangent_is_tangent_in_ambient_space_data(
        self, space_cls, space_args_list, shape_list, is_tangent_atol=gs.atol
    ):
        """Generate data to check that tangent vectors are in ambient space's tangent space.

        Parameters
        ----------
        space_cls : Manifold
            Class of the space, i.e. a child class of Manifold.
        space_args_list : list
            Arguments to pass to constructor of the manifold.
        shape_list : list
            List of shapes of the random data that is generated, and projected.
        is_tangent_atol : float
            Absolute tolerance forwarded with each test case.
        """
        random_data = []
        for space_args, shape in zip(space_args_list, shape_list):
            random_data.append(
                dict(
                    space_args=space_args,
                    # Vector drawn first, then a base point on the space, one
                    # (vector, base_point) pair per (space_args, shape).
                    vector=gs.random.normal(size=shape),
                    base_point=space_cls(*space_args).random_point(shape[0]),
                    is_tangent_atol=is_tangent_atol,
                )
            )
        return self.generate_tests([], random_data)
class LevelSetTestData(ManifoldTestData):
    """Test data for level-set manifolds: coordinate-change round trips."""

    def _extrinsic_intrinsic_composition_data(
        self, space_cls, space_args_list, n_samples_list
    ):
        """Generate data to check that changing coordinate system twice gives back the point.

        Assumes that random_point generates points in extrinsic coordinates.

        Parameters
        ----------
        space_cls : Manifold
            Class of the space, i.e. a child class of Manifold.
        space_args_list : list
            Arguments to pass to constructor of the manifold.
        n_samples_list : list
            List of number of extrinsic points to generate.
        """
        random_data = []
        for space_args, n_samples in zip(space_args_list, n_samples_list):
            # Sample directly on the manifold: points come back extrinsic.
            random_data.append(
                dict(
                    space_args=space_args,
                    point_extrinsic=space_cls(*space_args).random_point(n_samples),
                )
            )
        return self.generate_tests([], random_data)

    def _intrinsic_extrinsic_composition_data(self, space_args_list, n_samples_list):
        """Generate data to check that changing coordinate system twice gives back the point.

        Assumes that the first elements in space_args is the dimension of the space.

        Parameters
        ----------
        space_args_list : list
            Arguments to pass to constructor of the manifold.
        n_samples_list : list
            List of number of intrinsic points to generate.
        """
        random_data = []
        for space_args, n_samples in zip(space_args_list, n_samples_list):
            # Intrinsic points are free vectors, so a batched normal draw of
            # the space's dimension (space_args[0]) suffices.
            random_data.append(
                dict(
                    space_args=space_args,
                    point_intrinsic=gs.random.normal(size=(n_samples,) + space_args[0]),
                )
            )
        return self.generate_tests([], random_data)
class LieGroupTestData(ManifoldTestData):
    """Test data for Lie groups: group exp and log must be mutually inverse."""

    def _exp_log_composition_data(
        self,
        group_cls,
        group_args_list,
        shape_list,
        n_samples_list,
        rtol=gs.rtol,
        atol=gs.atol,
    ):
        """Generate data to check that group exponential and logarithm are inverse.

        Parameters
        ----------
        group_cls : LieGroup
            Class of the group, i.e. a child class of LieGroup.
        group_args_list : list
            Arguments to pass to constructor of the Lie group.
        shape_list : list
            List of shapes of the random tangent vectors to generate.
        n_samples_list : list
            List of number of points and tangent vectors to generate.
        rtol : float
            Relative tolerance stored with each test case.
        atol : float
            Absolute tolerance stored with each test case.
        """
        random_data = []
        for group_args, shape, n_samples in zip(
            group_args_list, shape_list, n_samples_list
        ):
            group = group_cls(*group_args)
            # Exercise both an arbitrary base point and the group identity.
            for base_point in [group.random_point(), group.identity]:
                tangent_vec = group.to_tangent(
                    gs.random.normal(size=(n_samples,) + shape), base_point
                )
                random_data.append(
                    dict(
                        group_args=group_args,
                        tangent_vec=tangent_vec,
                        base_point=base_point,
                        rtol=rtol,
                        atol=atol,
                    )
                )
        return self.generate_tests([], random_data)

    def _log_exp_composition_data(
        self, group_cls, group_args_list, n_samples_list, rtol=gs.rtol, atol=gs.atol
    ):
        """Generate data to check that group logarithm and exponential are inverse.

        Parameters
        ----------
        group_cls : LieGroup
            Class of the group, i.e. a child class of LieGroup.
        group_args_list : list
            List of arguments to pass to constructor of the Lie group.
        n_samples_list : list
            List of number of points and tangent vectors to generate.
        rtol : float
            Relative tolerance stored with each test case.
        atol : float
            Absolute tolerance stored with each test case.
        """
        random_data = []
        for group_args, n_samples in zip(group_args_list, n_samples_list):
            group = group_cls(*group_args)
            # Exercise both an arbitrary base point and the group identity.
            for base_point in [group.random_point(), group.identity]:
                point = group.random_point(n_samples)
                random_data.append(
                    dict(
                        group_args=group_args,
                        point=point,
                        base_point=base_point,
                        rtol=rtol,
                        atol=atol,
                    )
                )
        return self.generate_tests([], random_data)
class VectorSpaceTestData(ManifoldTestData):
    """Test data for vector spaces: basis membership and cardinality."""

    def _basis_belongs_data(self, space_args_list, belongs_atol=gs.atol):
        """Generate data to check that basis elements belong to vector space.

        Parameters
        ----------
        space_args_list : list
            List of arguments to pass to constructor of the vector space.
        belongs_atol : float
            Absolute tolerance of the belongs function.
        """
        random_data = []
        for space_args in space_args_list:
            random_data.append(
                dict(space_args=space_args, belongs_atol=belongs_atol)
            )
        return self.generate_tests([], random_data)

    def _basis_cardinality_data(self, space_args_list):
        """Generate data to check that the number of basis elements is the dimension.

        Parameters
        ----------
        space_args_list : list
            List of arguments to pass to constructor of the vector space.
        """
        random_data = [dict(space_args=args) for args in space_args_list]
        return self.generate_tests([], random_data)
class MatrixLieAlgebraTestData(VectorSpaceTestData):
    """Test data for matrix Lie algebras: representation round trips."""

    def _basis_representation_matrix_representation_composition_data(
        self, space_cls, space_args_list, n_samples_list, rtol=gs.rtol, atol=gs.atol
    ):
        """Generate data to check that changing coordinates twice gives back the point.

        Parameters
        ----------
        space_cls : LieAlgebra
            Class of the space, i.e. a child class of LieAlgebra.
        space_args_list : list
            Arguments to pass to constructor of the manifold.
        n_samples_list : list
            List of numbers of samples to generate.
        rtol : float
            Relative tolerance stored with each test case.
        atol : float
            Absolute tolerance stored with each test case.
        """
        random_data = []
        for space_args, n_samples in zip(space_args_list, n_samples_list):
            random_data.append(
                dict(
                    space_args=space_args,
                    matrix_rep=space_cls(*space_args).random_point(n_samples),
                    rtol=rtol,
                    atol=atol,
                )
            )
        return self.generate_tests([], random_data)

    def _matrix_representation_basis_representation_composition_data(
        self, space_cls, space_args_list, n_samples_list, rtol=gs.rtol, atol=gs.atol
    ):
        """Generate data to check that changing coordinates twice gives back the point.

        Parameters
        ----------
        space_cls : LieAlgebra
            Class of the space, i.e. a child class of LieAlgebra.
        space_args_list : list
            Arguments to pass to constructor of the LieAlgebra.
        n_samples_list : list
            List of numbers of samples to generate.
        rtol : float
            Relative tolerance stored with each test case.
        atol : float
            Absolute tolerance stored with each test case.
        """
        random_data = []
        for space_args, n_samples in zip(space_args_list, n_samples_list):
            # Two constructions on purpose, mirroring the original evaluation
            # order: the receiver instance first, then the sampled argument.
            algebra = space_cls(*space_args)
            sample = space_cls(*space_args).random_point(n_samples)
            random_data.append(
                dict(
                    space_args=space_args,
                    basis_rep=algebra.basis_representation(sample),
                    rtol=rtol,
                    atol=atol,
                )
            )
        return self.generate_tests([], random_data)
class ConnectionTestData(TestData):
    def _exp_shape_data(
        self, connection_args_list, space_list, shape_list, n_samples_list
    ):
        """Generate data to check that exp returns an array of the expected shape.

        Parameters
        ----------
        connection_args_list : list
            List of argument to pass to constructor of the connection.
        space_list : list
            List of manifolds on which the connection is defined.
        shape_list : list
            List of shapes for random data to generate.
        n_samples_list : list
            List of number of random data to generate.
        """
        random_data = []
        for connection_args, space, tangent_shape, n_samples in zip(
            connection_args_list, space_list, shape_list, n_samples_list
        ):
            base_point = space.random_point(n_samples)
            tangent_vec = space.to_tangent(
                gs.random.normal(size=(n_samples,) + tangent_shape), base_point
            )
            # All four (n_tangent_vecs, n_base_points) batching combinations.
            n_points_list = itertools.product([1, n_samples], [1, n_samples])
            # The (1, 1) combination yields an unbatched result shape; the
            # other three carry n_samples on the leading axis.
            expected_shape_list = [space.shape] + [(n_samples,) + space.shape] * 3
            for (n_tangent_vecs, n_base_points), expected_shape in zip(
                n_points_list, expected_shape_list
            ):
                random_data.append(
                    dict(
                        connection_args=connection_args,
                        # better_squeeze drops the leading axis for size-1 slices.
                        tangent_vec=better_squeeze(tangent_vec[:n_tangent_vecs]),
                        base_point=better_squeeze(base_point[:n_base_points]),
                        expected_shape=expected_shape,
                    )
                )
        return self.generate_tests([], random_data)
    def _log_shape_data(self, connection_args_list, space_list, n_samples_list):
        """Generate data to check that log returns an array of the expected shape.

        Parameters
        ----------
        connection_args_list : list
            List of argument to pass to constructor of the connection.
        space_list : list
            List of manifolds on which the connection is defined.
        n_samples_list : list
            List of number of random data to generate.
        """
        random_data = []
        for connection_args, space, n_samples in zip(
            connection_args_list, space_list, n_samples_list
        ):
            base_point = space.random_point(n_samples)
            point = space.random_point(n_samples)
            # All four (n_points, n_base_points) batching combinations; only
            # the (1, 1) case yields an unbatched result shape.
            n_points_list = itertools.product([1, n_samples], [1, n_samples])
            expected_shape_list = [space.shape] + [(n_samples,) + space.shape] * 3
            for (n_points, n_base_points), expected_shape in zip(
                n_points_list, expected_shape_list
            ):
                random_data.append(
                    dict(
                        connection_args=connection_args,
                        # better_squeeze drops the leading axis for size-1 slices.
                        point=better_squeeze(point[:n_points]),
                        base_point=better_squeeze(base_point[:n_base_points]),
                        expected_shape=expected_shape,
                    )
                )
        return self.generate_tests([], random_data)
    def _exp_belongs_data(
        self,
        connection_args_list,
        space_list,
        shape_list,
        n_samples_list,
        belongs_atol=gs.atol,
    ):
        """Generate data to check that exp gives a point on the manifold.

        Parameters
        ----------
        connection_args_list : list
            List of argument to pass to constructor of the connection.
        space_list : list
            List of manifolds on which the connection is defined.
        shape_list : list
            List of shapes for random data to generate.
        n_samples_list : list
            List of number of random data to generate.
        belongs_atol : float
            Absolute tolerance for the belongs function.
        """
        random_data = []
        for connection_args, space, shape, n_tangent_vecs in zip(
            connection_args_list, space_list, shape_list, n_samples_list
        ):
            # One shared base point with a batch of tangent vectors at it.
            base_point = space.random_point()
            tangent_vec = space.to_tangent(
                gs.random.normal(size=(n_tangent_vecs,) + shape), base_point
            )
            random_data.append(
                dict(
                    connection_args=connection_args,
                    space=space,
                    tangent_vec=tangent_vec,
                    base_point=base_point,
                    belongs_atol=belongs_atol,
                )
            )
        return self.generate_tests([], random_data)
    def _log_is_tangent_data(
        self, connection_args_list, space_list, n_samples_list, is_tangent_atol=gs.atol
    ):
        """Generate data to check that log gives a tangent vector.

        Parameters
        ----------
        connection_args_list : list
            List of argument to pass to constructor of the connection.
        space_list : list
            List of manifolds on which the connection is defined.
        n_samples_list : list
            List of number of random data to generate.
        is_tangent_atol : float
            Absolute tolerance for the is_tangent function.
        """
        random_data = []
        for connection_args, space, n_samples in zip(
            connection_args_list, space_list, n_samples_list
        ):
            # A batch of target points and a single base point to log from.
            point = space.random_point(n_samples)
            base_point = space.random_point()
            random_data.append(
                dict(
                    connection_args=connection_args,
                    space=space,
                    point=point,
                    base_point=base_point,
                    is_tangent_atol=is_tangent_atol,
                )
            )
        return self.generate_tests([], random_data)
    def _geodesic_ivp_belongs_data(
        self,
        connection_args_list,
        space_list,
        shape_list,
        n_points_list,
        belongs_atol=gs.atol,
    ):
        """Generate data to check that connection geodesics belong to manifold.

        Initial-value problem: each geodesic is specified by an initial point
        and an initial tangent vector.

        Parameters
        ----------
        connection_args_list : list
            List of argument to pass to constructor of the connection.
        space_list : list
            List of manifolds on which the connection is defined.
        shape_list : list
            List of shapes for random data to generate.
        n_points_list : list
            List of number of times on the geodesics.
        belongs_atol : float
            Absolute tolerance for the belongs function.
        """
        random_data = []
        for connection_args, space, n_points, shape in zip(
            connection_args_list, space_list, n_points_list, shape_list
        ):
            initial_point = space.random_point()
            initial_tangent_vec = space.to_tangent(
                gs.random.normal(size=shape), initial_point
            )
            random_data.append(
                dict(
                    connection_args=connection_args,
                    space=space,
                    n_points=n_points,
                    initial_point=initial_point,
                    initial_tangent_vec=initial_tangent_vec,
                    belongs_atol=belongs_atol,
                )
            )
        return self.generate_tests([], random_data)
    def _geodesic_bvp_belongs_data(
        self,
        connection_args_list,
        space_list,
        n_points_list,
        belongs_atol=gs.atol,
    ):
        """Generate data to check that connection geodesics belong to manifold.

        Boundary-value problem: each geodesic is specified by its start and
        end points.

        Parameters
        ----------
        connection_args_list : list
            List of argument to pass to constructor of the connection.
        space_list : list
            List of manifolds on which the connection is defined.
        n_points_list : list
            List of number of points on the geodesics.
        belongs_atol : float
            Absolute tolerance for the belongs function.
        """
        random_data = []
        for connection_args, space, n_points in zip(
            connection_args_list,
            space_list,
            n_points_list,
        ):
            initial_point = space.random_point()
            end_point = space.random_point()
            random_data.append(
                dict(
                    connection_args=connection_args,
                    space=space,
                    n_points=n_points,
                    initial_point=initial_point,
                    end_point=end_point,
                    belongs_atol=belongs_atol,
                )
            )
        return self.generate_tests([], random_data)
def _log_exp_composition_data(
    self,
    connection_args_list,
    space_list,
    n_samples_list,
    rtol=gs.rtol,
    atol=gs.atol,
):
    """Generate data to check that logarithm and exponential are inverse.

    Parameters
    ----------
    connection_args_list : list
        List of argument to pass to constructor of the connection.
    space_list : list
        List of manifolds on which the connection is defined.
    n_samples_list : list
        List of number of random data to generate.
    rtol : float
        Relative tolerance to test this property.
    atol : float
        Absolute tolerance to test this property.
    """
    random_data = []
    for args, space, n_samples in zip(
        connection_args_list, space_list, n_samples_list
    ):
        # Dict values are evaluated in order: the batch of points is drawn
        # before the single base point, matching the original draw order.
        random_data.append(
            {
                "connection_args": args,
                "point": space.random_point(n_samples),
                "base_point": space.random_point(),
                "rtol": rtol,
                "atol": atol,
            }
        )
    return self.generate_tests([], random_data)
def _exp_log_composition_data(
    self,
    connection_args_list,
    space_list,
    shape_list,
    n_samples_list,
    rtol=gs.rtol,
    atol=gs.atol,
):
    """Generate data to check that exponential and logarithm are inverse.

    Parameters
    ----------
    connection_args_list : list
        List of argument to pass to constructor of the connection.
    space_list : list
        List of manifolds on which the connection is defined.
    shape_list : list
        List of shapes for random data to generate.
    n_samples_list : list
        List of number of random data to generate.
    rtol : float
        Relative tolerance to test this property.
    atol : float
        Absolute tolerance to test this property.
    """
    random_data = []
    for args, space, shape, n_samples in zip(
        connection_args_list, space_list, shape_list, n_samples_list
    ):
        base_point = space.random_point()
        # A batch of n_samples tangent vectors at the single base point.
        batch = space.to_tangent(
            gs.random.normal(size=(n_samples,) + shape), base_point
        )
        random_data.append(
            {
                "connection_args": args,
                "tangent_vec": batch,
                "base_point": base_point,
                "rtol": rtol,
                "atol": atol,
            }
        )
    return self.generate_tests([], random_data)
def _exp_ladder_parallel_transport_data(
    self,
    connection_args_list,
    space_list,
    shape_list,
    n_samples_list,
    n_rungs_list,
    alpha_list,
    scheme_list,
    rtol=gs.rtol,
    atol=gs.atol,
):
    """Generate data to check that end point of ladder matches exponential.

    Parameters
    ----------
    connection_args_list : list
        List of argument to pass to constructor of the connection.
    space_list : list
        List of manifolds on which the connection is defined.
    shape_list : list
        List of shapes for random data to generate.
    n_samples_list : list
        List of number of random data to generate.
    n_rungs_list : list
        List of number of rungs for the ladder.
    alpha_list : list
        List of exponents for the scaling of the vector to transport.
    scheme_list : list
        List of ladder schemes to test.
    rtol : float
        Relative tolerance to test this property.
    atol : float
        Absolute tolerance to test this property.
    """
    per_case = zip(
        connection_args_list,
        space_list,
        shape_list,
        n_samples_list,
        n_rungs_list,
        alpha_list,
        scheme_list,
    )
    random_data = []
    for args, space, shape, n_samples, n_rungs, alpha, scheme in per_case:
        base_point = space.random_point()
        # Draw order matters for reproducibility: batch of tangent vectors
        # first, then the single transport direction.
        tangent_vec = space.to_tangent(
            gs.random.normal(size=(n_samples,) + shape), base_point
        )
        direction = space.to_tangent(gs.random.normal(size=shape), base_point)
        random_data.append(
            {
                "connection_args": args,
                "direction": direction,
                "tangent_vec": tangent_vec,
                "base_point": base_point,
                "scheme": scheme,
                "n_rungs": n_rungs,
                "alpha": alpha,
                "rtol": rtol,
                "atol": atol,
            }
        )
    return self.generate_tests([], random_data)
def _exp_geodesic_ivp_data(
    self,
    connection_args_list,
    space_list,
    shape_list,
    n_samples_list,
    n_points_list,
    rtol=gs.rtol,
    atol=gs.atol,
):
    """Generate data to check that end point of geodesic matches exponential.

    Parameters
    ----------
    connection_args_list : list
        List of argument to pass to constructor of the connection.
    space_list : list
        List of manifolds on which the connection is defined.
    shape_list : list
        List of shapes for random data to generate.
    n_samples_list : list
        List of number of random data to generate.
    n_points_list : list
        List of number of times on the geodesics.
    rtol : float
        Relative tolerance to test this property.
    atol : float
        Absolute tolerance to test this property.
    """
    random_data = []
    for connection_args, space, shape, n_samples, n_points in zip(
        connection_args_list,
        space_list,
        shape_list,
        n_samples_list,
        n_points_list,
    ):
        base_point = space.random_point()
        # Batch of n_samples tangent vectors obtained by projecting random
        # ambient vectors at the base point.
        tangent_vec = space.to_tangent(
            gs.random.normal(size=(n_samples,) + shape), base_point
        )
        random_data.append(
            dict(
                connection_args=connection_args,
                n_points=n_points,
                tangent_vec=tangent_vec,
                base_point=base_point,
                rtol=rtol,
                atol=atol,
            )
        )
    return self.generate_tests([], random_data)
class RiemannianMetricTestData(ConnectionTestData):
    """Generators of random test data for Riemannian-metric properties."""

    def _squared_dist_is_symmetric_data(
        self,
        metric_args_list,
        space_list,
        n_points_a_list,
        n_points_b_list,
        rtol=gs.rtol,
        atol=gs.atol,
    ):
        """Generate data to check that the squared geodesic distance is symmetric.

        Parameters
        ----------
        metric_args_list : list
            List of arguments to pass to constructor of the metric.
        space_list : list
            List of spaces on which the metric is defined.
        n_points_a_list : list
            List of number of points A to generate on the manifold.
        n_points_b_list : list
            List of number of points B to generate on the manifold.
        rtol : float
            Relative tolerance to test this property.
        atol : float
            Absolute tolerance to test this property.
        """
        random_data = []
        for args, space, n_a, n_b in zip(
            metric_args_list, space_list, n_points_a_list, n_points_b_list
        ):
            # Values are evaluated in order: points A drawn before points B.
            random_data.append(
                {
                    "metric_args": args,
                    "point_a": space.random_point(n_a),
                    "point_b": space.random_point(n_b),
                    "rtol": rtol,
                    "atol": atol,
                }
            )
        return self.generate_tests([], random_data)

    def _parallel_transport_ivp_is_isometry_data(
        self,
        metric_args_list,
        space_list,
        shape_list,
        n_samples_list,
        is_tangent_atol=gs.atol,
        rtol=gs.rtol,
        atol=gs.atol,
    ):
        """Generate data to check that parallel transport is an isometry.

        Parameters
        ----------
        metric_args_list : list
            List of arguments to pass to constructor of the metric.
        space_list : list
            List of spaces on which the metric is defined.
        shape_list : list
            List of shapes for random data to generate.
        n_samples_list : list
            List of number of random data to generate.
        is_tangent_atol : float
            Absolute tolerance for the is_tangent function.
        rtol : float
            Relative tolerance to test this property.
        atol : float
            Absolute tolerance to test this property.
        """
        random_data = []
        for args, space, shape, n_samples in zip(
            metric_args_list, space_list, shape_list, n_samples_list
        ):
            base_point = space.random_point()
            # Draw order: batch of tangent vectors first, then the single
            # transport direction.
            tangent_vec = space.to_tangent(
                gs.random.normal(size=(n_samples,) + shape), base_point
            )
            direction = space.to_tangent(gs.random.normal(size=shape), base_point)
            random_data.append(
                {
                    "metric_args": args,
                    "space": space,
                    "tangent_vec": tangent_vec,
                    "base_point": base_point,
                    "direction": direction,
                    "is_tangent_atol": is_tangent_atol,
                    "rtol": rtol,
                    "atol": atol,
                }
            )
        return self.generate_tests([], random_data)

    def _parallel_transport_bvp_is_isometry_data(
        self,
        metric_args_list,
        space_list,
        shape_list,
        n_samples_list,
        is_tangent_atol=gs.atol,
        rtol=gs.rtol,
        atol=gs.atol,
    ):
        """Generate data to check that parallel transport is an isometry.

        Parameters
        ----------
        metric_args_list : list
            List of arguments to pass to constructor of the metric.
        space_list : list
            List of spaces on which the metric is defined.
        shape_list : list
            List of shapes for random data to generate.
        n_samples_list : list
            List of number of random data to generate.
        is_tangent_atol : float
            Absolute tolerance for the is_tangent function.
        rtol : float
            Relative tolerance to test this property.
        atol : float
            Absolute tolerance to test this property.
        """
        random_data = []
        for args, space, shape, n_samples in zip(
            metric_args_list, space_list, shape_list, n_samples_list
        ):
            base_point = space.random_point()
            # Draw order: batch of tangent vectors first, then the end point
            # of the transport path.
            tangent_vec = space.to_tangent(
                gs.random.normal(size=(n_samples,) + shape), base_point
            )
            end_point = space.random_point()
            random_data.append(
                {
                    "metric_args": args,
                    "space": space,
                    "tangent_vec": tangent_vec,
                    "base_point": base_point,
                    "end_point": end_point,
                    "is_tangent_atol": is_tangent_atol,
                    "rtol": rtol,
                    "atol": atol,
                }
            )
        return self.generate_tests([], random_data)
| 35.179541
| 95
| 0.568779
| 3,812
| 33,702
| 4.750262
| 0.048531
| 0.064944
| 0.046388
| 0.05412
| 0.87315
| 0.845317
| 0.807544
| 0.786172
| 0.752761
| 0.736801
| 0
| 0.000467
| 0.365231
| 33,702
| 957
| 96
| 35.216301
| 0.845978
| 0.312207
| 0
| 0.675
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048214
| false
| 0
| 0.005357
| 0
| 0.119643
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4ef46bf5fabb2ef403b24a2a08fb3d5109a3a4d6
| 159
|
py
|
Python
|
localgraphclustering/algorithms/__init__.py
|
vishalbelsare/LocalGraphClustering
|
a6325350997932d548a876deb259c2387fc2c809
|
[
"MIT"
] | 106
|
2017-09-06T04:47:02.000Z
|
2022-03-30T07:43:27.000Z
|
localgraphclustering/algorithms/__init__.py
|
vishalbelsare/LocalGraphClustering
|
a6325350997932d548a876deb259c2387fc2c809
|
[
"MIT"
] | 51
|
2017-09-06T02:22:09.000Z
|
2021-12-15T11:39:28.000Z
|
localgraphclustering/algorithms/__init__.py
|
vishalbelsare/LocalGraphClustering
|
a6325350997932d548a876deb259c2387fc2c809
|
[
"MIT"
] | 38
|
2017-09-04T21:45:13.000Z
|
2022-01-19T09:48:25.000Z
|
from .acl_list import acl_list
from .eig2_nL import eig2_nL, eig2nL_subgraph
from .fista_dinput_dense import fista_dinput_dense
from .sweepcut import sweepcut
| 31.8
| 50
| 0.861635
| 26
| 159
| 4.923077
| 0.461538
| 0.109375
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021127
| 0.106918
| 159
| 4
| 51
| 39.75
| 0.880282
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4ef90e761346efd3eb56025fcea9c56bce6ec917
| 103
|
py
|
Python
|
terrascript/matchbox/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 4
|
2022-02-07T21:08:14.000Z
|
2022-03-03T04:41:28.000Z
|
terrascript/matchbox/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/matchbox/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 2
|
2022-02-06T01:49:42.000Z
|
2022-02-08T14:15:00.000Z
|
# terrascript/matchbox/__init__.py
import terrascript
class matchbox(terrascript.Provider):
    """Terrascript provider stub for matchbox; adds nothing beyond the base class."""
    pass
| 14.714286
| 37
| 0.796117
| 11
| 103
| 7.090909
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126214
| 103
| 6
| 38
| 17.166667
| 0.866667
| 0.31068
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
9c89ca786c790c0854fd43dd8512b80859e264d5
| 4,139
|
py
|
Python
|
imcsdk/mometa/bios/BiosProfileToken.py
|
ragupta-git/ImcSdk
|
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/mometa/bios/BiosProfileToken.py
|
ragupta-git/ImcSdk
|
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
|
[
"Apache-2.0"
] | null | null | null |
imcsdk/mometa/bios/BiosProfileToken.py
|
ragupta-git/ImcSdk
|
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
|
[
"Apache-2.0"
] | 3
|
2018-11-14T13:02:40.000Z
|
2018-11-14T13:49:38.000Z
|
"""This module contains the general information for BiosProfileToken ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class BiosProfileTokenConsts:
    """Constants namespace for BiosProfileToken; intentionally empty."""
    pass
class BiosProfileToken(ManagedObject):
    """This is BiosProfileToken class."""

    consts = BiosProfileTokenConsts()
    # 'name' is the naming property that forms the rn "token-[name]".
    naming_props = set([u'name'])

    # Managed-object metadata, keyed by platform ("classic" vs "modular").
    # Both platforms expose the same read-only ("OutputOnly") object here.
    mo_meta = {
        "classic": MoMeta("BiosProfileToken", "biosProfileToken", "token-[name]", VersionMeta.Version301c, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'biosProfile'], [], ["Get"]),
        "modular": MoMeta("BiosProfileToken", "biosProfileToken", "token-[name]", VersionMeta.Version301c, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'biosProfile'], [], ["Get"])
    }

    # Per-property metadata (access mode, mask, length limits, regex), keyed
    # by platform; entries are identical for "classic" and "modular".
    prop_meta = {

        "classic": {
            "actual_value": MoPropertyMeta("actual_value", "actualValue", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version301c, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
            "configured_value": MoPropertyMeta("configured_value", "configuredValue", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
            "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version301c, MoPropertyMeta.NAMING, None, 0, 510, None, [], []),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        },

        "modular": {
            "actual_value": MoPropertyMeta("actual_value", "actualValue", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version301c, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
            "configured_value": MoPropertyMeta("configured_value", "configuredValue", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
            "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version301c, MoPropertyMeta.NAMING, None, 0, 510, None, [], []),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version301c, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        },

    }

    # Mapping from XML attribute names to Python attribute names, per platform.
    prop_map = {

        "classic": {
            "actualValue": "actual_value", 
            "childAction": "child_action", 
            "configuredValue": "configured_value", 
            "dn": "dn", 
            "name": "name", 
            "rn": "rn", 
            "status": "status", 
        },

        "modular": {
            "actualValue": "actual_value", 
            "childAction": "child_action", 
            "configuredValue": "configured_value", 
            "dn": "dn", 
            "name": "name", 
            "rn": "rn", 
            "status": "status", 
        },

    }

    def __init__(self, parent_mo_or_dn, name, **kwargs):
        # Bitmask tracking which properties have been modified locally.
        self._dirty_mask = 0
        self.name = name
        self.actual_value = None
        self.child_action = None
        self.configured_value = None
        self.status = None

        ManagedObject.__init__(self, "BiosProfileToken", parent_mo_or_dn, **kwargs)
| 50.47561
| 234
| 0.612708
| 385
| 4,139
| 6.454545
| 0.202597
| 0.14165
| 0.157746
| 0.23662
| 0.77505
| 0.77505
| 0.77505
| 0.77505
| 0.77505
| 0.77505
| 0
| 0.034007
| 0.211404
| 4,139
| 81
| 235
| 51.098765
| 0.727328
| 0.02706
| 0
| 0.533333
| 0
| 0.033333
| 0.251059
| 0.040349
| 0
| 0
| 0.005978
| 0
| 0
| 1
| 0.016667
| false
| 0.016667
| 0.05
| 0
| 0.183333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9c9d30f8bccc9623b9aa056b02d37346354ccf39
| 192
|
py
|
Python
|
feincms/admin/editor.py
|
duointeractive/feincms
|
29b6bc197e854ec69bfb9a4923f029803c8d8f50
|
[
"BSD-3-Clause"
] | null | null | null |
feincms/admin/editor.py
|
duointeractive/feincms
|
29b6bc197e854ec69bfb9a4923f029803c8d8f50
|
[
"BSD-3-Clause"
] | null | null | null |
feincms/admin/editor.py
|
duointeractive/feincms
|
29b6bc197e854ec69bfb9a4923f029803c8d8f50
|
[
"BSD-3-Clause"
] | null | null | null |
from feincms.admin.item_editor import ItemEditor, ItemEditorForm
from feincms.admin.tree_editor import TreeEditor, ajax_editable_boolean, \
ajax_editable_boolean_cell, django_boolean_icon
| 48
| 74
| 0.864583
| 25
| 192
| 6.28
| 0.64
| 0.140127
| 0.203822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088542
| 192
| 3
| 75
| 64
| 0.897143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9cc59b8851d2d0b77b3d92ebf4ae82a270cccc66
| 436
|
py
|
Python
|
tests/integration/unit_test/test_unit_test_python2_7.py
|
aahung/aws-sam-cli-app-templates
|
fb44b0030d124e53ee4db42bc95240081e4dbbd8
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/unit_test/test_unit_test_python2_7.py
|
aahung/aws-sam-cli-app-templates
|
fb44b0030d124e53ee4db42bc95240081e4dbbd8
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/unit_test/test_unit_test_python2_7.py
|
aahung/aws-sam-cli-app-templates
|
fb44b0030d124e53ee4db42bc95240081e4dbbd8
|
[
"Apache-2.0"
] | null | null | null |
from tests.integration.base import Base
class UnitTest_python2_7_cookiecutter_aws_sam_hello_python(Base.PythonUnitTestBase):
    """Run the base Python unit-test suite for the hello-python template under python2.7."""
    # Interpreter and template directory consumed by PythonUnitTestBase.
    python_executable = "python2.7"
    directory = "python2.7/cookiecutter-aws-sam-hello-python"
class UnitTest_python2_7_cookiecutter_aws_sam_step_functions_sample_app(Base.PythonUnitTestBase):
    """Run the base Python unit-test suite for the step-functions sample app under python2.7."""
    # Interpreter and template directory consumed by PythonUnitTestBase.
    python_executable = "python2.7"
    directory = "python2.7/cookiecutter-aws-sam-step-functions-sample-app"
| 36.333333
| 97
| 0.821101
| 56
| 436
| 6.071429
| 0.375
| 0.141176
| 0.235294
| 0.270588
| 0.9
| 0.9
| 0.9
| 0.682353
| 0.682353
| 0.476471
| 0
| 0.030303
| 0.091743
| 436
| 11
| 98
| 39.636364
| 0.828283
| 0
| 0
| 0.285714
| 0
| 0
| 0.268349
| 0.227064
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.