hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
384117f81df8e2578480b4c8e242e2a0c2e3af2c
729
py
Python
tests/penn_chime/model/test_parameters.py
riverkoko/chime
391a46056d59ac2dd622660251b03e377ea49ac5
[ "MIT" ]
222
2020-03-17T16:02:49.000Z
2022-03-24T15:45:29.000Z
tests/penn_chime/model/test_parameters.py
riverkoko/chime
391a46056d59ac2dd622660251b03e377ea49ac5
[ "MIT" ]
445
2020-03-17T16:08:49.000Z
2022-03-12T00:29:30.000Z
tests/penn_chime/model/test_parameters.py
riverkoko/chime
391a46056d59ac2dd622660251b03e377ea49ac5
[ "MIT" ]
176
2020-03-17T19:45:05.000Z
2022-02-23T02:53:03.000Z
"""Test Parameters.""" from penn_chime.model.parameters import Parameters def test_cypress_defaults(): """Ensure the cypress defaults have been updated.""" # TODO how to make this work when the module is installed? _ = Parameters.create({"PARAMETERS": "./defaults/cypress.cfg"}, []) def test_cli_defaults(): """Ensure the cli defaults have been updated.""" # TODO how to make this work when the module is installed? _ = Parameters.create({"PARAMETERS": "./defaults/cli.cfg"}, []) def test_webapp_defaults(): """Ensure the webapp defaults have been updated.""" # TODO how to make this work when the module is installed? _ = Parameters.create({"PARAMETERS": "./defaults/webapp.cfg"}, [])
33.136364
71
0.689986
93
729
5.301075
0.311828
0.042596
0.103448
0.139959
0.62069
0.62069
0.62069
0.62069
0.62069
0.62069
0
0
0.174211
729
21
72
34.714286
0.818937
0.444444
0
0
0
0
0.23822
0.112565
0
0
0
0.047619
0
1
0.428571
false
0
0.142857
0
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
4
6993e46d015c6887e7c689468f4059ab5ab8c40b
109
py
Python
python_modules/libraries/dagster-pagerduty/dagster_pagerduty/__init__.py
shahvineet98/dagster
2471d39c52f660e23e8c0d8e8ded873ddc3df036
[ "Apache-2.0" ]
3
2020-09-09T04:10:23.000Z
2021-11-08T02:10:42.000Z
python_modules/libraries/dagster-pagerduty/dagster_pagerduty/__init__.py
shahvineet98/dagster
2471d39c52f660e23e8c0d8e8ded873ddc3df036
[ "Apache-2.0" ]
2
2021-05-11T13:36:27.000Z
2021-09-03T01:53:11.000Z
python_modules/libraries/dagster-pagerduty/dagster_pagerduty/__init__.py
shahvineet98/dagster
2471d39c52f660e23e8c0d8e8ded873ddc3df036
[ "Apache-2.0" ]
1
2021-02-21T12:16:47.000Z
2021-02-21T12:16:47.000Z
from .resources import pagerduty_resource from .version import __version__ __all__ = ['pagerduty_resource']
21.8
41
0.825688
12
109
6.666667
0.583333
0.425
0
0
0
0
0
0
0
0
0
0
0.110092
109
4
42
27.25
0.824742
0
0
0
0
0
0.165138
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
69a343a68ab51ebe3b2c14b9f6bfcdc44e67b061
281
py
Python
rl_games/envs/__init__.py
Denys88/dqn_atari
7f259a6436f396274c9931d0bd7004cee2ecabfa
[ "MIT" ]
null
null
null
rl_games/envs/__init__.py
Denys88/dqn_atari
7f259a6436f396274c9931d0bd7004cee2ecabfa
[ "MIT" ]
null
null
null
rl_games/envs/__init__.py
Denys88/dqn_atari
7f259a6436f396274c9931d0bd7004cee2ecabfa
[ "MIT" ]
null
null
null
from rl_games.envs.connect4_network import ConnectBuilder from rl_games.envs.test_network import TestNetBuilder from rl_games.algos_torch import model_builder model_builder.register_network('connect4net', ConnectBuilder) model_builder.register_network('testnet', TestNetBuilder)
35.125
61
0.875445
36
281
6.527778
0.472222
0.076596
0.140426
0.12766
0
0
0
0
0
0
0
0.007605
0.064057
281
8
62
35.125
0.885932
0
0
0
0
0
0.064286
0
0
0
0
0
0
1
0
true
0
0.6
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
69b2c1c4e32d7180d38740d2b19fe953098e2cf9
294
py
Python
tasks/__init__.py
radeklat/sparkql
57d55c7599460f2e7e5957c037d7c25cedb92647
[ "MIT" ]
6
2020-02-12T14:01:08.000Z
2020-05-29T05:35:09.000Z
tasks/__init__.py
radeklat/sparkql
57d55c7599460f2e7e5957c037d7c25cedb92647
[ "MIT" ]
31
2020-02-09T18:52:52.000Z
2020-05-15T16:03:32.000Z
tasks/__init__.py
radeklat/sparkql
57d55c7599460f2e7e5957c037d7c25cedb92647
[ "MIT" ]
1
2020-04-03T19:23:08.000Z
2020-04-03T19:23:08.000Z
from tasks.formatting import reformat from tasks.testing import test from tasks.linting import lint from tasks.releasing import prepare_release, find_releasable_changes from tasks.typechecking import typecheck def verify_all(): """Run all checks.""" lint() typecheck() test()
22.615385
68
0.765306
38
294
5.815789
0.578947
0.20362
0
0
0
0
0
0
0
0
0
0
0.159864
294
12
69
24.5
0.894737
0.05102
0
0
0
0
0
0
0
0
0
0
0
1
0.111111
true
0
0.555556
0
0.666667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
69c10c9cf016d98054b677fc8e341f936f86be8d
35,837
py
Python
pytests/gsi/planner_gsi.py
sumedhpb/testrunner
9ff887231c75571624abc31a3fb5248110e01203
[ "Apache-2.0" ]
14
2015-02-06T02:47:57.000Z
2020-03-14T15:06:05.000Z
pytests/gsi/planner_gsi.py
sumedhpb/testrunner
9ff887231c75571624abc31a3fb5248110e01203
[ "Apache-2.0" ]
3
2019-02-27T19:29:11.000Z
2021-06-02T02:14:27.000Z
pytests/gsi/planner_gsi.py
sumedhpb/testrunner
9ff887231c75571624abc31a3fb5248110e01203
[ "Apache-2.0" ]
108
2015-03-26T08:58:49.000Z
2022-03-21T05:21:39.000Z
"""planner_gsi.py: description __author__ = "Hemant Rajput" __maintainer = "Hemant Rajput" __email__ = "Hemant.Rajput@couchbase.com" __git_user__ = "hrajput89" __created_on__ = "04/08/21 11:26 am" """ import json import os import random from couchbase_helper.query_definitions import QueryDefinition from membase.api.rest_client import RestHelper, RestConnection from .base_gsi import BaseSecondaryIndexingTests from remote.remote_util import RemoteMachineShellConnection from gsi.collections_concurrent_indexes import powerset class PlannerGSI(BaseSecondaryIndexingTests): def setUp(self): super(PlannerGSI, self).setUp() self.log.info("============== PlannerGSI setup has started ==============") self.enable_gsi_planner = self.input.param("enable_gsi_planner", True) if not self.enable_gsi_planner: self.index_rest.set_index_settings("indexer.planner.useGreedyPlanner", False) self.bucket_params = self._create_bucket_params(server=self.master, size=self.bucket_size, replicas=self.num_replicas, bucket_type=self.bucket_type, enable_replica_index=self.enable_replica_index, eviction_policy=self.eviction_policy, lww=self.lww) self.cluster.create_standard_bucket(name=self.test_bucket, port=11222, bucket_params=self.bucket_params) self.buckets = self.rest.get_buckets() self.new_index_nodes = self.input.param('new_index_nodes', 1) self.new_indexes = self.input.param('new_indexes', 1) self.initial_index_num = self.input.param('initial_index_num', 1) self.num_indexes_to_delete = self.input.param('num_indexes_to_delete', 0) self.index_field_set = powerset(['age', 'city', 'country', 'title', 'firstName', 'lastName', 'streetAddress', 'suffix', 'filler1', 'phone', 'zipcode']) self.log.info("============== PlannerGSI setup has completed ==============") def tearDown(self): self.log.info("============== PlannerGSI tearDown has started ==============") super(PlannerGSI, self).tearDown() self.log.info("============== PlannerGSI tearDown has completed ==============") def suite_tearDown(self): 
pass def suite_setUp(self): pass def _find_least_loaded_index_node(self, count=1): index_node = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True)[0] remote = RemoteMachineShellConnection(index_node) output_file = '/tmp/index_plan.log' dest_file = 'index_plan.log' del_cmd = f'rm -rf {output_file}' self.log.info("Deleting index_plan.log from Remote host") remote.execute_command(del_cmd) cmd = f'/opt/couchbase/bin/cbindexplan -command=retrieve -cluster="127.0.0.1:8091" ' \ f'-username="Administrator" -password="password" -getUsage -numNewReplica {count}' \ f' -output {output_file}' remote.execute_command(cmd) if os.path.exists(dest_file): self.log.info("Deleting index_plan.log from slave") os.remove(dest_file) remote.copy_file_remote_to_local(rem_path=output_file, des_path=dest_file) # cleaning file on remote host self.log.info("Deleting index_plan.log from Remote host after copying") remote.execute_command(del_cmd) if os.path.exists(dest_file): fh = open(dest_file) json_obj = json.load(fh) index_loads = {} for index in json_obj["placement"]: curr_index_load = index['usageRatio'] index_node = index['nodeId'].split(':')[0] index_loads[index_node] = curr_index_load sorted_indexer_nodes = sorted(index_loads.items(), key=lambda x: x[1]) ll_nodes_list = [] for item in range(count): ll_nodes_list.append(sorted_indexer_nodes[item][0]) return ll_nodes_list self.fail("Couldn't copy cbindexplan output to local directory") def test_new_index_placement_by_greedy_planner(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) < 3: self.fail("Need at least 3 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_gen = QueryDefinition(index_name='idx', index_fields=['age', 'country', 'city']) query = index_gen.generate_index_create_query(namespace=collection_namespace, 
defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() least_loaded_node = self._find_least_loaded_index_node()[0] # creating new index and checking where does planner place it idx_new = 'idx_new' new_index_gen = QueryDefinition(index_name=idx_new, index_fields=['age']) query = new_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build) self.run_cbq_query(query=query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() index_metadata = self.index_rest.get_indexer_metadata()['status'] for index in index_metadata: if index['indexName'] == idx_new: host = index['hosts'][0].split(':')[0] self.assertEqual(host, least_loaded_node, "Index not created on Least Loaded Index node") break else: self.fail("new Index stats not available in index_metadata") def test_placement_for_new_index_with_replica(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) < 3: self.fail("Need at least 3 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_gen = QueryDefinition(index_name='idx', index_fields=['age', 'country', 'city']) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() index_metadata = self.index_rest.get_indexer_metadata()['status'] self.log.info(index_metadata) meta_index_gen = 
QueryDefinition(index_name='meta_idx', index_fields=['meta().id']) query = meta_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = meta_index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() least_loaded_nodes = self._find_least_loaded_index_node(count=2) # creating new index and checking where does planner place it idx_new = 'idx_new' new_index_gen = QueryDefinition(index_name=idx_new, index_fields=['age']) query = new_index_gen.generate_index_create_query(namespace=collection_namespace, num_replica=self.num_replicas, defer_build=self.defer_build) self.run_cbq_query(query=query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() index_metadata = self.index_rest.get_indexer_metadata()['status'] for index in index_metadata: if index['indexName'] == idx_new: host = index['hosts'][0].split(':')[0] self.assertTrue(host in least_loaded_nodes, "Index not created on Least Loaded Index node") def test_index_placement_on_new_indexer_node(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) < 2: self.fail("Need at least 3 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_gen = QueryDefinition(index_name='idx', index_fields=['age', 'country', 'city']) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) 
meta_index_gen = QueryDefinition(index_name='meta_idx', index_fields=['meta().id']) query = meta_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = meta_index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() # Adding in a new Index node add_nodes = self.servers[self.nodes_init:self.nodes_init + self.new_index_nodes] add_node_services = ['index' for _ in range(len(add_nodes))] rebalance_task = self.cluster.async_rebalance(servers=self.servers[:self.nodes_init], to_add=add_nodes, to_remove=[], services=add_node_services) rebalance_task.result() rebalance_status = RestHelper(self.rest).rebalance_reached() self.assertTrue(rebalance_status, "rebalance failed, stuck or did not complete") for item in range(self.new_indexes): least_loaded_node = self._find_least_loaded_index_node(count=2) # creating new index and checking where does planner place it idx_new = f'idx_new_{item}' index_fields = self.index_field_set[item % len(self.index_field_set)] new_index_gen = QueryDefinition(index_name=idx_new, index_fields=index_fields) query = new_index_gen.generate_index_create_query(namespace=collection_namespace, num_replica=self.num_replicas, defer_build=self.defer_build) self.run_cbq_query(query=query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(30) index_metadata = self.index_rest.get_indexer_metadata()['status'] for index in index_metadata: if index['indexName'] == idx_new: host = index['hosts'][0].split(':')[0] self.assertTrue(host in least_loaded_node, "Index not created on Least Loaded Index node") def test_index_placements_with_server_group(self): self._create_server_groups() rebalance_task = 
self.cluster.async_rebalance(servers=self.servers[:self.nodes_init], to_add=[], to_remove=[], services=[]) rebalance_task.result() rebalance_status = RestHelper(self.rest).rebalance_reached() self.assertTrue(rebalance_status, "rebalance failed, stuck or did not complete") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_gen = QueryDefinition(index_name='idx', index_fields=['age', 'country', 'city']) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) meta_index_gen = QueryDefinition(index_name='meta_idx', index_fields=['meta().id']) query = meta_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = meta_index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() indexer_metadata = self.index_rest.get_indexer_metadata()['status'] index_hosts_dict = {} for index in indexer_metadata: index_name = index['indexName'] host = index['hosts'][0].split(':')[0] if index_name in index_hosts_dict: index_hosts_dict[index_name].append(host) else: index_hosts_dict[index_name] = [host] zoneinfo = self.rest.get_all_zones_info()['groups'] groupinfo = {} for zone in zoneinfo: if zone['nodes']: name = zone['name'] groupinfo[name] = [] for node in zone['nodes']: host = node['hostname'].split(':')[0] groupinfo[name].append(host) group_1, group_2 = [groupinfo[item] for item in groupinfo] for index in index_hosts_dict: hosts = index_hosts_dict[index] if hosts[0] in group_1 and hosts[1] in group_2: 
self.log.info("Replicas are created on different server group") elif hosts[0] in group_2 and hosts[1] in group_1: self.log.info("Replicas are created on different server group") else: self.fail("Replicas are created on the same server group") def test_index_placements_with_skewed_server_group(self): self._create_server_groups() rebalance_task = self.cluster.async_rebalance(servers=self.servers[:self.nodes_init], to_add=[], to_remove=[], services=[]) rebalance_task.result() rebalance_status = RestHelper(self.rest).rebalance_reached() self.assertTrue(rebalance_status, "rebalance failed, stuck or did not complete") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_gen = QueryDefinition(index_name='idx', index_fields=['age', 'country', 'city']) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) meta_index_gen = QueryDefinition(index_name='meta_idx', index_fields=['meta().id']) query = meta_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = meta_index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() indexer_metadata = self.index_rest.get_indexer_metadata()['status'] index_hosts_dict = {} for index in indexer_metadata: index_name = index['indexName'] host = index['hosts'][0].split(':')[0] if index_name in index_hosts_dict: index_hosts_dict[index_name].append(host) else: index_hosts_dict[index_name] = [host] hosts = [server.ip for server in self.servers[:self.nodes_init]] for index 
in index_hosts_dict: index_hosts = sorted(index_hosts_dict[index]) self.assertEqual(hosts, index_hosts, "Index Placement is not matching expected value") def test_index_placements_with_skewed_load(self): is_new_index_equivalent = self.input.param("is_new_index_equivalent", False) index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) < 3: self.fail("Need at least 3 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] try: for item, index_fields in zip(range(self.initial_index_num), self.index_field_set): index_gen = QueryDefinition(index_name=f'idx_{item}', index_fields=list(index_fields)) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() except Exception as err: if 'Build Already In Progress' not in str(err): self.fail(err) self.sleep(10) indexer_metadata = self.index_rest.get_indexer_metadata()['status'] random_index_node = random.choice(index_nodes) random_index_node = random_index_node.ip # finding all indexes on selected index node indexes_to_be_dropped = {} for item in indexer_metadata: host = item['hosts'][0].split(':')[0] if random_index_node == host: indexes_to_be_dropped[item['indexName']] = item['replicaId'] # Running alter index with drop_replica action for item in indexes_to_be_dropped: query = f'ALTER INDEX {item} ON {collection_namespace} WITH' \ f' {{"action": "drop_replica", "replicaId": {indexes_to_be_dropped[item]}}};' self.run_cbq_query(query=query) self.sleep(60, "Giving some time for clean-up after replicas drop") least_loaded_node = self._find_least_loaded_index_node()[0] self.assertEqual(random_index_node, 
least_loaded_node) # Now adding indexes on a skewed cluster if is_new_index_equivalent: new_index_set = self.index_field_set else: new_index_set = self.index_field_set[self.initial_index_num:] for item, index_fields in zip(range(len(indexes_to_be_dropped)), new_index_set): index_fields = [f"`{field}`" for field in index_fields] least_loaded_node = self._find_least_loaded_index_node(count=self.num_replicas+1) index_name = f'new_idx_{item}' new_index_gen = QueryDefinition(index_name=index_name, index_fields=list(index_fields)) query = new_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) if self.defer_build: build_query = new_index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() indexer_metadata = self.index_rest.get_indexer_metadata()['status'] index_hosts = [] for index in indexer_metadata: if index_name == index['indexName']: index_host = index['hosts'][0].split(':')[0] index_hosts.append(index_host) if not set(index_hosts) == set(least_loaded_node): node_miss = list(set(least_loaded_node) - set(index_hosts))[0] if is_new_index_equivalent: # Checking if node miss has equivalent index equivalent_index_found = False for index in indexer_metadata: fields = index['secExprs'] host = index['hosts'][0].split(':')[0] if list(index_fields) == fields and host == node_miss: equivalent_index_found = True break if not equivalent_index_found: self.fail("No equivalent index on least loaded node but Index was not created on it.") else: self.fail("Index replica was not placed on least loaded node") def test_index_placement_scaled_up_with_deferred_indexes(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) < 3: self.fail("Need at least 3 index nodes") 
self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_names = [] for item in range(self.initial_index_num): index_fields = self.index_field_set[item % len(self.index_field_set)] index_name = f'idx_{item}' index_names.append(index_name) index_gen = QueryDefinition(index_name=index_name, index_fields=list(index_fields)) defer_build = random.choice([True, False]) try: query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=defer_build, num_replica=self.num_replicas) self.run_cbq_query(query) except Exception as err: if 'Build Already In Progress.' not in str(err): self.fail(err) self.sleep(10) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online(defer_build=True) self.sleep(10) # dropping some random indexes indexes_to_delete = [] out = int(self.num_indexes_to_delete/self.num_replicas) for item in range(out): idx = random.choice(index_names) indexes_to_delete.append(idx) drop_idx_query = f"DROP index {idx} on {collection_namespace}" self.run_cbq_query(drop_idx_query) index_names.remove(idx) self.wait_until_indexes_online(defer_build=True) # creating new indexes to check distribution hits = 0 miss = 0 for item in range(self.new_indexes): least_loaded_node = self._find_least_loaded_index_node(count=2) # creating new index and checking where does planner place it idx_new = f'idx_new_{item}' index_fields = self.index_field_set[self.initial_index_num + item % len(self.index_field_set)] defer_build = random.choice([True, False]) new_index_gen = QueryDefinition(index_name=idx_new, index_fields=index_fields) query = new_index_gen.generate_index_create_query(namespace=collection_namespace, num_replica=self.num_replicas, defer_build=defer_build) self.run_cbq_query(query=query) self.wait_until_indexes_online(defer_build=True) index_metadata = 
self.index_rest.get_indexer_metadata()['status'] for index in index_metadata: if index['indexName'] == idx_new: host = index['hosts'][0].split(':')[0] # self.assertTrue(host in least_loaded_node, "Index not created on Least Loaded Index node") if host in least_loaded_node: hits += 1 else: miss += 1 self.log.info(f"Least loaded nodes were: {least_loaded_node}") self.log.info(f"Index not created on Least Loaded Index node. index created on host {host}") self.log.info(f"Hits: {hits}") self.log.info(f"Miss: {miss}") hits_percentage = hits / (hits + miss) * 100 self.assertTrue(hits_percentage > 90, "Hits percentage is lower than 80%") def test_index_placement_with_existing_partitioned_index(self): self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] for item in range(self.initial_index_num): index_fields = list(self.index_field_set[item % len(self.index_field_set)]) index_gen = QueryDefinition(index_name=f'idx_{item}', index_fields=index_fields, partition_by_fields=index_fields) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build, num_partition=12) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) # Creating non-partitioned indexes now for item in range(self.new_indexes): least_loaded_node = self._find_least_loaded_index_node()[0] index_fields = list(self.index_field_set[self.initial_index_num + item % len(self.index_field_set)]) idx_new = f'new_idx_{item}' index_gen = QueryDefinition(index_name=idx_new, index_fields=index_fields) query = index_gen.generate_index_create_query(namespace=collection_namespace) self.run_cbq_query(query) self.wait_until_indexes_online() self.sleep(30) index_metadata = self.index_rest.get_indexer_metadata()['status'] for index in index_metadata: if 
index['indexName'] == idx_new: host = index['hosts'][0].split(':')[0] self.assertTrue(host in least_loaded_node, "Index not created on Least Loaded Index node") def test_index_placement_with_deferred_indexes(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) != 2: self.fail("Need 2 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] for item in range(self.initial_index_num): index_fields = list(self.index_field_set[item % len(self.index_field_set)]) index_gen = QueryDefinition(index_name=f'idx_{item}', index_fields=index_fields) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) # creating a deferred index index_fields = list(self.index_field_set[self.initial_index_num % len(self.index_field_set)]) deferred_idx = 'deferred_idx' index_gen = QueryDefinition(index_name=deferred_idx, index_fields=index_fields) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=True) self.run_cbq_query(query) self.wait_until_indexes_online() indexer_metadata = self.index_rest.get_indexer_metadata()['status'] deferred_index_host = None for index in indexer_metadata: if index['indexName'] == deferred_idx: deferred_index_host = index['hosts'][0].split(':')[0] break # create another index and check index placement least_loaded_node = self._find_least_loaded_index_node() new_idx = 'new_idx' meta_index_gen = QueryDefinition(index_name=new_idx, index_fields=['meta_idx']) query = meta_index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build) self.run_cbq_query(query) 
self.assertTrue(deferred_index_host != least_loaded_node) indexer_metadata = self.index_rest.get_indexer_metadata()['status'] for index in indexer_metadata: if index['indexName'] == new_idx: host = index['hosts'][0].split(':')[0] self.assertTrue(host in least_loaded_node, "Index not created on Least Loaded Index node") def test_index_placement_with_partitioned_deferred_index(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) != 2: self.fail("Need 2 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] index_gen = QueryDefinition(index_name='idx', index_fields=['age', 'city', 'country']) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) # creating a deferred index index_fields = ['age', 'country', 'name'] deferred_idx = 'deferred_idx' index_gen = QueryDefinition(index_name=deferred_idx, index_fields=index_fields, partition_by_fields=index_fields) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=True, num_partition=5) self.run_cbq_query(query) self.wait_until_indexes_online(defer_build=True) indexer_metadata = self.index_rest.get_indexer_metadata()['status'] partitioned_loaded_node = None num_partition_on_node = 0 for index in indexer_metadata: if index['partitioned']: for node in index['partitionMap']: if num_partition_on_node < len(index['partitionMap'][node]): num_partition_on_node = len(index['partitionMap'][node]) partitioned_loaded_node = node.split(':')[0] break self.sleep(30) # create another index and check index placement least_loaded_node = self._find_least_loaded_index_node()[0] new_idx = 
'new_idx' meta_index_gen = QueryDefinition(index_name=new_idx, index_fields=['meta_idx']) query = meta_index_gen.generate_index_create_query(namespace=collection_namespace) self.run_cbq_query(query) self.assertTrue(partitioned_loaded_node != least_loaded_node) indexer_metadata = self.index_rest.get_indexer_metadata()['status'] for index in indexer_metadata: if index['indexName'] == new_idx: host = index['hosts'][0].split(':')[0] self.assertTrue(host in least_loaded_node, "Index not created on Least Loaded Index node") def test_index_placement_for_equivalent_indexes(self): index_nodes = self.get_nodes_from_services_map(service_type="index", get_all_nodes=True) if len(index_nodes) != 3: self.fail("Need 3 index nodes") self.prepare_collection_for_indexing(num_of_docs_per_collection=self.num_of_docs_per_collection) collection_namespace = self.namespaces[0] for item in range(self.initial_index_num): index_fields = list(self.index_field_set[item % len(self.index_field_set)]) index_gen = QueryDefinition(index_name=f'idx_{item}', index_fields=index_fields) query = index_gen.generate_index_create_query(namespace=collection_namespace, defer_build=self.defer_build) self.run_cbq_query(query) if self.defer_build: build_query = index_gen.generate_build_query(namespace=collection_namespace) self.run_cbq_query(build_query) self.wait_until_indexes_online() self.sleep(10) least_loaded_node = self._find_least_loaded_index_node() indexer_meta_data = self.index_rest.get_indexer_metadata()['status'] equivalent_index_field = None for index in indexer_meta_data: host = index['hosts'][0].split(':')[0] if host in least_loaded_node: equivalent_index_field = index['secExprs'][0] break # creating equivalent index with one replica, so that index replicas be place on other than least loaded node new_idx = "new_idx" new_index_gen = QueryDefinition(index_name=new_idx, index_fields=[equivalent_index_field]) query = new_index_gen.generate_index_create_query(namespace=collection_namespace, 
num_replica=self.num_replicas) self.run_cbq_query(query=query) self.wait_until_indexes_online() indexer_meta_data = self.index_rest.get_indexer_metadata()['status'] for index in indexer_meta_data: if index['indexName'] == new_idx: host = index['hosts'][0].split(':')[0] self.assertTrue(host not in least_loaded_node, "Equivalent index replica created on least loaded node, not maintaining HA")
53.890226
120
0.642883
4,343
35,837
4.944508
0.076905
0.026451
0.021887
0.03283
0.772795
0.750396
0.725063
0.708485
0.696936
0.685154
0
0.006072
0.269303
35,837
664
121
53.971386
0.813985
0.031113
0
0.640071
0
0.001773
0.09148
0.006687
0
0
0
0
0.026596
1
0.028369
false
0.005319
0.014184
0
0.046099
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
69ce01a02b2fa73da586c622950504f9adcca93e
98
py
Python
src/scope_control/__init__.py
an-oreo/Scope_Control
c585394c212430b726f4e065928a9f1a954d42a2
[ "MIT" ]
null
null
null
src/scope_control/__init__.py
an-oreo/Scope_Control
c585394c212430b726f4e065928a9f1a954d42a2
[ "MIT" ]
null
null
null
src/scope_control/__init__.py
an-oreo/Scope_Control
c585394c212430b726f4e065928a9f1a954d42a2
[ "MIT" ]
null
null
null
__version_info = (0,0,1) __version__ = '.'.join(map(str,__version_info)) from .tek_scope import *
24.5
47
0.72449
15
98
4
0.733333
0.366667
0
0
0
0
0
0
0
0
0
0.034091
0.102041
98
4
48
24.5
0.647727
0
0
0
0
0
0.010101
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
69d03de5dd0d43e3dd6be25b7c4ab4b6c5d15670
117
py
Python
pywb/main.py
karenhanson/autoscalar
40010721a62de1e59a8bfaf6273c47d8977e1f4c
[ "Apache-2.0" ]
4
2018-08-29T07:00:27.000Z
2019-05-23T18:55:30.000Z
pywb/main.py
karenhanson/autoscalar
40010721a62de1e59a8bfaf6273c47d8977e1f4c
[ "Apache-2.0" ]
null
null
null
pywb/main.py
karenhanson/autoscalar
40010721a62de1e59a8bfaf6273c47d8977e1f4c
[ "Apache-2.0" ]
2
2020-06-24T15:08:59.000Z
2021-05-14T11:08:59.000Z
from gevent.monkey import patch_all; patch_all() from dynproxyapp import DynProxyPywb application = DynProxyPywb()
19.5
48
0.820513
14
117
6.714286
0.642857
0.170213
0
0
0
0
0
0
0
0
0
0
0.119658
117
5
49
23.4
0.912621
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
69e20f89b12a5c9657f9f9e6b5e63f719bee93bc
332
py
Python
eqnsolve/eqn3.py
nickinack/Genesis
1e3c76460855001ec626eaf6e4e2f63e7acfe2ae
[ "MIT" ]
3
2020-12-07T13:30:49.000Z
2020-12-07T13:33:36.000Z
eqnsolve/eqn3.py
nickinack/Library
1e3c76460855001ec626eaf6e4e2f63e7acfe2ae
[ "MIT" ]
4
2020-10-18T04:26:43.000Z
2020-11-16T06:06:07.000Z
eqnsolve/eqn3.py
nickinack/Library
1e3c76460855001ec626eaf6e4e2f63e7acfe2ae
[ "MIT" ]
5
2020-10-07T17:11:27.000Z
2020-11-23T19:00:58.000Z
import numpy as np var = 1 def eqnfit(chromosome): ''' Equation 1: x^143 + 2x^324 - x^13 = 10362 ''' eqn = chromosome[0]**143 + 2*chromosome[0]**324 - chromosome[0]**13 val = 10362 return (1/(eqn-val)) def value(chromosome): return chromosome[0]**143 + 2*chromosome[0]**324 - chromosome[0]**13
19.529412
75
0.590361
49
332
4
0.44898
0.336735
0.142857
0.153061
0.428571
0.428571
0.428571
0.428571
0.428571
0.428571
0
0.181102
0.23494
332
17
75
19.529412
0.590551
0.123494
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.125
0.125
0.625
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
69e694ffa16986e83e90c22303ef591d104b689e
52
py
Python
caluma/extensions/permissions.py
czosel/caluma
4a3e81b2000961ab934bfc1c6840ec00f0ba2c19
[ "MIT" ]
null
null
null
caluma/extensions/permissions.py
czosel/caluma
4a3e81b2000961ab934bfc1c6840ec00f0ba2c19
[ "MIT" ]
1
2020-07-11T01:07:44.000Z
2020-07-11T01:07:44.000Z
caluma/extensions/permissions.py
czosel/caluma
4a3e81b2000961ab934bfc1c6840ec00f0ba2c19
[ "MIT" ]
null
null
null
# To be overwritten for permission extensions point
26
51
0.826923
7
52
6.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.153846
52
1
52
52
0.977273
0.942308
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
69ead2c83cca25ec2d4df29e983c0bbf117d4093
197
py
Python
src/apps/domains/hello_world/views.py
antiline/bootstrap-django
63fe07fbecc7634aec5dab1ddfd0cc10a7a8543e
[ "MIT" ]
null
null
null
src/apps/domains/hello_world/views.py
antiline/bootstrap-django
63fe07fbecc7634aec5dab1ddfd0cc10a7a8543e
[ "MIT" ]
12
2018-12-02T16:19:45.000Z
2021-06-10T17:52:26.000Z
src/apps/domains/hello_world/views.py
antiline/bootstrap-django
63fe07fbecc7634aec5dab1ddfd0cc10a7a8543e
[ "MIT" ]
null
null
null
from django.shortcuts import render from django.views import View class HelloWorldView(View): @staticmethod def get(request): return render(request, 'hello_world/index.html', {})
21.888889
60
0.725888
24
197
5.916667
0.75
0.140845
0
0
0
0
0
0
0
0
0
0
0.177665
197
8
61
24.625
0.876543
0
0
0
0
0
0.111675
0.111675
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.833333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
0
0
0
4
69f40b4d71d63aa21756a9ada2b5efd1836e35ab
460
py
Python
ws.py
pombredanne/hg.mozilla.org-users-gerv_mozilla.org-relic
cc13a0f44c2bca1cfa7bf10a8bea90beff529ca0
[ "MIT" ]
null
null
null
ws.py
pombredanne/hg.mozilla.org-users-gerv_mozilla.org-relic
cc13a0f44c2bca1cfa7bf10a8bea90beff529ca0
[ "MIT" ]
null
null
null
ws.py
pombredanne/hg.mozilla.org-users-gerv_mozilla.org-relic
cc13a0f44c2bca1cfa7bf10a8bea90beff529ca0
[ "MIT" ]
null
null
null
############################################################################### # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. ############################################################################### import re def collapse(line): # Collapse whitespace return re.sub("\s+", " ", line).strip()
41.818182
79
0.45
53
460
3.90566
0.792453
0.048309
0
0
0
0
0
0
0
0
0
0.01
0.130435
460
10
80
46
0.5075
0.46087
0
0
0
0
0.047059
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
69f887bad4d7b87519a7e0c51737ead31fcff8d6
167
py
Python
dash_archer/_imports_.py
iqqmuT/dash-archer
c9bff305975df2306f73967156b568e14b32f716
[ "MIT" ]
null
null
null
dash_archer/_imports_.py
iqqmuT/dash-archer
c9bff305975df2306f73967156b568e14b32f716
[ "MIT" ]
6
2020-07-07T20:49:14.000Z
2022-02-13T01:58:54.000Z
dash_archer/_imports_.py
iqqmuT/dash-archer
c9bff305975df2306f73967156b568e14b32f716
[ "MIT" ]
null
null
null
from .DashArcherContainer import DashArcherContainer from .DashArcherElement import DashArcherElement __all__ = [ "DashArcherContainer", "DashArcherElement" ]
23.857143
52
0.808383
11
167
11.909091
0.454545
0
0
0
0
0
0
0
0
0
0
0
0.131737
167
7
53
23.857143
0.903448
0
0
0
0
0
0.214286
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
385e987605f7f124c16fbee8e0758a6395b5cbef
6,648
py
Python
knights-ai/pawn.py
DavidSolomon22/knights-ai
f1edeeadac1f2d025e005dc204fd7bcf3889b0a7
[ "MIT" ]
null
null
null
knights-ai/pawn.py
DavidSolomon22/knights-ai
f1edeeadac1f2d025e005dc204fd7bcf3889b0a7
[ "MIT" ]
null
null
null
knights-ai/pawn.py
DavidSolomon22/knights-ai
f1edeeadac1f2d025e005dc204fd7bcf3889b0a7
[ "MIT" ]
1
2021-04-04T11:50:54.000Z
2021-04-04T11:50:54.000Z
import pygame class Pawn(pygame.sprite.Sprite): def __init__(self, image): super().__init__() if image == "white": self.defaultPawnImage = pygame.image.load('resources/white_pawn.png') self.chosenPawnImage = pygame.image.load('resources/chosen_pawn.png') self.color = image else: self.defaultPawnImage = pygame.image.load('resources/black_pawn.png') self.chosenPawnImage = pygame.image.load('resources/chosen_pawn.png') self.color = image self.image = self.defaultPawnImage self.rect = self.image.get_rect() def set_pawn_position(self, mx, my): self.rect.x = mx self.rect.y = my def pawn_selected(self): self.image = self.chosenPawnImage def pawn_unselected(self): self.image = self.defaultPawnImage def get_tile_center_x(self): return int(self.rect.x + (self.image.get_width()) / 2) def get_tile_center_y(self): return int(self.rect.y + (self.image.get_height()) / 2) def check_if_pawn_is_moving_to_the_nearest_tile(self, newPawnTile: pygame.sprite.Sprite): distanceX = abs(newPawnTile.get_tile_center_x() - self.get_tile_center_x()) distanceY = abs(newPawnTile.get_tile_center_y() - self.get_tile_center_y()) if (distanceX == newPawnTile.image.get_width()) ^ (distanceY == newPawnTile.image.get_height()): if (distanceX > newPawnTile.image.get_width()) or (distanceY > newPawnTile.image.get_height()): return False return True else: return False def check_if_pawn_is_jumping_over(self, newPawnTile: pygame.sprite.Sprite): distanceX = abs(newPawnTile.get_tile_center_x() - self.get_tile_center_x()) distanceY = abs(newPawnTile.get_tile_center_y() - self.get_tile_center_y()) if (distanceX == (newPawnTile.image.get_width() * 2)) ^ (distanceY == (newPawnTile.image.get_height() * 2)): if (distanceX == newPawnTile.image.get_width()) or (distanceY == newPawnTile.image.get_height()): return False return True else: return False def double_jump(self, newPawnTile: pygame.sprite.Sprite, pawnsSprintTable: pygame.sprite.Group): distanceX = newPawnTile.get_tile_center_x() - self.get_tile_center_x() 
distanceY = newPawnTile.get_tile_center_y() - self.get_tile_center_y() if distanceX != 0: if distanceX > 0: pawnInBettwenX = newPawnTile.get_tile_center_x_for_drawing_pawn(self) - newPawnTile.image.get_width() for pawn in pawnsSprintTable: if pawn.rect.x == pawnInBettwenX and pawn.rect.y == self.rect.y: return True else: pawnInBettwenX = newPawnTile.get_tile_center_x_for_drawing_pawn(self) + newPawnTile.image.get_width() for pawn in pawnsSprintTable: if pawn.rect.x == pawnInBettwenX and pawn.rect.y == self.rect.y: return True else: if distanceY > 0: pawnInBettwenY = newPawnTile.get_tile_center_y_for_drawing_pawn(self) - newPawnTile.image.get_height() for pawn in pawnsSprintTable: if pawn.rect.y == pawnInBettwenY and pawn.rect.x == self.rect.x: return True else: pawnInBettwenY = newPawnTile.get_tile_center_y_for_drawing_pawn(self) + newPawnTile.image.get_height() for pawn in pawnsSprintTable: if pawn.rect.y == pawnInBettwenY and pawn.rect.x == self.rect.x: return True def move_pawn(self, mx, my, chessTilesSprintTable: pygame.sprite.Group, pawnsSprintTable: pygame.sprite.Group, hasdouble_jumped, madeAMove): for tileSprite in chessTilesSprintTable: if tileSprite.rect.collidepoint((mx, my)): if self.check_if_pawn_is_moving_to_the_nearest_tile(tileSprite): if not madeAMove: if tileSprite.check_if_contains_pawn(pawnsSprintTable): self.set_pawn_position(tileSprite.get_tile_center_x_for_drawing_pawn(self), tileSprite.get_tile_center_y_for_drawing_pawn(self)) return False, True else: return False, False elif hasdouble_jumped: return True, True else: return False, True else: if self.check_if_pawn_is_jumping_over(tileSprite): if not hasdouble_jumped: if not madeAMove: if tileSprite.check_if_contains_pawn(pawnsSprintTable): if self.double_jump(tileSprite, pawnsSprintTable): self.set_pawn_position(tileSprite.get_tile_center_x_for_drawing_pawn(self), tileSprite.get_tile_center_y_for_drawing_pawn(self)) return True, True else: return False, False else: return False, False else: 
return False, True else: if tileSprite.check_if_contains_pawn(pawnsSprintTable): if self.double_jump(tileSprite, pawnsSprintTable): self.set_pawn_position(tileSprite.get_tile_center_x_for_drawing_pawn(self), tileSprite.get_tile_center_y_for_drawing_pawn(self)) return True, True else: return True, True else: return True, True else: if madeAMove: return True, True else: return False, False if madeAMove: if hasdouble_jumped: return True, True else: return False, True else: return False, False
48.173913
118
0.543923
671
6,648
5.117735
0.110283
0.048923
0.090856
0.048923
0.806057
0.755387
0.709959
0.67385
0.665114
0.644729
0
0.001702
0.381318
6,648
137
119
48.525547
0.833212
0
0
0.644628
0
0
0.015493
0.014741
0
0
0
0
0
1
0.082645
false
0
0.008264
0.016529
0.330579
0.099174
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
3871e4e16a49ebefe3b4f5f2ec6370321639b115
109
py
Python
tests/data/validate/whoops_forgot_this.py
balihoo/blambda
305e55a6c02ba99d2d6fa2086202442cfb4c1108
[ "MIT" ]
null
null
null
tests/data/validate/whoops_forgot_this.py
balihoo/blambda
305e55a6c02ba99d2d6fa2086202442cfb4c1108
[ "MIT" ]
2
2020-03-23T17:33:12.000Z
2021-06-29T10:32:35.000Z
tests/data/validate/whoops_forgot_this.py
balihoo/blambda
305e55a6c02ba99d2d6fa2086202442cfb4c1108
[ "MIT" ]
2
2017-10-03T21:40:35.000Z
2017-10-25T17:02:31.000Z
# File used for unit tests # 'blambda validate' should indicate that this file was missing from the manifest
36.333333
81
0.788991
17
109
5.058824
0.941176
0
0
0
0
0
0
0
0
0
0
0
0.174312
109
2
82
54.5
0.955556
0.954128
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
388175c6d585abc51eca36a5e416ed37094a5087
218
py
Python
settings.py
GoogleCloudPlatform/appengine-guestbook-python-cloudsql
c8dcc70f75bd6dbc37c779a8f5441fcd02bec859
[ "Apache-2.0" ]
7
2016-04-20T02:17:58.000Z
2021-10-09T18:44:04.000Z
settings.py
GoogleCloudPlatform/appengine-guestbook-python-cloudsql
c8dcc70f75bd6dbc37c779a8f5441fcd02bec859
[ "Apache-2.0" ]
null
null
null
settings.py
GoogleCloudPlatform/appengine-guestbook-python-cloudsql
c8dcc70f75bd6dbc37c779a8f5441fcd02bec859
[ "Apache-2.0" ]
8
2015-03-19T22:00:39.000Z
2021-10-09T18:43:55.000Z
"""Setting file for the Cloud SQL guestbook""" CLOUDSQL_INSTANCE = 'ReplaceWithYourInstanceName' DATABASE_NAME = 'guestbook' USER_NAME = 'ReplaceWithYourDatabaseUserName' PASSWORD = 'ReplaceWithYourDatabasePassword'
27.25
49
0.821101
18
218
9.777778
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.09633
218
7
50
31.142857
0.893401
0.183486
0
0
0
0
0.573099
0.520468
0
0
0
0
0
1
0
false
0.25
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
388447fa906ae6d27118b756ccd5dea8a9071ecd
8,508
py
Python
beward/__init__.py
Limych/py-beward
b05c1748a92ba2bd32409e10a62c4799f230dd70
[ "MIT" ]
4
2019-07-27T23:21:58.000Z
2021-03-06T19:21:42.000Z
beward/__init__.py
Limych/py-beward
b05c1748a92ba2bd32409e10a62c4799f230dd70
[ "MIT" ]
63
2019-09-13T11:36:35.000Z
2022-03-29T08:09:06.000Z
beward/__init__.py
Limych/py-beward
b05c1748a92ba2bd32409e10a62c4799f230dd70
[ "MIT" ]
2
2020-05-16T09:48:51.000Z
2022-02-17T21:47:35.000Z
# Copyright (c) 2019-2021, Andrey "Limych" Khrolenok <andrey@khrolenok.ru> # Creative Commons BY-NC-SA 4.0 International Public License # (see LICENSE.md or https://creativecommons.org/licenses/by-nc-sa/4.0/) """Python API for Beward Cameras and Doorbells.""" import logging import struct from collections import namedtuple import hexdump from _socket import ( AF_INET, SO_BROADCAST, SO_REUSEADDR, SOCK_DGRAM, SOL_SOCKET, inet_ntoa, socket, timeout, ) # Will be parsed by setup.py to determine package metadata from beward.camera import BewardCamera from beward.const import BEWARD_CAMERA, BEWARD_DOORBELL, STARTUP_MESSAGE from beward.core import BewardGeneric from beward.doorbell import BewardDoorbell # You really should not `import *` - it is poor practice # but if you do, here is what you get: __all__ = [ "Beward", "BewardGeneric", "BewardCamera", "BewardDoorbell", ] _LOGGER = logging.getLogger(__name__) # # http://docs.python.org/2/howto/logging.html#library-config # Avoids spurious error messages if no logger is configured by the user _LOGGER.addHandler(logging.NullHandler()) # _LOGGER.info(STARTUP_MESSAGE) # pylint: disable=too-few-public-methods class Beward: """Beward device factory class.""" @staticmethod def discovery(): # pragma: no cover """Discover Beward devices in local network.""" server = socket(AF_INET, SOCK_DGRAM) server.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) server.setsockopt(SOL_SOCKET, SO_BROADCAST, 1) server.bind(("0.0.0.0", 0)) server.settimeout(1) _LOGGER.debug("Start discovery") server.sendto( b"\x67\x45\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", ("255.255.255.255", 59123), ) devices = {} while True: try: data = server.recvfrom(1024) _LOGGER.debug( "Discovery response data:\n%s", hexdump.hexdump(data[0][28:], result="return"), ) ( # packet header (28 bytes): # "\x67\x45\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00" # 
"\x48\x02\x00\x00" (packet data length) = 584 # packet data (584 bytes): # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00" device_id, # "\x5f\x06\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03" # "\x21\x00\x00" name, # "\x49\x50\x43\x31\x37\x32\x34\x00\x00\x00\x00..." host_ip, # "\x5a\x01\xa8\xc0" mac, # "\x00\x5a\x22\x30\x07\x5f" http_port, # "\x50\x00" data_port, # "\x88\x13" # "\x00\x00" net_mask, # "\x00\xff\xff\xff" gate_ip, # "\x01\x01\xa8\xc0" # "\x01\x08\x37\xe0" # "\x01\x01\xa8\xc0" (gate_ip) # "\x88\x13" (data_port) # "\x00\x00\x01\x00\x00\x00" # "\x5a\x01\xa8\xc0" (host_ip) # "\x00\xff\xff\xff" "\x01\x01\xa8\xc0" (net_mask + gate_ip) # "\x88\x13" "\x50\x00" (data_port + http_port) # "\x01\x08\x37\xe0" # "\x88\x13" (data_port) # "\x00\x5a\x22\x30\x07\x5f" (mac) # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x02\x30\x75" # "\x50\x00" "\x88\x13" (http_port + data_port) # "\x00\x00" # "\x01\x01\xa8\xc0" (gate_ip = dns1_ip?) 
# "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x70\x17\x37\x01\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00" # "\x08\x08\x08\x08" (dns2_ip) # "\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00" # "\x00\x00\x00\x00\x01\x00" # "\xa0\x01\xa8\xc0" (ip?) # "\x00\xff\xff\xff" "\x01\x01\xa8\xc0" (net_mask + gate_ip) # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" # "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x38\x71\x32\x4d" # "\x75\x49\x62\x6d\x7a\x32\x67\x66\x4c\x5a\x35\x70\x6d\x42\x54\x51" # "\x49\x69\x49\x77\x6f\x37\x63\x71\x6c\x4e\x64\x30" ) = struct.unpack("<45xL19x64sI6s2H2x2I", data[0][:156]) name = name.replace(b"\x00", b"").decode("utf-8") def _unpack_ip(ip_addr): return inet_ntoa(struct.pack(">I", ip_addr)) (host_ip, net_mask, gate_ip) = ( _unpack_ip(host_ip), _unpack_ip(net_mask), _unpack_ip(gate_ip), ) mac = ":".join("%02x" % i for i in mac) _LOGGER.info( "Discovered %s (ID: %d) at http://%s:%d", name, device_id, host_ip, http_port, ) if mac not in devices: dev = { "device_id": device_id, "name": name, "host_ip": host_ip, "http_port": http_port, "data_port": data_port, "mac": mac, "net_mask": net_mask, "gate_ip": gate_ip, } devices[mac] = namedtuple("BewardDevice", dev.keys())(*dev.values()) except Exception as err: # pylint: disable=broad-except if not isinstance(err, timeout): _LOGGER.debug(err) break _LOGGER.debug("Stop discovery") server.close() return devices 
@staticmethod def factory(host_ip: str, username: str, password: str, **kwargs): """Return correct class for device.""" bwd = BewardGeneric(host_ip, username, password) model = bwd.system_info.get("DeviceModel") dev_type = bwd.get_device_type(model) if dev_type is None: raise ValueError('Unknown device "%s"' % model) inst = None if dev_type == BEWARD_CAMERA: inst = BewardCamera(host_ip, username, password, **kwargs) elif dev_type == BEWARD_DOORBELL: inst = BewardDoorbell(host_ip, username, password, **kwargs) _LOGGER.debug("Factory create instance of %s", inst.__class__) return inst
42.328358
88
0.513752
1,114
8,508
3.830341
0.233393
0.589173
0.839466
1.079916
0.384111
0.344739
0.323647
0.318725
0.318725
0.318725
0
0.211542
0.336037
8,508
200
89
42.54
0.543813
0.399624
0
0.092593
0
0.018519
0.08837
0.022342
0
0
0
0
0
1
0.027778
false
0.037037
0.083333
0.009259
0.148148
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
389084961999b460bc9c9bc34c6ebbb9bcf77387
50
py
Python
tests/__init__.py
make-all/metlink-nz
175582c911b1cc0407b7ce3220ee112d437458c2
[ "Apache-2.0" ]
3
2021-06-15T18:32:34.000Z
2022-01-10T07:03:45.000Z
tests/__init__.py
make-all/metlink-nz
175582c911b1cc0407b7ce3220ee112d437458c2
[ "Apache-2.0" ]
2
2021-06-18T23:14:16.000Z
2021-06-19T20:18:52.000Z
tests/__init__.py
make-all/metlink-nz
175582c911b1cc0407b7ce3220ee112d437458c2
[ "Apache-2.0" ]
2
2021-11-23T06:17:28.000Z
2022-02-18T22:56:47.000Z
"""Tests for the metlink-nz custom component."""
25
49
0.7
7
50
5
1
0
0
0
0
0
0
0
0
0
0
0
0.14
50
1
50
50
0.813953
0.84
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
38c6344df3a9af0836fd999ca106eff8c2b91a31
59
py
Python
lang/Python/character-codes-1.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
lang/Python/character-codes-1.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
lang/Python/character-codes-1.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
print(ord('a')) # prints "97" print(chr(97)) # prints "a"
19.666667
29
0.576271
10
59
3.4
0.6
0
0
0
0
0
0
0
0
0
0
0.08
0.152542
59
2
30
29.5
0.6
0.372881
0
0
0
0
0.029412
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
2a03638b7e15a673b4745bc1287502cb93727148
195
py
Python
Client/ThreadWorker.py
liran121211/PyChat
f2643b87b588a3f81230425dfd65c253019130ae
[ "PostgreSQL", "Unlicense" ]
null
null
null
Client/ThreadWorker.py
liran121211/PyChat
f2643b87b588a3f81230425dfd65c253019130ae
[ "PostgreSQL", "Unlicense" ]
null
null
null
Client/ThreadWorker.py
liran121211/PyChat
f2643b87b588a3f81230425dfd65c253019130ae
[ "PostgreSQL", "Unlicense" ]
null
null
null
from PyQt5.QtCore import QThread, pyqtSignal class ThreadWorker(QThread): progress = pyqtSignal(int) finished = pyqtSignal(int) def run(self): while True: pass
17.727273
44
0.651282
21
195
6.047619
0.809524
0.204724
0
0
0
0
0
0
0
0
0
0.007042
0.271795
195
10
45
19.5
0.887324
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0.142857
0.142857
0
0.714286
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
1
0
0
4
2a6313d04653977780926cdfeaf5c52e61f64347
121
py
Python
scripts/RecreateSecrets.sample.py
agrc/recreate-web
f98e050e1d23d9863e1d437f173370ad2ffbc11c
[ "MIT" ]
2
2018-08-13T09:42:02.000Z
2021-11-09T20:35:32.000Z
scripts/RecreateSecrets.sample.py
agrc/recreate-web
f98e050e1d23d9863e1d437f173370ad2ffbc11c
[ "MIT" ]
34
2017-10-23T20:06:04.000Z
2018-03-12T19:15:19.000Z
scripts/RecreateSecrets.sample.py
agrc/recreate-web
f98e050e1d23d9863e1d437f173370ad2ffbc11c
[ "MIT" ]
null
null
null
TEST_SERVER_IP = '<ip address>' PROD_SERVER_IP = '<ip address of web server>' KDRIVE = '<path to ParksAndMonuments.gdb>'
30.25
45
0.735537
18
121
4.722222
0.666667
0.188235
0.235294
0.4
0
0
0
0
0
0
0
0
0.132231
121
3
46
40.333333
0.809524
0
0
0
0
0
0.570248
0.181818
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
2a8119e55ee88975e8bf8a5a7f25abd89d09926c
786
py
Python
resources/sublimetext/flower/commands/context.py
advancedwebdeveloper/flow9
26e19216495bcb948d00aaea6b5c190e30fc6e3f
[ "MIT" ]
583
2019-04-26T11:52:35.000Z
2022-02-22T17:53:19.000Z
resources/sublimetext/flower/commands/context.py
advancedwebdeveloper/flow9
26e19216495bcb948d00aaea6b5c190e30fc6e3f
[ "MIT" ]
279
2019-04-26T11:53:17.000Z
2022-02-21T13:35:08.000Z
resources/sublimetext/flower/commands/context.py
advancedwebdeveloper/flow9
26e19216495bcb948d00aaea6b5c190e30fc6e3f
[ "MIT" ]
44
2019-04-29T18:09:19.000Z
2021-12-23T16:06:05.000Z
import sublime import sublime_plugin from .utils import rootSplit, open_file, findConfig from ..pathutils import getName, isFlowFile class FlowerCopyImport(sublime_plugin.TextCommand): def run(self, edit): _, name = rootSplit(self.view.file_name()) main = getName(name) if main: sublime.set_clipboard("import {};".format(main)) def is_enabled(self): return isFlowFile(self.view.file_name()) class FlowerOpenConfig(sublime_plugin.TextCommand): def run(self, edit, configpath=None): configpath = configpath or findConfig(self.view.file_name()) if configpath: sublime.set_timeout_async(lambda f=configpath: open_file(f)) def is_enabled(self): return isFlowFile(self.view.file_name())
27.103448
72
0.694656
94
786
5.648936
0.393617
0.060264
0.090395
0.120527
0.323917
0.323917
0.323917
0.180791
0.180791
0.180791
0
0
0.203562
786
28
73
28.071429
0.848243
0
0
0.210526
0
0
0.012723
0
0
0
0
0
0
1
0.210526
false
0
0.315789
0.105263
0.736842
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
2a878f7e120ace28643ce9f32de705f31ad40343
102
py
Python
v3/Libraries/os/path/realpath/current script location.py
TheShellLand/python
a35e9b32bec3a3ff03d6f0f4c2c2cc891180e516
[ "MIT" ]
null
null
null
v3/Libraries/os/path/realpath/current script location.py
TheShellLand/python
a35e9b32bec3a3ff03d6f0f4c2c2cc891180e516
[ "MIT" ]
1
2021-06-01T22:50:19.000Z
2021-06-01T22:50:19.000Z
v3/Libraries/os/path/realpath/current script location.py
TheShellLand/python
a35e9b32bec3a3ff03d6f0f4c2c2cc891180e516
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf8 -*- import os os.path.split(os.path.realpath(__file__))[0]
14.571429
44
0.656863
16
102
3.9375
0.8125
0.190476
0
0
0
0
0
0
0
0
0
0.022222
0.117647
102
6
45
17
0.677778
0.401961
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
2a8b3d5352b5cfa83ef4adbe246a6da7a44cd6c1
1,281
py
Python
sqlmap_client/__init__.py
ddoroshevsky/sqlmap_python_client
ba105fbf67a9efcf631bc4883e004ccedf724704
[ "MIT" ]
null
null
null
sqlmap_client/__init__.py
ddoroshevsky/sqlmap_python_client
ba105fbf67a9efcf631bc4883e004ccedf724704
[ "MIT" ]
null
null
null
sqlmap_client/__init__.py
ddoroshevsky/sqlmap_python_client
ba105fbf67a9efcf631bc4883e004ccedf724704
[ "MIT" ]
null
null
null
# coding: utf-8 # flake8: noqa """ SQLMap Server API No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 The version of the OpenAPI document: 0.0.1 Generated by: https://openapi-generator.tech """ from __future__ import absolute_import __version__ = "0.0.1" # import apis into sdk package from sqlmap_client.api.default_api import DefaultApi # import ApiClient from sqlmap_client.api_client import ApiClient from sqlmap_client.configuration import Configuration from sqlmap_client.exceptions import OpenApiException from sqlmap_client.exceptions import ApiTypeError from sqlmap_client.exceptions import ApiValueError from sqlmap_client.exceptions import ApiKeyError from sqlmap_client.exceptions import ApiException # import models into sdk package from sqlmap_client.models.create_task_response import CreateTaskResponse from sqlmap_client.models.get_task_options_response import GetTaskOptionsResponse from sqlmap_client.models.standard_response import StandardResponse from sqlmap_client.models.task_log import TaskLog from sqlmap_client.models.task_options import TaskOptions from sqlmap_client.models.task_status import TaskStatus from sqlmap_client.models.task_url import TaskUrl
32.846154
124
0.84153
169
1,281
6.16568
0.372781
0.143954
0.230326
0.147793
0.370441
0.057582
0
0
0
0
0
0.009649
0.11007
1,281
38
125
33.710526
0.904386
0.259953
0
0
1
0
0.005441
0
0
0
0
0
0
1
0
false
0
0.941176
0
0.941176
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
2a945b634d87a1c705e1417014bd3e5f9721875b
182
py
Python
data/getSinaData.py
jettom/JtSpider
7e2cb32415ca5d439b117c0277a7f7b2b27fa0bf
[ "Apache-2.0" ]
1
2019-01-25T12:54:24.000Z
2019-01-25T12:54:24.000Z
data/getSinaData.py
jettom/JtSpider
7e2cb32415ca5d439b117c0277a7f7b2b27fa0bf
[ "Apache-2.0" ]
null
null
null
data/getSinaData.py
jettom/JtSpider
7e2cb32415ca5d439b117c0277a7f7b2b27fa0bf
[ "Apache-2.0" ]
1
2020-05-16T01:18:25.000Z
2020-05-16T01:18:25.000Z
# URLhttp://vip.stock.finance.sina.com.cn/corp/go.php/vMS_MarketHistory/stockid/000001/type/S.phtml?year=2015&jidu=3 # table id:FundHoldSharesTable # https://xueqiu.com/S/SH000001/
36.4
116
0.78022
29
182
4.862069
0.931034
0
0
0
0
0
0
0
0
0
0
0.097701
0.043956
182
4
117
45.5
0.712644
0.956044
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
aa49ee52455a56222fa71e12de4a8d871dd7fc59
91
py
Python
Day2/Chap3/Ex2.py
ssahn0806/kw-SW_Preparatory
1ebed10bfc664d16d2d71440547f61ab3577ab64
[ "MIT" ]
null
null
null
Day2/Chap3/Ex2.py
ssahn0806/kw-SW_Preparatory
1ebed10bfc664d16d2d71440547f61ab3577ab64
[ "MIT" ]
null
null
null
Day2/Chap3/Ex2.py
ssahn0806/kw-SW_Preparatory
1ebed10bfc664d16d2d71440547f61ab3577ab64
[ "MIT" ]
null
null
null
r = int(input('r: ')) h = int(input('h: ')) PI = 3.141592 print(f'원기둥의 부피:\t{PI*(r**2)*h}')
22.75
33
0.516484
20
91
2.35
0.65
0.340426
0
0
0
0
0
0
0
0
0
0.101266
0.131868
91
4
33
22.75
0.493671
0
0
0
0
0
0.315217
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
aa6c4376829a64819d30343336e2ea857fe171f6
58
py
Python
forseti/__init__.py
Bram-Hub/Forseti
6431f710f9b4b9d9c79bf14f3e7d66e2ed57bd34
[ "MIT" ]
4
2018-08-14T23:08:14.000Z
2020-09-08T19:04:03.000Z
forseti/__init__.py
Bram-Hub/Forseti
6431f710f9b4b9d9c79bf14f3e7d66e2ed57bd34
[ "MIT" ]
7
2015-04-23T21:28:20.000Z
2016-02-26T16:10:48.000Z
forseti/__init__.py
Bram-Hub/Forseti
6431f710f9b4b9d9c79bf14f3e7d66e2ed57bd34
[ "MIT" ]
3
2019-01-16T21:04:36.000Z
2021-02-16T09:43:48.000Z
""" A Formal Logic Framework for various applications """
14.5
49
0.741379
7
58
6.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.155172
58
3
50
19.333333
0.877551
0.844828
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
aa772a5043e973f8faf85324a14c21145c926963
280
py
Python
task4/generate/generagefeature/readfeaturebin.py
dataminer996/DSTC10_Track3_QS_Goal_Diggers
24fe45c9a7098cf21c3278758fa8d9d073644a36
[ "MIT" ]
3
2021-12-30T08:21:10.000Z
2022-01-20T07:46:07.000Z
task4/generate/generagefeature/readfeaturebin.py
dataminer996/DSTC10_Track3_QS_Goal_Diggers
24fe45c9a7098cf21c3278758fa8d9d073644a36
[ "MIT" ]
null
null
null
task4/generate/generagefeature/readfeaturebin.py
dataminer996/DSTC10_Track3_QS_Goal_Diggers
24fe45c9a7098cf21c3278758fa8d9d073644a36
[ "MIT" ]
1
2022-01-19T03:13:04.000Z
2022-01-19T03:13:04.000Z
import utils_f import pickle import random import sys import numpy as np import os filename = sys.argv[1] allimagefea = [] with open(filename,'rb') as f: features = pickle.load(f) print(features[0]) print(features[0][0]) #print(features[0][1].shape)
15.555556
33
0.660714
42
280
4.380952
0.52381
0.211957
0.228261
0.163043
0
0
0
0
0
0
0
0.027273
0.214286
280
17
34
16.470588
0.809091
0.096429
0
0
0
0
0.007937
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0.166667
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
aa782f1211f5c9cf191357493dfc3f2dcac0f327
149
py
Python
tests/attributes_test.py
ghost2718/torchlayers
2f0f44ab64115c0a14ac8a27cf0159c2119d3f8f
[ "MIT" ]
1
2020-04-15T02:17:51.000Z
2020-04-15T02:17:51.000Z
tests/attributes_test.py
devanshuDesai/torchlayers
585e250c2a03d330841551f3612cfe9588985d13
[ "MIT" ]
null
null
null
tests/attributes_test.py
devanshuDesai/torchlayers
585e250c2a03d330841551f3612cfe9588985d13
[ "MIT" ]
null
null
null
import torch import torchlayers def test_module(): layer = torchlayers.Conv2d(64, kernel_size=3) assert layer.__module__ == "torchlayers"
16.555556
49
0.738255
18
149
5.777778
0.722222
0
0
0
0
0
0
0
0
0
0
0.032258
0.167785
149
8
50
18.625
0.806452
0
0
0
0
0
0.073826
0
0
0
0
0
0.2
1
0.2
false
0
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
aaa5a975e0ce61e08a29d23741c0cf09188dad6a
204
py
Python
data_sets/synthetic_review_prediction/article_0/__init__.py
Octavian-ai/synthetic-graph-data
b327cfb06d420d216a5377f2ce953355089e0e6b
[ "MIT" ]
16
2018-09-06T09:27:03.000Z
2021-05-28T01:35:44.000Z
data_sets/synthetic_review_prediction/article_0/__init__.py
Octavian-ai/generate-data
b327cfb06d420d216a5377f2ce953355089e0e6b
[ "MIT" ]
1
2021-02-10T00:02:43.000Z
2021-02-10T00:02:43.000Z
data_sets/synthetic_review_prediction/article_0/__init__.py
Octavian-ai/generate-data
b327cfb06d420d216a5377f2ce953355089e0e6b
[ "MIT" ]
7
2018-07-23T08:39:54.000Z
2021-02-08T16:24:54.000Z
from .configure import DATASET_NAME, create_data_set_properties
from .generate import run as _run


def run(client):
    """Generate this data set for *client* using the configured properties."""
    properties = create_data_set_properties()
    return _run(client, properties)


# Dispatch table: data-set name -> entry point.
runner = {DATASET_NAME: run}
18.545455
63
0.779412
29
204
5.137931
0.551724
0.147651
0.174497
0.308725
0
0
0
0
0
0
0
0
0.147059
204
10
64
20.4
0.856322
0
0
0
1
0
0
0
0
0
0
0
0
1
0.142857
false
0
0.285714
0.142857
0.571429
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
4
aaaace6c8c6cf3093343c34d40dd8dd7f2130d14
88
py
Python
tests/__init__.py
ananthu2050/investpy
2e2890de8dee1ee6bdcca3909a406d92dffcbfac
[ "MIT" ]
1
2021-03-10T14:57:41.000Z
2021-03-10T14:57:41.000Z
tests/__init__.py
royopa/investpy
6c527cc8501e5326ab2d4d102364fea2656b9b6d
[ "MIT" ]
null
null
null
tests/__init__.py
royopa/investpy
6c527cc8501e5326ab2d4d102364fea2656b9b6d
[ "MIT" ]
1
2020-10-30T22:47:13.000Z
2020-10-30T22:47:13.000Z
# Copyright 2018-2020 Alvaro Bartolome, alvarobartt @ GitHub # See LICENSE for details.
29.333333
60
0.784091
11
88
6.272727
1
0
0
0
0
0
0
0
0
0
0
0.106667
0.147727
88
2
61
44
0.813333
0.943182
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
aadba144dbdc7a8631561f373a069b9fc9f9205f
469
py
Python
hiquant/core/__init__.py
floatinghotpot/hiquant
a4a68c3ff88c35f1076e884861d365f7c5d6fae7
[ "Apache-2.0" ]
7
2021-06-16T14:21:49.000Z
2021-09-30T14:34:57.000Z
hiquant/core/__init__.py
webclinic017/hiquant
3f5de4f61b6dded2f082dc5b9adf086de485bb7c
[ "Apache-2.0" ]
null
null
null
hiquant/core/__init__.py
webclinic017/hiquant
3f5de4f61b6dded2f082dc5b9adf086de485bb7c
[ "Apache-2.0" ]
2
2021-09-24T16:13:04.000Z
2021-11-29T01:10:50.000Z
# -*- coding: utf-8; py-indent-offset:4 -*- from .data_cache import * from .stock_market import * from .indicator_signal import * from .order_cost import * from .stock import * from .portfolio import * from .strategy import * from .agent_simulated import * from .agent_human import * from .fund import * from .trader import * from .push_base import * from .push_email import * from .push_master import * from .lang import * from .backtest import * from .conf import *
23.45
43
0.739872
67
469
5.044776
0.462687
0.473373
0.12426
0
0
0
0
0
0
0
0
0.005076
0.159915
469
19
44
24.684211
0.852792
0.08742
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
2aa97a03e9833bf23456659fbcc5fd411cdee460
739
py
Python
001-100/40/40.py
junwei-wang/project-euler
abd728037e0f73f0f52f2ae4d40d468b307f34de
[ "MIT" ]
null
null
null
001-100/40/40.py
junwei-wang/project-euler
abd728037e0f73f0f52f2ae4d40d468b307f34de
[ "MIT" ]
null
null
null
001-100/40/40.py
junwei-wang/project-euler
abd728037e0f73f0f52f2ae4d40d468b307f34de
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# Project Euler 40: digits of Champernowne's constant 0.123456789101112...
# Fixed for Python 3: the original used Py2-only `print prod` and true-division
# `/` where integer division is required (a float index would raise TypeError
# in str(s)[...]); it also silently returned None for n >= 5888890.

d1 = 1
d10 = 1


def get_d_n(n):
    """Return the n-th digit (1-indexed) of Champernowne's fractional part.

    For n < 10 the digit is returned as an int; for larger n it is returned
    as a one-character string (callers wrap the result in int()).

    Raises ValueError for n >= 5888890, beyond the precomputed block table.
    """
    if n < 10:
        return n
    if n < 190:  # 9 + 2 * 90 + 1: end of the two-digit numbers
        t = n - 10
        s = 10 + (t >> 1)
        return str(s)[t & 1]
    if n < 2890:  # 9 + 2*90 + 3*900 + 1
        t = n - 190
        s = 100 + t // 3
        return str(s)[t % 3]
    if n < 38890:  # 9 + 2*90 + 3*900 + 4*9000
        t = n - 2890
        s = 1000 + (t >> 2)
        return str(s)[t & 0b11]
    if n < 488890:  # 38890 + 5*90000
        t = n - 38890
        s = 10000 + (t // 5)
        return str(s)[t % 5]
    if n < 5888890:  # 38890 + 5*90000 + 6*900000
        t = n - 488890
        s = 100000 + (t // 6)
        return str(s)[t % 6]
    raise ValueError("n=%d exceeds the supported digit range" % n)


prod = 1
for i in range(1, 7):
    prod *= int(get_d_n(10 ** i))
print(prod)
21.735294
48
0.423545
135
739
2.288889
0.340741
0.058252
0.161812
0.177994
0.05178
0
0
0
0
0
0
0.334101
0.41272
739
33
49
22.393939
0.37788
0.170501
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.034483
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
2ac18dc261b7decfb3853a7c1de74519f7d35d15
218
py
Python
assisted_storyboarding/asgi.py
AkashPushkar/assisted-storyboarding
bf4d389ec408e4145b3c725a48d8395f79ac2f2c
[ "MIT" ]
null
null
null
assisted_storyboarding/asgi.py
AkashPushkar/assisted-storyboarding
bf4d389ec408e4145b3c725a48d8395f79ac2f2c
[ "MIT" ]
null
null
null
assisted_storyboarding/asgi.py
AkashPushkar/assisted-storyboarding
bf4d389ec408e4145b3c725a48d8395f79ac2f2c
[ "MIT" ]
1
2021-03-01T03:58:48.000Z
2021-03-01T03:58:48.000Z
import os

import django
from channels.routing import get_default_application

# Bug fix: os.environ has no `setDefault` method -- the mapping API is
# `setdefault` (all lowercase); the original raised AttributeError at startup.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "assisted_storyboarding.settings")
django.setup()

# ASGI entry point consumed by the server (channels routing).
application = get_default_application()
24.222222
82
0.844037
26
218
6.807692
0.615385
0.112994
0.237288
0
0
0
0
0
0
0
0
0
0.077982
218
8
83
27.25
0.880597
0
0
0
0
0
0.243119
0.243119
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
2ac68fce19598e5567e887179445ab5f37fe4edc
515
py
Python
problem_16/test_power_digit_sum.py
plilja/project-euler
646d1989cf15e903ef7e3c6e487284847d522ec9
[ "Apache-2.0" ]
null
null
null
problem_16/test_power_digit_sum.py
plilja/project-euler
646d1989cf15e903ef7e3c6e487284847d522ec9
[ "Apache-2.0" ]
null
null
null
problem_16/test_power_digit_sum.py
plilja/project-euler
646d1989cf15e903ef7e3c6e487284847d522ec9
[ "Apache-2.0" ]
null
null
null
import unittest

from power_digit_sum import *


class TestPowerDigitSum(unittest.TestCase):
    """Checks power_digit_sum against small known values and the PE input."""

    def test_power_digit_sum(self):
        for exponent, expected in [(0, 1), (1, 2), (2, 4), (4, 7), (15, 26)]:
            self.assertEqual(power_digit_sum(exponent), expected)

    def test_project_euler_input(self):
        self.assertEqual(power_digit_sum(1000), 1366)


if __name__ == '__main__':
    unittest.main()
25.75
53
0.708738
70
515
4.814286
0.4
0.237389
0.308605
0.445104
0.522255
0.189911
0
0
0
0
0
0.047619
0.184466
515
19
54
27.105263
0.754762
0
0
0
0
0
0.015534
0
0
0
0
0
0.461538
1
0.153846
false
0
0.153846
0
0.384615
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
2ace3201e80bdb7cf74a4a09ebd5f9a544c6fcf3
57,198
py
Python
modules/cctbx_project/mmtbx/monomer_library/tst_paral_geo.py
jorgediazjr/dials-dev20191018
77d66c719b5746f37af51ad593e2941ed6fbba17
[ "BSD-3-Clause" ]
null
null
null
modules/cctbx_project/mmtbx/monomer_library/tst_paral_geo.py
jorgediazjr/dials-dev20191018
77d66c719b5746f37af51ad593e2941ed6fbba17
[ "BSD-3-Clause" ]
null
null
null
modules/cctbx_project/mmtbx/monomer_library/tst_paral_geo.py
jorgediazjr/dials-dev20191018
77d66c719b5746f37af51ad593e2941ed6fbba17
[ "BSD-3-Clause" ]
1
2020-02-04T15:39:06.000Z
2020-02-04T15:39:06.000Z
from __future__ import absolute_import, division, print_function from libtbx import easy_run small_pdb = ''' CRYST1 67.764 73.374 89.188 90.00 90.00 90.00 P 21 21 21 SCALE1 0.014757 0.000000 0.000000 0.00000 SCALE2 0.000000 0.013629 0.000000 0.00000 SCALE3 0.000000 0.000000 0.011212 0.00000 ATOM 1 CD1 LEU B 72 8.667 -35.470 -5.077 1.00 59.79 C ANISOU 1 CD1 LEU B 72 6414 8428 7875 523 1603 1997 C ATOM 2 OD2 ASP B 73 12.724 -38.374 -11.812 1.00 88.34 O ANISOU 2 OD2 ASP B 73 10138 12571 10858 -103 489 1759 O ATOM 3 NH2 ARG B 75 14.028 -38.071 -14.297 1.00 37.51 N ANISOU 3 NH2 ARG B 75 3656 6350 4245 -76 315 1819 N ATOM 4 CE LYS B 82 17.480 -35.482 -12.533 1.00 53.69 C ANISOU 4 CE LYS B 82 6220 7461 6719 236 729 1552 C ATOM 5 NZ LYS B 82 16.277 -34.849 -13.106 1.00 47.71 N ANISOU 5 NZ LYS B 82 5220 6893 6016 349 794 1830 N TER ATOM 6 C6 DG C 5 13.380 -35.454 -16.997 1.00 55.38 C ANISOU 6 C6 DG C 5 7038 7537 6465 -2349 -2046 1030 C ATOM 7 O6 DG C 5 12.844 -35.974 -16.003 1.00 55.39 O ANISOU 7 O6 DG C 5 6490 7702 6853 -2340 -2093 1187 O ATOM 8 N1 DG C 5 12.788 -34.348 -17.615 1.00 55.50 N ANISOU 8 N1 DG C 5 7219 7595 6272 -2261 -2308 1153 N ATOM 9 C5 DG C 6 15.752 -32.181 -16.614 1.00 49.45 C ANISOU 9 C5 DG C 6 6580 6784 5425 -1809 -942 838 C ATOM 10 C6 DG C 6 14.837 -32.426 -15.573 1.00 47.58 C ANISOU 10 C6 DG C 6 5799 6807 5471 -1736 -1178 1041 C ATOM 11 O6 DG C 6 14.820 -33.371 -14.798 1.00 48.06 O ANISOU 11 O6 DG C 6 5544 6935 5781 -1777 -1175 1056 O ATOM 12 N1 DG C 6 13.878 -31.427 -15.468 1.00 46.77 N ANISOU 12 N1 DG C 6 5555 6852 5363 -1576 -1370 1262 N ATOM 13 C2 DG C 6 13.795 -30.332 -16.288 1.00 48.97 C ANISOU 13 C2 DG C 6 6199 7043 5364 -1495 -1408 1290 C ATOM 14 N2 DG C 6 12.777 -29.485 -16.060 1.00 49.05 N ANISOU 14 N2 DG C 6 5979 7190 5466 -1303 -1608 1541 N ATOM 15 N3 DG C 6 14.631 -30.103 -17.301 1.00 51.62 N ANISOU 15 N3 DG C 6 7118 7128 5367 -1581 -1217 1106 N ATOM 16 O2 DC C 7 14.973 -27.621 -14.169 1.00 47.56 O ANISOU 16 O2 DC C 7 5780 6947 5345 -996 
-605 1243 O ATOM 17 C3' DA C 16 37.852 -9.655 4.024 1.00183.91 C ANISOU 17 C3' DA C 16 24314 16850 28714 -7056 -9451 -4932 C ATOM 18 O3' DA C 16 38.675 -8.488 3.955 1.00187.48 O ANISOU 18 O3' DA C 16 24548 17238 29446 -7688 -8925 -5266 O TER ATOM 19 C5' DG D 10 11.714 -32.153 -4.599 1.00 52.39 C ANISOU 19 C5' DG D 10 5649 7656 6599 612 672 1410 C ATOM 20 C4' DG D 10 11.196 -31.311 -5.747 1.00 52.11 C ANISOU 20 C4' DG D 10 5418 7630 6751 490 384 1385 C ATOM 21 O4' DG D 10 12.222 -31.189 -6.760 1.00 49.39 O ANISOU 21 O4' DG D 10 5315 7201 6250 233 30 1112 O ATOM 22 C3' DG D 10 9.952 -31.848 -6.451 1.00 54.58 C ANISOU 22 C3' DG D 10 5206 7973 7559 275 284 1638 C ATOM 23 O3' DG D 10 9.016 -30.796 -6.576 1.00 58.75 O ANISOU 23 O3' DG D 10 5490 8567 8267 512 297 1794 O ATOM 24 C2' DG D 10 10.477 -32.307 -7.809 1.00 51.83 C ANISOU 24 C2' DG D 10 4978 7529 7186 -143 -178 1447 C ATOM 25 C1' DG D 10 11.640 -31.352 -8.025 1.00 49.44 C ANISOU 25 C1' DG D 10 5132 7167 6485 -32 -268 1161 C ATOM 26 N9 DG D 10 12.684 -31.840 -8.943 1.00 47.12 N ANISOU 26 N9 DG D 10 5146 6716 6042 -316 -476 956 N ATOM 27 C8 DG D 10 13.307 -33.070 -8.914 1.00 46.20 C ANISOU 27 C8 DG D 10 5133 6515 5904 -515 -409 937 C ATOM 28 N7 DG D 10 14.254 -33.199 -9.818 1.00 44.53 N ANISOU 28 N7 DG D 10 5246 6119 5554 -679 -524 774 N ATOM 29 C5 DG D 10 14.280 -31.969 -10.477 1.00 43.30 C ANISOU 29 C5 DG D 10 5225 5914 5314 -597 -675 667 C ATOM 30 C6 DG D 10 15.112 -31.506 -11.556 1.00 41.62 C ANISOU 30 C6 DG D 10 5395 5462 4955 -668 -744 507 C ATOM 31 O6 DG D 10 15.979 -32.137 -12.188 1.00 41.25 O ANISOU 31 O6 DG D 10 5636 5194 4842 -819 -674 445 O ATOM 32 N1 DG D 10 14.797 -30.192 -11.935 1.00 41.60 N ANISOU 32 N1 DG D 10 5469 5445 4892 -509 -832 459 N ATOM 33 C2 DG D 10 13.813 -29.418 -11.332 1.00 43.73 C ANISOU 33 C2 DG D 10 5468 5915 5233 -286 -849 563 C ATOM 34 N2 DG D 10 13.646 -28.173 -11.808 1.00 43.93 N ANISOU 34 N2 DG D 10 5658 5867 5167 -113 -891 518 N ATOM 35 N3 DG D 10 13.041 -29.841 -10.321 1.00 
45.36 N ANISOU 35 N3 DG D 10 5302 6335 5597 -199 -748 732 N ATOM 36 C4 DG D 10 13.317 -31.120 -9.954 1.00 44.89 C ANISOU 36 C4 DG D 10 5171 6288 5599 -373 -668 771 C ATOM 37 P DC D 11 7.464 -31.076 -6.856 1.00 64.47 P ANISOU 37 P DC D 11 5497 9365 9635 459 284 2173 P ATOM 38 OP1 DC D 11 6.694 -30.199 -5.955 1.00 68.29 O ANISOU 38 OP1 DC D 11 5821 9892 10233 961 768 2440 O ATOM 39 OP2 DC D 11 7.221 -32.542 -6.838 1.00 66.22 O ANISOU 39 OP2 DC D 11 5442 9536 10183 92 297 2280 O ATOM 40 O5' DC D 11 7.294 -30.564 -8.369 1.00 64.31 O ANISOU 40 O5' DC D 11 5423 9353 9659 251 -357 2068 O ATOM 41 C5' DC D 11 7.627 -29.216 -8.713 1.00 63.28 C ANISOU 41 C5' DC D 11 5632 9211 9200 523 -437 1929 C ATOM 42 C4' DC D 11 8.011 -29.086 -10.181 1.00 62.67 C ANISOU 42 C4' DC D 11 5821 9041 8950 261 -1009 1721 C ATOM 43 O4' DC D 11 9.255 -29.758 -10.428 1.00 58.49 O ANISOU 43 O4' DC D 11 5760 8365 8097 -24 -1057 1430 O ATOM 44 C3' DC D 11 7.016 -29.681 -11.186 1.00 67.45 C ANISOU 44 C3' DC D 11 6000 9677 9951 -27 -1557 1885 C ATOM 45 O3' DC D 11 6.256 -28.657 -11.805 1.00 72.04 O ANISOU 45 O3' DC D 11 6413 10328 10632 230 -1830 2033 O ATOM 46 C2' DC D 11 7.890 -30.400 -12.212 1.00 64.48 C ANISOU 46 C2' DC D 11 6150 9097 9252 -428 -1934 1593 C ATOM 47 C1' DC D 11 9.306 -30.020 -11.805 1.00 58.64 C ANISOU 47 C1' DC D 11 5982 8251 8047 -301 -1549 1328 C ATOM 48 N1 DC D 11 10.301 -31.101 -12.087 1.00 54.72 N ANISOU 48 N1 DC D 11 5853 7578 7362 -630 -1553 1136 N ATOM 49 C2 DC D 11 11.214 -30.932 -13.141 1.00 52.51 C ANISOU 49 C2 DC D 11 6165 7054 6734 -727 -1703 915 C ATOM 50 O2 DC D 11 11.240 -29.851 -13.751 1.00 52.40 O ANISOU 50 O2 DC D 11 6386 6974 6548 -537 -1809 863 O ATOM 51 N3 DC D 11 12.076 -31.957 -13.436 1.00 51.07 N ANISOU 51 N3 DC D 11 6319 6673 6412 -983 -1636 793 N ATOM 52 C4 DC D 11 12.018 -33.112 -12.738 1.00 51.92 C ANISOU 52 C4 DC D 11 6202 6832 6694 -1150 -1478 871 C ATOM 53 N4 DC D 11 12.895 -34.084 -13.036 1.00 50.99 N ANISOU 53 N4 DC D 11 6456 6495 6424 -1347 -1360 
775 N ATOM 54 C5 DC D 11 11.056 -33.313 -11.693 1.00 53.75 C ANISOU 54 C5 DC D 11 5848 7303 7273 -1078 -1345 1085 C ATOM 55 C6 DC D 11 10.213 -32.306 -11.428 1.00 54.96 C ANISOU 55 C6 DC D 11 5658 7636 7588 -821 -1379 1220 C ATOM 56 P DC D 12 4.982 -29.027 -12.712 1.00 79.92 P ANISOU 56 P DC D 12 6815 11412 12140 35 -2495 2280 P ATOM 57 OP1 DC D 12 4.045 -27.883 -12.655 1.00 84.68 O ANISOU 57 OP1 DC D 12 7009 12168 12999 510 -2460 2587 O ATOM 58 OP2 DC D 12 4.513 -30.390 -12.342 1.00 82.50 O ANISOU 58 OP2 DC D 12 6639 11752 12957 -372 -2514 2401 O ATOM 59 O5' DC D 12 5.591 -29.119 -14.190 1.00 78.68 O ANISOU 59 O5' DC D 12 7339 11049 11508 -225 -3141 1981 O ATOM 60 C5' DC D 12 6.341 -28.033 -14.735 1.00 75.42 C ANISOU 60 C5' DC D 12 7617 10501 10539 57 -3086 1788 C ATOM 61 C4' DC D 12 7.081 -28.473 -15.987 1.00 73.85 C ANISOU 61 C4' DC D 12 8167 10013 9878 -235 -3505 1508 C ATOM 62 O4' DC D 12 8.144 -29.361 -15.612 1.00 68.93 O ANISOU 62 O4' DC D 12 7855 9247 9089 -518 -3156 1294 O ATOM 63 C1' DC D 12 8.392 -30.247 -16.669 1.00 69.20 C ANISOU 63 C1' DC D 12 8373 9022 8899 -878 -3589 1142 C ATOM 64 N1 DC D 12 8.969 -31.516 -16.114 1.00 65.22 N ANISOU 64 N1 DC D 12 7875 8442 8465 -1214 -3298 1054 N ATOM 65 C2 DC D 12 10.097 -32.088 -16.724 1.00 61.84 C ANISOU 65 C2 DC D 12 8201 7678 7618 -1383 -3174 828 C ATOM 66 O2 DC D 12 10.648 -31.493 -17.653 1.00 61.62 O ANISOU 66 O2 DC D 12 8828 7406 7177 -1244 -3231 703 O ATOM 67 N3 DC D 12 10.570 -33.278 -16.249 1.00 59.55 N ANISOU 67 N3 DC D 12 7919 7307 7399 -1647 -2908 788 N ATOM 68 C4 DC D 12 9.933 -33.914 -15.251 1.00 60.34 C ANISOU 68 C4 DC D 12 7360 7627 7939 -1767 -2789 947 C ATOM 69 N4 DC D 12 10.438 -35.079 -14.825 1.00 58.98 N ANISOU 69 N4 DC D 12 7286 7338 7787 -1987 -2495 915 N ATOM 70 C5 DC D 12 8.755 -33.372 -14.641 1.00 63.69 C ANISOU 70 C5 DC D 12 7020 8364 8816 -1619 -2885 1183 C ATOM 71 C6 DC D 12 8.305 -32.190 -15.105 1.00 65.90 C ANISOU 71 C6 DC D 12 7254 8739 9047 -1342 -3145 1236 C TER HETATM 72 O HOH B 
355 10.526 -35.905 -9.116 1.00 45.59 O HETATM 73 O HOH D 105 4.788 -32.025 -5.002 1.00 63.52 O HETATM 74 O HOH D 108 9.056 -30.329 -2.445 1.00 63.77 O END ''' large_pdb = ''' CRYST1 67.764 73.374 89.188 90.00 90.00 90.00 P 21 21 21 SCALE1 0.014757 0.000000 0.000000 0.00000 SCALE2 0.000000 0.013629 0.000000 0.00000 SCALE3 0.000000 0.000000 0.011212 0.00000 ATOM 1 CA SER B 71 13.994 -39.212 -4.120 1.00 45.22 C ANISOU 1 CA SER B 71 5466 6212 5502 -96 997 1012 C ATOM 2 C SER B 71 12.571 -39.663 -4.589 1.00 50.17 C ANISOU 2 C SER B 71 5879 7033 6149 -132 964 1216 C ATOM 3 O SER B 71 12.262 -40.864 -4.559 1.00 49.13 O ANISOU 3 O SER B 71 5722 6986 5958 -273 854 1220 O ATOM 4 CB SER B 71 14.796 -38.614 -5.271 1.00 45.76 C ANISOU 4 CB SER B 71 5553 6230 5602 -72 937 1003 C ATOM 5 OG SER B 71 14.965 -39.550 -6.319 1.00 45.13 O ANISOU 5 OG SER B 71 5444 6258 5445 -202 754 1021 O ATOM 6 N LEU B 72 11.699 -38.711 -4.977 1.00 48.67 N ANISOU 6 N LEU B 72 5526 6910 6058 -5 1067 1390 N ATOM 7 CA LEU B 72 10.334 -39.015 -5.453 1.00 50.38 C ANISOU 7 CA LEU B 72 5509 7338 6295 -29 1037 1605 C ATOM 8 C LEU B 72 10.357 -39.904 -6.717 1.00 58.22 C ANISOU 8 C LEU B 72 6426 8488 7208 -190 816 1628 C ATOM 9 O LEU B 72 9.649 -40.909 -6.806 1.00 61.15 O ANISOU 9 O LEU B 72 6697 8995 7543 -329 717 1670 O ATOM 10 CB LEU B 72 9.546 -37.719 -5.735 1.00 51.47 C ANISOU 10 CB LEU B 72 5480 7515 6561 160 1196 1807 C ATOM 11 CG LEU B 72 9.294 -36.752 -4.569 1.00 56.90 C ANISOU 11 CG LEU B 72 6211 8058 7352 332 1444 1792 C ATOM 12 CD1 LEU B 72 8.667 -35.470 -5.077 1.00 59.79 C ANISOU 12 CD1 LEU B 72 6414 8428 7875 523 1603 1997 C ATOM 13 CD2 LEU B 72 8.398 -37.364 -3.475 1.00 54.12 C ANISOU 13 CD2 LEU B 72 5814 7778 6971 310 1511 1824 C ATOM 14 N ASP B 73 11.181 -39.505 -7.668 1.00 54.44 N ANISOU 14 N ASP B 73 5994 7984 6705 -172 751 1590 N ATOM 15 CA ASP B 73 11.484 -40.139 -8.937 1.00 55.25 C ANISOU 15 CA ASP B 73 6061 8221 6710 -296 560 1571 C ATOM 16 C ASP B 73 12.080 -41.544 -8.698 1.00 56.62 C 
ANISOU 16 C ASP B 73 6382 8333 6798 -487 428 1371 C ATOM 17 O ASP B 73 11.678 -42.493 -9.366 1.00 57.96 O ANISOU 17 O ASP B 73 6469 8649 6906 -643 282 1361 O ATOM 18 CB ASP B 73 12.562 -39.251 -9.613 1.00 57.88 C ANISOU 18 CB ASP B 73 6477 8461 7052 -192 579 1542 C ATOM 19 CG ASP B 73 12.772 -39.405 -11.096 1.00 79.37 C ANISOU 19 CG ASP B 73 9113 11366 9678 -242 431 1592 C ATOM 20 OD1 ASP B 73 13.028 -40.544 -11.543 1.00 83.06 O ANISOU 20 OD1 ASP B 73 9627 11903 10029 -414 269 1463 O ATOM 21 OD2 ASP B 73 12.724 -38.374 -11.812 1.00 88.34 O ANISOU 21 OD2 ASP B 73 10138 12571 10858 -103 489 1759 O ATOM 22 N GLY B 74 13.046 -41.628 -7.772 1.00 48.60 N ANISOU 22 N GLY B 74 5573 7105 5786 -472 486 1214 N ATOM 23 N ARG B 75 15.493 -41.737 -8.902 1.00 39.07 N ANISOU 23 N ARG B 75 4633 5732 4480 -510 345 971 N ATOM 24 CA ARG B 75 16.798 -41.429 -9.486 1.00 37.13 C ANISOU 24 CA ARG B 75 4511 5393 4205 -479 311 879 C ATOM 25 C ARG B 75 17.640 -40.524 -8.596 1.00 37.86 C ANISOU 25 C ARG B 75 4735 5282 4368 -365 439 810 C ATOM 26 O ARG B 75 17.117 -39.707 -7.866 1.00 37.75 O ANISOU 26 O ARG B 75 4682 5225 4438 -263 575 866 O ATOM 27 CB ARG B 75 16.629 -40.738 -10.847 1.00 38.49 C ANISOU 27 CB ARG B 75 4548 5717 4361 -414 277 1012 C ATOM 28 CG ARG B 75 16.196 -41.644 -11.998 1.00 46.70 C ANISOU 28 CG ARG B 75 5485 6977 5280 -546 114 1018 C ATOM 29 CD ARG B 75 16.191 -40.897 -13.329 1.00 42.74 C ANISOU 29 CD ARG B 75 4853 6657 4731 -462 83 1157 C ATOM 30 NE ARG B 75 15.155 -39.854 -13.375 1.00 34.72 N ANISOU 30 NE ARG B 75 3639 5757 3797 -328 183 1395 N ATOM 31 CZ ARG B 75 15.001 -38.974 -14.356 1.00 51.34 C ANISOU 31 CZ ARG B 75 5594 8014 5897 -207 205 1586 C ATOM 32 NH1 ARG B 75 15.823 -38.982 -15.401 1.00 48.43 N ANISOU 32 NH1 ARG B 75 5253 7720 5430 -204 128 1565 N ATOM 33 NH2 ARG B 75 14.028 -38.071 -14.297 1.00 37.51 N ANISOU 33 NH2 ARG B 75 3656 6350 4245 -76 315 1819 N ATOM 34 N LEU B 76 18.954 -40.655 -8.687 1.00 33.74 N ANISOU 34 N LEU B 76 4363 4641 3815 -385 398 682 
N ATOM 35 N GLN B 77 20.993 -37.735 -8.751 1.00 33.28 N ANISOU 35 N GLN B 77 4419 4239 3985 -115 619 643 N ATOM 36 CA GLN B 77 21.843 -36.938 -9.638 1.00 33.15 C ANISOU 36 CA GLN B 77 4397 4160 4039 -46 635 687 C ATOM 37 CB GLN B 77 21.184 -35.623 -10.076 1.00 35.27 C ANISOU 37 CB GLN B 77 4517 4421 4464 102 775 870 C ATOM 38 CG GLN B 77 22.036 -34.806 -11.064 1.00 39.95 C ANISOU 38 CG GLN B 77 5082 4952 5147 183 804 956 C ATOM 39 CD GLN B 77 21.334 -33.565 -11.593 1.00 51.06 C ANISOU 39 CD GLN B 77 6315 6362 6724 342 953 1186 C ATOM 40 OE1 GLN B 77 20.152 -33.585 -11.965 1.00 40.99 O ANISOU 40 OE1 GLN B 77 4884 5265 5425 385 959 1360 O ATOM 41 NE2 GLN B 77 22.054 -32.452 -11.652 1.00 41.50 N ANISOU 41 NE2 GLN B 77 5113 4951 5704 433 1081 1206 N ATOM 42 N LYS B 82 21.845 -38.324 -14.088 1.00 38.89 N ANISOU 42 N LYS B 82 4873 5491 4414 -116 277 974 N ATOM 43 CA LYS B 82 20.912 -39.089 -13.254 1.00 37.55 C ANISOU 43 CA LYS B 82 4702 5349 4216 -212 250 910 C ATOM 44 CB LYS B 82 19.461 -38.613 -13.473 1.00 40.49 C ANISOU 44 CB LYS B 82 4868 5899 4618 -155 290 1104 C ATOM 45 CG LYS B 82 19.108 -37.357 -12.703 1.00 48.79 C ANISOU 45 CG LYS B 82 5876 6808 5853 -12 472 1204 C ATOM 46 CD LYS B 82 17.731 -36.862 -13.078 1.00 56.45 C ANISOU 46 CD LYS B 82 6621 7968 6858 64 520 1433 C ATOM 47 CE LYS B 82 17.480 -35.482 -12.533 1.00 53.69 C ANISOU 47 CE LYS B 82 6220 7461 6719 236 729 1552 C ATOM 48 NZ LYS B 82 16.277 -34.849 -13.106 1.00 47.71 N ANISOU 48 NZ LYS B 82 5220 6893 6016 349 794 1830 N TER ATOM 49 N3 DC C 3 10.163 -41.537 -19.049 1.00 80.79 N ANISOU 49 N3 DC C 3 10710 9779 10207 -4007 -4243 909 N ATOM 50 C4 DC C 3 9.646 -41.759 -17.831 1.00 79.18 C ANISOU 50 C4 DC C 3 9652 9809 10625 -3952 -4092 1165 C ATOM 51 N4 DC C 3 8.666 -40.948 -17.413 1.00 79.82 N ANISOU 51 N4 DC C 3 9076 10166 11085 -3841 -4325 1478 N ATOM 52 C2' DA C 4 16.585 -39.755 -19.866 1.00 71.00 C ANISOU 52 C2' DA C 4 11283 7990 7703 -2982 -1073 -32 C ATOM 53 C1' DA C 4 15.216 -40.359 -20.154 1.00 
72.80 C ANISOU 53 C1' DA C 4 11569 8233 7860 -3259 -1829 63 C ATOM 54 N9 DA C 4 14.239 -40.124 -19.086 1.00 68.67 N ANISOU 54 N9 DA C 4 10182 8121 7788 -3226 -2202 355 N ATOM 55 C8 DA C 4 14.092 -40.840 -17.918 1.00 65.84 C ANISOU 55 C8 DA C 4 9183 7910 7923 -3213 -2132 479 C ATOM 56 N7 DA C 4 13.100 -40.424 -17.159 1.00 64.40 N ANISOU 56 N7 DA C 4 8366 8045 8057 -3173 -2415 759 N ATOM 57 C5 DA C 4 12.555 -39.366 -17.877 1.00 65.98 C ANISOU 57 C5 DA C 4 8738 8326 8005 -3149 -2743 825 C ATOM 58 C6 DA C 4 11.470 -38.496 -17.619 1.00 66.40 C ANISOU 58 C6 DA C 4 8321 8648 8261 -3067 -3101 1105 C ATOM 59 N6 DA C 4 10.712 -38.580 -16.525 1.00 65.91 N ANISOU 59 N6 DA C 4 7510 8816 8718 -3005 -3106 1369 N ATOM 60 N1 DA C 4 11.182 -37.548 -18.542 1.00 68.26 N ANISOU 60 N1 DA C 4 8922 8859 8155 -3028 -3410 1122 N ATOM 61 C2 DA C 4 11.944 -37.464 -19.645 1.00 69.97 C ANISOU 61 C2 DA C 4 9995 8792 7799 -3080 -3314 872 C ATOM 62 N3 DA C 4 12.986 -38.224 -20.004 1.00 70.04 N ANISOU 62 N3 DA C 4 10532 8513 7567 -3161 -2900 591 N ATOM 63 C4 DA C 4 13.241 -39.169 -19.068 1.00 68.13 C ANISOU 63 C4 DA C 4 9830 8317 7740 -3187 -2648 582 C ATOM 64 O4' DG C 5 16.905 -36.381 -21.325 1.00 69.22 O ANISOU 64 O4' DG C 5 11852 7803 6647 -2679 -800 43 O ATOM 65 C2' DG C 5 18.423 -35.229 -20.012 1.00 62.44 C ANISOU 65 C2' DG C 5 10161 7150 6414 -2353 39 49 C ATOM 66 C1' DG C 5 16.956 -35.376 -20.348 1.00 63.78 C ANISOU 66 C1' DG C 5 10472 7460 6302 -2477 -714 200 C ATOM 67 N9 DG C 5 16.122 -35.773 -19.211 1.00 59.65 N ANISOU 67 N9 DG C 5 9179 7287 6199 -2475 -1125 383 N ATOM 68 C8 DG C 5 16.347 -36.806 -18.326 1.00 57.78 C ANISOU 68 C8 DG C 5 8490 7105 6359 -2501 -1037 364 C ATOM 69 N7 DG C 5 15.436 -36.911 -17.398 1.00 55.67 N ANISOU 69 N7 DG C 5 7639 7129 6384 -2483 -1374 578 N ATOM 70 C5 DG C 5 14.560 -35.866 -17.675 1.00 55.99 C ANISOU 70 C5 DG C 5 7667 7329 6279 -2425 -1701 748 C ATOM 71 C6 DG C 5 13.380 -35.454 -16.997 1.00 55.38 C ANISOU 71 C6 DG C 5 7038 7537 6465 -2349 -2046 1030 C 
ATOM 72 O6 DG C 5 12.844 -35.974 -16.003 1.00 55.39 O ANISOU 72 O6 DG C 5 6490 7702 6853 -2340 -2093 1187 O ATOM 73 N1 DG C 5 12.788 -34.348 -17.615 1.00 55.50 N ANISOU 73 N1 DG C 5 7219 7595 6272 -2261 -2308 1153 N ATOM 74 C2 DG C 5 13.289 -33.704 -18.728 1.00 56.19 C ANISOU 74 C2 DG C 5 7998 7470 5880 -2256 -2243 1022 C ATOM 75 N2 DG C 5 12.616 -32.638 -19.150 1.00 56.89 N ANISOU 75 N2 DG C 5 8185 7613 5819 -2132 -2533 1197 N ATOM 76 N3 DG C 5 14.397 -34.064 -19.359 1.00 57.59 N ANISOU 76 N3 DG C 5 8742 7365 5774 -2338 -1849 760 N ATOM 77 C4 DG C 5 14.980 -35.151 -18.781 1.00 57.50 C ANISOU 77 C4 DG C 5 8503 7316 6030 -2415 -1591 633 C ATOM 78 C4' DG C 6 17.499 -30.332 -21.669 1.00 68.00 C ANISOU 78 C4' DG C 6 11949 7860 6028 -1980 -49 456 C ATOM 79 O4' DG C 6 16.616 -30.873 -20.651 1.00 64.59 O ANISOU 79 O4' DG C 6 10767 7811 5963 -1974 -585 584 O ATOM 80 C2' DG C 6 18.230 -29.602 -19.482 1.00 61.13 C ANISOU 80 C2' DG C 6 9767 7395 6063 -1789 273 481 C ATOM 81 C1' DG C 6 16.819 -30.179 -19.428 1.00 59.91 C ANISOU 81 C1' DG C 6 9488 7475 5799 -1811 -424 654 C ATOM 82 N9 DG C 6 16.602 -31.109 -18.320 1.00 53.93 N ANISOU 82 N9 DG C 6 8075 6960 5456 -1834 -591 674 N ATOM 83 C8 DG C 6 17.312 -32.253 -18.046 1.00 52.41 C ANISOU 83 C8 DG C 6 7749 6681 5482 -1940 -385 514 C ATOM 84 N7 DG C 6 16.843 -32.925 -17.035 1.00 50.66 N ANISOU 84 N7 DG C 6 6993 6689 5566 -1929 -618 607 N ATOM 85 C5 DG C 6 15.752 -32.181 -16.614 1.00 49.45 C ANISOU 85 C5 DG C 6 6580 6784 5425 -1809 -942 838 C ATOM 86 C6 DG C 6 14.837 -32.426 -15.573 1.00 47.58 C ANISOU 86 C6 DG C 6 5799 6807 5471 -1736 -1178 1041 C ATOM 87 O6 DG C 6 14.820 -33.371 -14.798 1.00 48.06 O ANISOU 87 O6 DG C 6 5544 6935 5781 -1777 -1175 1056 O ATOM 88 N1 DG C 6 13.878 -31.427 -15.468 1.00 46.77 N ANISOU 88 N1 DG C 6 5555 6852 5363 -1576 -1370 1262 N ATOM 89 C2 DG C 6 13.795 -30.332 -16.288 1.00 48.97 C ANISOU 89 C2 DG C 6 6199 7043 5364 -1495 -1408 1290 C ATOM 90 N2 DG C 6 12.777 -29.485 -16.060 1.00 49.05 N ANISOU 90 
N2 DG C 6 5979 7190 5466 -1303 -1608 1541 N ATOM 91 N3 DG C 6 14.631 -30.103 -17.301 1.00 51.62 N ANISOU 91 N3 DG C 6 7118 7128 5367 -1581 -1217 1106 N ATOM 92 C4 DG C 6 15.584 -31.069 -17.402 1.00 51.37 C ANISOU 92 C4 DG C 6 7205 6946 5366 -1740 -958 883 C ATOM 93 O5' DC C 7 18.156 -25.995 -19.708 1.00 66.35 O ANISOU 93 O5' DC C 7 10858 7922 6431 -1452 599 728 O ATOM 94 C5' DC C 7 16.798 -25.510 -19.857 1.00 65.57 C ANISOU 94 C5' DC C 7 10834 7992 6086 -1300 39 981 C ATOM 95 C4' DC C 7 16.151 -25.269 -18.506 1.00 60.75 C ANISOU 95 C4' DC C 7 9495 7728 5861 -1138 -201 1105 C ATOM 96 O4' DC C 7 16.069 -26.519 -17.796 1.00 58.03 O ANISOU 96 O4' DC C 7 8663 7588 5798 -1238 -353 1042 O ATOM 97 C3' DC C 7 16.912 -24.316 -17.587 1.00 57.29 C ANISOU 97 C3' DC C 7 8824 7258 5685 -1055 199 1018 C ATOM 98 O3' DC C 7 16.250 -23.034 -17.546 1.00 56.66 O ANISOU 98 O3' DC C 7 8861 7173 5496 -827 123 1208 O ATOM 99 C2' DC C 7 16.930 -25.017 -16.210 1.00 54.04 C ANISOU 99 C2' DC C 7 7770 7103 5659 -1052 111 960 C ATOM 100 C1' DC C 7 16.110 -26.290 -16.412 1.00 53.52 C ANISOU 100 C1' DC C 7 7547 7214 5573 -1119 -279 1054 C ATOM 101 N1 DC C 7 16.692 -27.554 -15.723 1.00 49.41 N ANISOU 101 N1 DC C 7 6672 6774 5329 -1253 -234 909 N ATOM 102 C2 DC C 7 16.009 -28.150 -14.619 1.00 47.47 C ANISOU 102 C2 DC C 7 5948 6781 5306 -1177 -451 1029 C ATOM 103 O2 DC C 7 14.973 -27.621 -14.169 1.00 47.56 O ANISOU 103 O2 DC C 7 5780 6947 5345 -996 -605 1243 O ATOM 104 N3 DC C 7 16.526 -29.288 -14.063 1.00 45.88 N ANISOU 104 N3 DC C 7 5514 6609 5309 -1282 -425 925 N ATOM 105 C4 DC C 7 17.633 -29.855 -14.584 1.00 45.68 C ANISOU 105 C4 DC C 7 5646 6382 5329 -1438 -211 714 C ATOM 106 N4 DC C 7 18.076 -30.991 -14.027 1.00 44.11 N ANISOU 106 N4 DC C 7 5203 6194 5363 -1502 -215 643 N ATOM 107 C5 DC C 7 18.322 -29.282 -15.715 1.00 47.08 C ANISOU 107 C5 DC C 7 6265 6291 5331 -1513 75 590 C ATOM 108 C6 DC C 7 17.832 -28.144 -16.233 1.00 48.38 C ANISOU 108 C6 DC C 7 6721 6425 5238 -1426 57 692 C ATOM 109 O5' 
DG C 8 16.620 -21.727 -15.437 1.00 51.98 O ANISOU 109 O5' DG C 8 7763 6659 5327 -615 406 1138 O ATOM 110 C5' DG C 8 15.245 -21.796 -15.034 1.00 49.83 C ANISOU 110 C5' DG C 8 7234 6629 5071 -380 82 1394 C ATOM 111 C4' DG C 8 15.121 -21.983 -13.541 1.00 45.19 C ANISOU 111 C4' DG C 8 6242 6210 4717 -280 118 1367 C ATOM 112 O4' DG C 8 15.466 -23.341 -13.181 1.00 43.09 O ANISOU 112 O4' DG C 8 5688 6092 4594 -465 15 1257 O ATOM 113 C2' DG C 8 17.141 -22.000 -12.279 1.00 42.03 C ANISOU 113 C2' DG C 8 5789 5636 4543 -531 402 922 C ATOM 114 C1' DG C 8 16.387 -23.312 -12.104 1.00 41.26 C ANISOU 114 C1' DG C 8 5371 5812 4493 -517 163 1053 C ATOM 115 N9 DG C 8 17.227 -24.515 -12.167 1.00 39.38 N ANISOU 115 N9 DG C 8 4970 5588 4403 -751 119 887 N ATOM 116 C8 DG C 8 18.311 -24.733 -12.994 1.00 38.73 C ANISOU 116 C8 DG C 8 5009 5311 4395 -970 281 704 C ATOM 117 N7 DG C 8 18.819 -25.921 -12.882 1.00 37.78 N ANISOU 117 N7 DG C 8 4679 5224 4452 -1104 233 609 N ATOM 118 C5 DG C 8 18.045 -26.530 -11.903 1.00 36.89 C ANISOU 118 C5 DG C 8 4319 5347 4352 -987 0 735 C ATOM 119 C6 DG C 8 18.153 -27.826 -11.324 1.00 36.01 C ANISOU 119 C6 DG C 8 3952 5336 4393 -1041 -135 725 C ATOM 120 O6 DG C 8 18.959 -28.725 -11.611 1.00 36.04 O ANISOU 120 O6 DG C 8 3874 5247 4573 -1190 -100 595 O ATOM 121 N1 DG C 8 17.162 -28.049 -10.352 1.00 35.01 N ANISOU 121 N1 DG C 8 3683 5401 4218 -879 -261 910 N ATOM 122 C2 DG C 8 16.214 -27.122 -9.978 1.00 36.10 C ANISOU 122 C2 DG C 8 3879 5610 4226 -669 -217 1080 C ATOM 123 N2 DG C 8 15.345 -27.510 -9.058 1.00 37.34 N ANISOU 123 N2 DG C 8 3884 5899 4404 -520 -221 1264 N ATOM 124 N3 DG C 8 16.091 -25.920 -10.524 1.00 37.64 N ANISOU 124 N3 DG C 8 4281 5716 4305 -594 -120 1091 N ATOM 125 C4 DG C 8 17.045 -25.684 -11.469 1.00 37.58 C ANISOU 125 C4 DG C 8 4453 5531 4294 -771 -36 912 C ATOM 126 C4' DC C 9 13.333 -21.206 -8.026 1.00 44.80 C ANISOU 126 C4' DC C 9 5853 6335 4834 614 696 1592 C ATOM 127 O4' DC C 9 14.262 -22.312 -8.097 1.00 42.52 O ANISOU 127 O4' DC C 9 
5441 6135 4581 295 463 1401 O ATOM 128 O2 DC C 9 16.471 -24.699 -7.001 1.00 40.14 O ANISOU 128 O2 DC C 9 4964 5890 4398 -201 32 981 O ATOM 129 C3' DC C 15 37.907 -15.298 2.284 1.00168.93 C ANISOU 129 C3' DC C 15 19382 15636 29167 -5738 -10509 -3429 C ATOM 130 O3' DC C 15 38.465 -14.761 1.075 1.00170.68 O ANISOU 130 O3' DC C 15 18669 15664 30516 -6074 -9655 -3380 O ATOM 131 C2' DC C 15 36.591 -14.607 2.676 1.00167.52 C ANISOU 131 C2' DC C 15 20689 15100 27862 -5472 -10714 -3518 C ATOM 132 P DA C 16 39.540 -13.579 1.112 1.00176.14 P ANISOU 132 P DA C 16 19180 16467 31279 -6762 -9164 -3643 P ATOM 133 OP1 DA C 16 40.110 -13.464 -0.249 1.00176.33 O ANISOU 133 OP1 DA C 16 18191 16349 32458 -6993 -8227 -3436 O ATOM 134 OP2 DA C 16 40.445 -13.802 2.272 1.00178.26 O ANISOU 134 OP2 DA C 16 19468 17458 30806 -7003 -9460 -3849 O ATOM 135 O5' DA C 16 38.631 -12.284 1.402 1.00177.90 O ANISOU 135 O5' DA C 16 20632 16164 30796 -6779 -9257 -3894 O ATOM 136 C5' DA C 16 38.946 -11.384 2.476 1.00181.14 C ANISOU 136 C5' DA C 16 21791 16799 30236 -7115 -9322 -4304 C ATOM 137 C4' DA C 16 37.833 -10.366 2.678 1.00181.77 C ANISOU 137 C4' DA C 16 23133 16330 29602 -6938 -9362 -4468 C ATOM 138 O4' DA C 16 36.553 -11.043 2.621 1.00178.16 O ANISOU 138 O4' DA C 16 23351 15711 28630 -6287 -9708 -4194 O ATOM 139 C3' DA C 16 37.852 -9.655 4.024 1.00183.91 C ANISOU 139 C3' DA C 16 24314 16850 28714 -7056 -9451 -4932 C ATOM 140 O3' DA C 16 38.675 -8.488 3.955 1.00187.48 O ANISOU 140 O3' DA C 16 24548 17238 29446 -7688 -8925 -5266 O ATOM 141 C2' DA C 16 36.381 -9.292 4.229 1.00182.60 C ANISOU 141 C2' DA C 16 25526 16220 27635 -6539 -9609 -4887 C ATOM 142 C1' DA C 16 35.647 -10.446 3.531 1.00178.66 C ANISOU 142 C1' DA C 16 24888 15661 27335 -6045 -9868 -4392 C ATOM 143 N9 DA C 16 35.137 -11.478 4.447 1.00177.48 N ANISOU 143 N9 DA C 16 25312 15858 26263 -5591 -10406 -4293 N ATOM 144 C8 DA C 16 35.866 -12.428 5.117 1.00178.63 C ANISOU 144 C8 DA C 16 24887 16495 26489 -5611 -10852 -4307 C ATOM 145 N7 DA 
C 16 35.141 -13.238 5.853 1.00177.03 N ANISOU 145 N7 DA C 16 25449 16482 25331 -5133 -11210 -4171 N ATOM 146 C5 DA C 16 33.851 -12.763 5.699 1.00174.71 C ANISOU 146 C5 DA C 16 26368 15833 24180 -4793 -10927 -4077 C ATOM 147 N3 DA C 16 32.738 -10.969 4.451 1.00170.48 N ANISOU 147 N3 DA C 16 26470 14921 23384 -4780 -9425 -4092 N ATOM 148 C4 DA C 16 33.826 -11.677 4.831 1.00175.56 C ANISOU 148 C4 DA C 16 26414 15480 24810 -5063 -10533 -4139 C TER ATOM 149 C2' DG D 8 21.125 -32.880 -7.497 1.00 41.74 C ANISOU 149 C2' DG D 8 5133 5644 5083 -268 -267 271 C ATOM 150 N2 DG D 8 17.756 -27.223 -6.279 1.00 41.65 N ANISOU 150 N2 DG D 8 5670 5704 4452 256 -451 -153 N ATOM 151 OP2 DC D 9 19.444 -36.087 -6.966 1.00 45.49 O ANISOU 151 OP2 DC D 9 5707 6215 5362 -240 158 751 O ATOM 152 O5' DC D 9 17.963 -35.148 -5.189 1.00 47.61 O ANISOU 152 O5' DC D 9 5891 6796 5401 108 130 782 O ATOM 153 C4' DC D 9 16.438 -33.511 -4.181 1.00 47.68 C ANISOU 153 C4' DC D 9 5905 6954 5256 421 173 777 C ATOM 154 O4' DC D 9 16.636 -32.414 -5.115 1.00 46.32 O ANISOU 154 O4' DC D 9 5720 6717 5161 270 -41 594 O ATOM 155 C3' DC D 9 15.222 -34.281 -4.663 1.00 47.87 C ANISOU 155 C3' DC D 9 5691 6956 5542 259 368 1004 C ATOM 156 O3' DC D 9 14.066 -33.831 -3.936 1.00 49.99 O ANISOU 156 O3' DC D 9 5865 7288 5841 500 604 1169 O ATOM 157 C2' DC D 9 15.155 -33.937 -6.154 1.00 46.75 C ANISOU 157 C2' DC D 9 5461 6721 5580 -58 121 904 C ATOM 158 C1' DC D 9 15.761 -32.532 -6.208 1.00 46.36 C ANISOU 158 C1' DC D 9 5559 6672 5385 57 -46 686 C ATOM 159 N1 DC D 9 16.555 -32.244 -7.476 1.00 45.92 N ANISOU 159 N1 DC D 9 5610 6451 5387 -177 -223 525 N ATOM 160 C2 DC D 9 16.550 -30.940 -8.025 1.00 45.71 C ANISOU 160 C2 DC D 9 5676 6352 5341 -158 -331 390 C ATOM 161 O2 DC D 9 15.858 -30.056 -7.494 1.00 46.93 O ANISOU 161 O2 DC D 9 5811 6591 5429 45 -303 403 O ATOM 162 N3 DC D 9 17.299 -30.692 -9.148 1.00 44.69 N ANISOU 162 N3 DC D 9 5710 6013 5258 -327 -387 274 N ATOM 163 C4 DC D 9 18.002 -31.679 -9.726 1.00 44.18 C ANISOU 163 
C4 DC D 9 5723 5809 5255 -494 -326 301 C ATOM 164 N4 DC D 9 18.688 -31.392 -10.831 1.00 43.68 N ANISOU 164 N4 DC D 9 5885 5481 5229 -605 -277 224 N ATOM 165 C5 DC D 9 18.008 -33.015 -9.199 1.00 44.61 C ANISOU 165 C5 DC D 9 5687 5940 5323 -518 -233 435 C ATOM 166 C6 DC D 9 17.303 -33.245 -8.072 1.00 45.48 C ANISOU 166 C6 DC D 9 5629 6264 5389 -363 -193 536 C ATOM 167 P DG D 10 12.633 -34.536 -4.085 1.00 51.63 P ANISOU 167 P DG D 10 5690 7480 6448 388 859 1471 P ATOM 168 OP1 DG D 10 11.993 -34.566 -2.745 1.00 55.90 O ANISOU 168 OP1 DG D 10 6283 8030 6926 780 1329 1697 O ATOM 169 OP2 DG D 10 12.792 -35.793 -4.861 1.00 51.10 O ANISOU 169 OP2 DG D 10 5538 7308 6569 6 778 1496 O ATOM 170 O5' DG D 10 11.835 -33.511 -4.991 1.00 51.95 O ANISOU 170 O5' DG D 10 5466 7555 6717 297 634 1466 O ATOM 171 C5' DG D 10 11.714 -32.153 -4.599 1.00 52.39 C ANISOU 171 C5' DG D 10 5649 7656 6599 612 672 1410 C ATOM 172 C4' DG D 10 11.196 -31.311 -5.747 1.00 52.11 C ANISOU 172 C4' DG D 10 5418 7630 6751 490 384 1385 C ATOM 173 O4' DG D 10 12.222 -31.189 -6.760 1.00 49.39 O ANISOU 173 O4' DG D 10 5315 7201 6250 233 30 1112 O ATOM 174 C3' DG D 10 9.952 -31.848 -6.451 1.00 54.58 C ANISOU 174 C3' DG D 10 5206 7973 7559 275 284 1638 C ATOM 175 O3' DG D 10 9.016 -30.796 -6.576 1.00 58.75 O ANISOU 175 O3' DG D 10 5490 8567 8267 512 297 1794 O ATOM 176 C2' DG D 10 10.477 -32.307 -7.809 1.00 51.83 C ANISOU 176 C2' DG D 10 4978 7529 7186 -143 -178 1447 C ATOM 177 C1' DG D 10 11.640 -31.352 -8.025 1.00 49.44 C ANISOU 177 C1' DG D 10 5132 7167 6485 -32 -268 1161 C ATOM 178 N9 DG D 10 12.684 -31.840 -8.943 1.00 47.12 N ANISOU 178 N9 DG D 10 5146 6716 6042 -316 -476 956 N ATOM 179 C8 DG D 10 13.307 -33.070 -8.914 1.00 46.20 C ANISOU 179 C8 DG D 10 5133 6515 5904 -515 -409 937 C ATOM 180 N7 DG D 10 14.254 -33.199 -9.818 1.00 44.53 N ANISOU 180 N7 DG D 10 5246 6119 5554 -679 -524 774 N ATOM 181 C5 DG D 10 14.280 -31.969 -10.477 1.00 43.30 C ANISOU 181 C5 DG D 10 5225 5914 5314 -597 -675 667 C ATOM 182 C6 
DG D 10 15.112 -31.506 -11.556 1.00 41.62 C ANISOU 182 C6 DG D 10 5395 5462 4955 -668 -744 507 C ATOM 183 O6 DG D 10 15.979 -32.137 -12.188 1.00 41.25 O ANISOU 183 O6 DG D 10 5636 5194 4842 -819 -674 445 O ATOM 184 N1 DG D 10 14.797 -30.192 -11.935 1.00 41.60 N ANISOU 184 N1 DG D 10 5469 5445 4892 -509 -832 459 N ATOM 185 C2 DG D 10 13.813 -29.418 -11.332 1.00 43.73 C ANISOU 185 C2 DG D 10 5468 5915 5233 -286 -849 563 C ATOM 186 N2 DG D 10 13.646 -28.173 -11.808 1.00 43.93 N ANISOU 186 N2 DG D 10 5658 5867 5167 -113 -891 518 N ATOM 187 N3 DG D 10 13.041 -29.841 -10.321 1.00 45.36 N ANISOU 187 N3 DG D 10 5302 6335 5597 -199 -748 732 N ATOM 188 C4 DG D 10 13.317 -31.120 -9.954 1.00 44.89 C ANISOU 188 C4 DG D 10 5171 6288 5599 -373 -668 771 C ATOM 189 P DC D 11 7.464 -31.076 -6.856 1.00 64.47 P ANISOU 189 P DC D 11 5497 9365 9635 459 284 2173 P ATOM 190 OP1 DC D 11 6.694 -30.199 -5.955 1.00 68.29 O ANISOU 190 OP1 DC D 11 5821 9892 10233 961 768 2440 O ATOM 191 OP2 DC D 11 7.221 -32.542 -6.838 1.00 66.22 O ANISOU 191 OP2 DC D 11 5442 9536 10183 92 297 2280 O ATOM 192 O5' DC D 11 7.294 -30.564 -8.369 1.00 64.31 O ANISOU 192 O5' DC D 11 5423 9353 9659 251 -357 2068 O ATOM 193 C5' DC D 11 7.627 -29.216 -8.713 1.00 63.28 C ANISOU 193 C5' DC D 11 5632 9211 9200 523 -437 1929 C ATOM 194 C4' DC D 11 8.011 -29.086 -10.181 1.00 62.67 C ANISOU 194 C4' DC D 11 5821 9041 8950 261 -1009 1721 C ATOM 195 O4' DC D 11 9.255 -29.758 -10.428 1.00 58.49 O ANISOU 195 O4' DC D 11 5760 8365 8097 -24 -1057 1430 O ATOM 196 C3' DC D 11 7.016 -29.681 -11.186 1.00 67.45 C ANISOU 196 C3' DC D 11 6000 9677 9951 -27 -1557 1885 C ATOM 197 O3' DC D 11 6.256 -28.657 -11.805 1.00 72.04 O ANISOU 197 O3' DC D 11 6413 10328 10632 230 -1830 2033 O ATOM 198 C2' DC D 11 7.890 -30.400 -12.212 1.00 64.48 C ANISOU 198 C2' DC D 11 6150 9097 9252 -428 -1934 1593 C ATOM 199 C1' DC D 11 9.306 -30.020 -11.805 1.00 58.64 C ANISOU 199 C1' DC D 11 5982 8251 8047 -301 -1549 1328 C ATOM 200 N1 DC D 11 10.301 -31.101 
-12.087 1.00 54.72 N ANISOU 200 N1 DC D 11 5853 7578 7362 -630 -1553 1136 N ATOM 201 C2 DC D 11 11.214 -30.932 -13.141 1.00 52.51 C ANISOU 201 C2 DC D 11 6165 7054 6734 -727 -1703 915 C ATOM 202 O2 DC D 11 11.240 -29.851 -13.751 1.00 52.40 O ANISOU 202 O2 DC D 11 6386 6974 6548 -537 -1809 863 O ATOM 203 N3 DC D 11 12.076 -31.957 -13.436 1.00 51.07 N ANISOU 203 N3 DC D 11 6319 6673 6412 -983 -1636 793 N ATOM 204 C4 DC D 11 12.018 -33.112 -12.738 1.00 51.92 C ANISOU 204 C4 DC D 11 6202 6832 6694 -1150 -1478 871 C ATOM 205 N4 DC D 11 12.895 -34.084 -13.036 1.00 50.99 N ANISOU 205 N4 DC D 11 6456 6495 6424 -1347 -1360 775 N ATOM 206 C5 DC D 11 11.056 -33.313 -11.693 1.00 53.75 C ANISOU 206 C5 DC D 11 5848 7303 7273 -1078 -1345 1085 C ATOM 207 C6 DC D 11 10.213 -32.306 -11.428 1.00 54.96 C ANISOU 207 C6 DC D 11 5658 7636 7588 -821 -1379 1220 C ATOM 208 P DC D 12 4.982 -29.027 -12.712 1.00 79.92 P ANISOU 208 P DC D 12 6815 11412 12140 35 -2495 2280 P ATOM 209 OP1 DC D 12 4.045 -27.883 -12.655 1.00 84.68 O ANISOU 209 OP1 DC D 12 7009 12168 12999 510 -2460 2587 O ATOM 210 OP2 DC D 12 4.513 -30.390 -12.342 1.00 82.50 O ANISOU 210 OP2 DC D 12 6639 11752 12957 -372 -2514 2401 O ATOM 211 O5' DC D 12 5.591 -29.119 -14.190 1.00 78.68 O ANISOU 211 O5' DC D 12 7339 11049 11508 -225 -3141 1981 O ATOM 212 C5' DC D 12 6.341 -28.033 -14.735 1.00 75.42 C ANISOU 212 C5' DC D 12 7617 10501 10539 57 -3086 1788 C ATOM 213 C4' DC D 12 7.081 -28.473 -15.987 1.00 73.85 C ANISOU 213 C4' DC D 12 8167 10013 9878 -235 -3505 1508 C ATOM 214 O4' DC D 12 8.144 -29.361 -15.612 1.00 68.93 O ANISOU 214 O4' DC D 12 7855 9247 9089 -518 -3156 1294 O ATOM 215 C3' DC D 12 6.231 -29.244 -16.987 1.00 79.69 C ANISOU 215 C3' DC D 12 8782 10711 10787 -563 -4329 1573 C ATOM 216 O3' DC D 12 5.932 -28.434 -18.123 1.00 84.39 O ANISOU 216 O3' DC D 12 9773 11213 11079 -333 -4853 1581 O ATOM 217 C2' DC D 12 7.065 -30.470 -17.365 1.00 76.49 C ANISOU 217 C2' DC D 12 8925 10024 10113 -1022 -4366 1313 C ATOM 218 C1' DC D 
12 8.392 -30.247 -16.669 1.00 69.20 C ANISOU 218 C1' DC D 12 8373 9022 8899 -878 -3589 1142 C ATOM 219 N1 DC D 12 8.969 -31.516 -16.114 1.00 65.22 N ANISOU 219 N1 DC D 12 7875 8442 8465 -1214 -3298 1054 N ATOM 220 C2 DC D 12 10.097 -32.088 -16.724 1.00 61.84 C ANISOU 220 C2 DC D 12 8201 7678 7618 -1383 -3174 828 C ATOM 221 O2 DC D 12 10.648 -31.493 -17.653 1.00 61.62 O ANISOU 221 O2 DC D 12 8828 7406 7177 -1244 -3231 703 O ATOM 222 N3 DC D 12 10.570 -33.278 -16.249 1.00 59.55 N ANISOU 222 N3 DC D 12 7919 7307 7399 -1647 -2908 788 N ATOM 223 C4 DC D 12 9.933 -33.914 -15.251 1.00 60.34 C ANISOU 223 C4 DC D 12 7360 7627 7939 -1767 -2789 947 C ATOM 224 N4 DC D 12 10.438 -35.079 -14.825 1.00 58.98 N ANISOU 224 N4 DC D 12 7286 7338 7787 -1987 -2495 915 N ATOM 225 C5 DC D 12 8.755 -33.372 -14.641 1.00 63.69 C ANISOU 225 C5 DC D 12 7020 8364 8816 -1619 -2885 1183 C ATOM 226 C6 DC D 12 8.305 -32.190 -15.105 1.00 65.90 C ANISOU 226 C6 DC D 12 7254 8739 9047 -1342 -3145 1236 C ATOM 227 P DT D 13 4.832 -28.910 -19.199 1.00 92.81 P ANISOU 227 P DT D 13 10681 12271 12312 -565 -5913 1685 P ATOM 228 OP1 DT D 13 4.277 -27.696 -19.840 1.00 97.01 O ANISOU 228 OP1 DT D 13 11287 12872 12702 -83 -6269 1848 O ATOM 229 OP2 DT D 13 3.912 -29.890 -18.553 1.00 96.47 O ANISOU 229 OP2 DT D 13 10177 12940 13537 -943 -6106 1880 O ATOM 230 O5' DT D 13 5.711 -29.720 -20.264 1.00 91.31 O ANISOU 230 O5' DT D 13 11574 11637 11481 -915 -6194 1339 O ATOM 231 C5' DT D 13 6.930 -29.169 -20.789 1.00 86.22 C ANISOU 231 C5' DT D 13 11952 10670 10139 -680 -5761 1116 C ATOM 232 C4' DT D 13 7.719 -30.231 -21.542 1.00 85.45 C ANISOU 232 C4' DT D 13 12743 10137 9587 -1049 -5843 853 C ATOM 233 O4' DT D 13 8.322 -31.135 -20.598 1.00 80.33 O ANISOU 233 O4' DT D 13 11803 9525 9192 -1339 -5271 790 O ATOM 234 C3' DT D 13 6.887 -31.101 -22.483 1.00 92.93 C ANISOU 234 C3' DT D 13 13905 10926 10480 -1423 -6845 817 C ATOM 235 C2' DT D 13 7.251 -32.549 -22.122 1.00 90.86 C ANISOU 235 C2' DT D 13 13659 10505 10357 -1953 
-6665 685 C ATOM 236 C1' DT D 13 8.437 -32.407 -21.184 1.00 81.85 C ANISOU 236 C1' DT D 13 12491 9402 9208 -1782 -5589 643 C ATOM 237 N1 DT D 13 8.458 -33.445 -20.074 1.00 78.39 N ANISOU 237 N1 DT D 13 11412 9119 9254 -2111 -5221 685 N ATOM 238 C2 DT D 13 9.496 -34.372 -20.017 1.00 74.10 C ANISOU 238 C2 DT D 13 11412 8277 8466 -2303 -4743 532 C ATOM 239 O2 DT D 13 10.411 -34.417 -20.833 1.00 72.77 O ANISOU 239 O2 DT D 13 12199 7705 7744 -2231 -4569 375 O ATOM 240 N3 DT D 13 9.417 -35.256 -18.956 1.00 71.85 N ANISOU 240 N3 DT D 13 10523 8153 8624 -2538 -4412 609 N ATOM 241 C4 DT D 13 8.409 -35.325 -17.983 1.00 73.57 C ANISOU 241 C4 DT D 13 9705 8751 9497 -2610 -4462 823 C ATOM 242 O4 DT D 13 8.462 -36.195 -17.112 1.00 72.40 O ANISOU 242 O4 DT D 13 9208 8650 9652 -2798 -4081 884 O ATOM 243 C5 DT D 13 7.341 -34.344 -18.119 1.00 77.71 C ANISOU 243 C5 DT D 13 9668 9555 10302 -2410 -4932 995 C ATOM 244 C7 DT D 13 6.188 -34.325 -17.149 1.00 80.34 C ANISOU 244 C7 DT D 13 8862 10255 11407 -2420 -4921 1294 C ATOM 245 C6 DT D 13 7.412 -33.474 -19.145 1.00 79.83 C ANISOU 245 C6 DT D 13 10493 9706 10133 -2174 -5316 917 C ATOM 246 C6 DG D 14 8.280 -38.494 -20.423 1.00 83.95 C ANISOU 246 C6 DG D 14 12861 8857 10179 -3894 -5663 374 C ATOM 247 O6 DG D 14 7.769 -38.670 -19.310 1.00 82.63 O ANISOU 247 O6 DG D 14 11704 9034 10658 -3957 -5402 563 O TER HETATM 248 O HOH B 350 21.454 -30.750 -15.477 1.00 65.67 O HETATM 249 O HOH B 355 10.526 -35.905 -9.116 1.00 45.59 O HETATM 250 O HOH C 104 21.084 -29.582 -13.041 1.00 44.23 O HETATM 251 O HOH C 105 13.755 -24.488 -10.051 1.00 55.10 O HETATM 252 O HOH C 108 19.457 -33.371 -15.015 1.00 54.30 O HETATM 253 O HOH C 114 10.055 -22.443 -5.392 1.00 51.50 O HETATM 254 O HOH C 115 14.423 -33.496 -22.721 1.00 58.74 O HETATM 255 O HOH D 102 17.487 -36.654 -8.759 1.00 46.91 O HETATM 256 O HOH D 104 14.192 -29.033 -5.516 1.00 45.92 O HETATM 257 O HOH D 105 4.788 -32.025 -5.002 1.00 63.52 O HETATM 258 O HOH D 108 9.056 -30.329 -2.445 1.00 63.77 O END 
''' def write_and_run(pdb_name): input_model_filename = 'paral_geo_output.pdb' f=file(input_model_filename, 'wb') f.write(pdb_name) del f cmd = 'phenix.fmodel %s high_resolution=3' % input_model_filename print('\n~> %s\n' % cmd) rc = easy_run.go(cmd) cmd = 'phenix.real_space_refine %s %s.mtz' % (input_model_filename, input_model_filename) cmd += ' refinement.macro_cycles=0' cmd += ' base_pair.restrain_planarity=True' print('\n~> %s\n' % cmd) rc = easy_run.go(cmd) def test_geo(result, opposite=False): input_model_filename = 'paral_geo_output.pdb' f=file('%s_initial.geo' % input_model_filename.replace('.pdb', '')) lines = f.read() del f if 0: print(lines) for i in range(len(result)): print(i, lines.find(result[:i]), result[i-10:i]) if opposite: assert lines.find(result)==-1, 'found: %s' % result else: assert lines.find(result)>-1, 'not found: %s' % result print('OK') def main(): write_and_run(large_pdb) result = '''Basepair planarity restraints: 5 Sorted by residual: delta sigma weight rms_deltas residual plane pdb=" C1' DC C 7 " -0.366 1.76e-01 3.23e+01 1.84e-01 2.28e+01 pdb=" N1 DC C 7 " -0.112 1.76e-01 3.23e+01 pdb=" C2 DC C 7 " -0.073 1.76e-01 3.23e+01 pdb=" O2 DC C 7 " -0.185 1.76e-01 3.23e+01 pdb=" N3 DC C 7 " 0.110 1.76e-01 3.23e+01 pdb=" C4 DC C 7 " 0.202 1.76e-01 3.23e+01 pdb=" N4 DC C 7 " 0.340 1.76e-01 3.23e+01 pdb=" C5 DC C 7 " 0.138 1.76e-01 3.23e+01 pdb=" C6 DC C 7 " -0.002 1.76e-01 3.23e+01 pdb=" C1' DG D 10 " 0.142 1.76e-01 3.23e+01 pdb=" N9 DG D 10 " -0.030 1.76e-01 3.23e+01 pdb=" C8 DG D 10 " -0.167 1.76e-01 3.23e+01 pdb=" N7 DG D 10 " -0.231 1.76e-01 3.23e+01 pdb=" C5 DG D 10 " -0.106 1.76e-01 3.23e+01 pdb=" C6 DG D 10 " -0.096 1.76e-01 3.23e+01 pdb=" O6 DG D 10 " -0.252 1.76e-01 3.23e+01 pdb=" N1 DG D 10 " 0.039 1.76e-01 3.23e+01 pdb=" C2 DG D 10 " 0.167 1.76e-01 3.23e+01 pdb=" N2 DG D 10 " 0.300 1.76e-01 3.23e+01 pdb=" N3 DG D 10 " 0.167 1.76e-01 3.23e+01 pdb=" C4 DG D 10 " 0.015 1.76e-01 3.23e+01''' test_geo(result) result = '''Stacking 
parallelity restraints: 8 Sorted by residual: plane 1 plane 2 residual delta(deg) sigma pdb=" C1' DC C 7 " pdb=" C1' DG C 8 " 3.21e+01 12.4210 0.0270 pdb=" N1 DC C 7 " pdb=" N9 DG C 8 " pdb=" C2 DC C 7 " pdb=" C8 DG C 8 " pdb=" O2 DC C 7 " pdb=" N7 DG C 8 " pdb=" N3 DC C 7 " pdb=" C5 DG C 8 " pdb=" C4 DC C 7 " pdb=" C6 DG C 8 " pdb=" N4 DC C 7 " pdb=" O6 DG C 8 " pdb=" C5 DC C 7 " pdb=" N1 DG C 8 " pdb=" C6 DC C 7 " pdb=" C2 DG C 8 " pdb=" N2 DG C 8 " pdb=" N3 DG C 8 " pdb=" C4 DG C 8 "''' test_geo(result) result = '''Basepair parallelity restraints: 5 Sorted by residual: plane 1 plane 2 residual delta(deg) sigma pdb=" C1' DG C 8 " pdb=" C1' DC D 9 " 2.80e+01 14.3936 0.0335 pdb=" N9 DG C 8 " pdb=" N1 DC D 9 " pdb=" C8 DG C 8 " pdb=" C2 DC D 9 " pdb=" N7 DG C 8 " pdb=" O2 DC D 9 " pdb=" C5 DG C 8 " pdb=" N3 DC D 9 " pdb=" C6 DG C 8 " pdb=" C4 DC D 9 " pdb=" O6 DG C 8 " pdb=" N4 DC D 9 " pdb=" N1 DG C 8 " pdb=" C5 DC D 9 " pdb=" C2 DG C 8 " pdb=" C6 DC D 9 " pdb=" N2 DG C 8 "''' test_geo(result) write_and_run(small_pdb) result = '''Stacking parallelity restraints: 2 Sorted by residual: plane 1 plane 2 residual delta(deg) sigma pdb=" C1' DC D 11 " pdb=" C1' DC D 12 " 1.50e+01 8.4829 0.0270 pdb=" N1 DC D 11 " pdb=" N1 DC D 12 " pdb=" C2 DC D 11 " pdb=" C2 DC D 12 " pdb=" O2 DC D 11 " pdb=" O2 DC D 12 " pdb=" N3 DC D 11 " pdb=" N3 DC D 12 " pdb=" C4 DC D 11 " pdb=" C4 DC D 12 " pdb=" N4 DC D 11 " pdb=" N4 DC D 12 " pdb=" C5 DC D 11 " pdb=" C5 DC D 12 " pdb=" C6 DC D 11 " pdb=" C6 DC D 12 " plane 1 plane 2 residual delta(deg) sigma pdb=" C1' DG D 10 " pdb=" C1' DC D 11 " 1.14e+00 2.3398 0.0270 pdb=" N9 DG D 10 " pdb=" N1 DC D 11 " pdb=" C8 DG D 10 " pdb=" C2 DC D 11 " pdb=" N7 DG D 10 " pdb=" O2 DC D 11 " pdb=" C5 DG D 10 " pdb=" N3 DC D 11 " pdb=" C6 DG D 10 " pdb=" C4 DC D 11 " pdb=" O6 DG D 10 " pdb=" N4 DC D 11 " pdb=" N1 DG D 10 " pdb=" C5 DC D 11 " pdb=" C2 DG D 10 " pdb=" C6 DC D 11 " pdb=" N2 DG D 10 "''' test_geo(result) test_geo('Basepair planarity 
restraints:', opposite=True) test_geo('Basepair parallelity restraints:', opposite=True) if __name__ == '__main__': main()
71.94717
78
0.477622
10,824
57,198
2.5182
0.189209
0.03412
0.018711
0.005393
0.582786
0.426973
0.408115
0.404263
0.398577
0.395641
0
0.594885
0.457271
57,198
794
79
72.037783
0.283156
0
0
0.048346
0
0.445293
0.979125
0.001434
0
0
0
0
0.002545
1
0.003817
false
0
0.002545
0
0.006361
0.007634
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
2ae1387524abaf472b9d846edab85334e0e628aa
91
py
Python
cookbook/apps.py
Parametricall/portfolio_backend
ad00ee17536a49270137c69a53c7fa6869108094
[ "MIT" ]
null
null
null
cookbook/apps.py
Parametricall/portfolio_backend
ad00ee17536a49270137c69a53c7fa6869108094
[ "MIT" ]
null
null
null
cookbook/apps.py
Parametricall/portfolio_backend
ad00ee17536a49270137c69a53c7fa6869108094
[ "MIT" ]
null
null
null
from django.apps import AppConfig


class CookbookConfig(AppConfig):
    """Django application configuration for the cookbook app."""

    name = "cookbook"
15.166667
33
0.758242
10
91
6.9
0.9
0
0
0
0
0
0
0
0
0
0
0
0.164835
91
5
34
18.2
0.907895
0
0
0
0
0
0.087912
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
6309a4c1d2934a1ef5cdf9e19e62b3fd8654ab73
134
py
Python
tests/run_test.py
insilichem/pgaudi
1a502864024ef5c2a07473f2b02680561ba507bf
[ "Apache-2.0" ]
null
null
null
tests/run_test.py
insilichem/pgaudi
1a502864024ef5c2a07473f2b02680561ba507bf
[ "Apache-2.0" ]
4
2019-04-23T07:32:20.000Z
2019-04-29T07:26:12.000Z
tests/run_test.py
insilichem/pgaudi
1a502864024ef5c2a07473f2b02680561ba507bf
[ "Apache-2.0" ]
1
2019-04-10T12:10:05.000Z
2019-04-10T12:10:05.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Entry point that runs the project's test suite through pytest."""

import sys

import pytest


def _run_suite():
    # -s disables pytest's output capturing so test prints are visible.
    return pytest.main(["-s"])


if __name__ == "__main__":
    sys.exit(_run_suite())
14.888889
33
0.61194
19
134
3.894737
0.789474
0
0
0
0
0
0
0
0
0
0
0.008929
0.164179
134
8
34
16.75
0.651786
0.313433
0
0
0
0
0.111111
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
2d61e2ec67c759379cac2e8ad8854b7912080179
797
py
Python
cadastro/models.py
jefersonla/trabalho-labweb-python
5d7451c91c26170a87a81c21726ea868e9c5a71c
[ "MIT" ]
null
null
null
cadastro/models.py
jefersonla/trabalho-labweb-python
5d7451c91c26170a87a81c21726ea868e9c5a71c
[ "MIT" ]
null
null
null
cadastro/models.py
jefersonla/trabalho-labweb-python
5d7451c91c26170a87a81c21726ea868e9c5a71c
[ "MIT" ]
null
null
null
from django.db import models
from django.forms import ModelForm

# Create your models here.


class Bicicleta(models.Model):
    """Registration record for a bicycle and its owner's contact details."""

    fabricante = models.CharField(max_length=255)  # manufacturer
    modelo = models.CharField(max_length=255)  # model name
    cor = models.CharField(max_length=255)  # colour
    marcha = models.CharField(max_length=255)  # gears
    marca_cambio = models.CharField(max_length=255)  # gear-shift brand
    proprietario = models.CharField(max_length=255)  # owner's name
    celular = models.CharField(max_length=255)  # owner's mobile phone
    email = models.CharField(max_length=255)  # owner's e-mail address


class BicicletaForm(ModelForm):
    """ModelForm exposing every user-editable Bicicleta field."""

    class Meta:
        model = Bicicleta
        fields = [
            'fabricante',
            'modelo',
            'cor',
            'marcha',
            'marca_cambio',
            'proprietario',
            'celular',
            'email'
        ]
27.482759
51
0.628607
84
797
5.845238
0.345238
0.244399
0.293279
0.391039
0.439919
0
0
0
0
0
0
0.041379
0.272271
797
29
52
27.482759
0.805172
0.030113
0
0
0
0
0.079016
0
0
0
0
0
0
1
0
false
0
0.083333
0
0.541667
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
2d7a6af8602078f18e4770fdb3f26102a1a37da3
253
py
Python
lino_book/projects/lydia/tests/dumps/18.12.0/notes_eventtype.py
lino-framework/lino_book
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
[ "BSD-2-Clause" ]
3
2016-08-25T05:58:09.000Z
2019-12-05T11:13:45.000Z
lino_book/projects/lydia/tests/dumps/18.12.0/notes_eventtype.py
lino-framework/lino_book
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
[ "BSD-2-Clause" ]
18
2016-11-12T21:38:58.000Z
2019-12-03T17:54:38.000Z
lino_book/projects/lydia/tests/dumps/18.12.0/notes_eventtype.py
lino-framework/lino_book
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
[ "BSD-2-Clause" ]
9
2016-10-15T11:12:33.000Z
2021-09-22T04:37:37.000Z
# -*- coding: UTF-8 -*- logger.info("Loading 1 objects to table notes_eventtype...") # fields: id, name, remark, body loader.save(create_notes_eventtype(1,['System note', 'System note', 'System note'],u'',['', '', ''])) loader.flush_deferred_objects()
36.142857
101
0.675889
34
253
4.882353
0.735294
0.180723
0.192771
0.240964
0
0
0
0
0
0
0
0.013274
0.106719
253
6
102
42.166667
0.721239
0.205534
0
0
0
0
0.393939
0
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
2d7b707d2d3dd620363d5ee85a0b31a238be64fa
663
py
Python
PLM/ui/models/__init__.py
vtta2008/pipelineTool
2431d2fc987e3b31f2a6a63427fee456fa0765a0
[ "Apache-2.0" ]
7
2020-10-11T21:21:50.000Z
2022-03-07T03:37:51.000Z
PLM/ui/models/__init__.py
vtta2008/pipelineTool
2431d2fc987e3b31f2a6a63427fee456fa0765a0
[ "Apache-2.0" ]
null
null
null
PLM/ui/models/__init__.py
vtta2008/pipelineTool
2431d2fc987e3b31f2a6a63427fee456fa0765a0
[ "Apache-2.0" ]
3
2019-03-11T21:54:52.000Z
2019-11-25T11:23:17.000Z
# -*- coding: utf-8 -*- """ Script Name: __init__.py.py Author: Do Trinh/Jimmy - 3D artist. Description: """ # ------------------------------------------------------------------------------------------------------------- from .ActionManager import ActionManager from .AppModel import AppModel from .ButtonManager import ButtonManager from .CommandUI import CommandUI from .RegistryLayout import RegistryLayout # ------------------------------------------------------------------------------------------------------------- # Created by panda on 7/12/2019 - 4:35 PM # © 2017 - 2018 DAMGteam. All rights reserved
31.571429
111
0.443439
52
663
5.596154
0.730769
0
0
0
0
0
0
0
0
0
0
0.036298
0.168929
663
21
112
31.571429
0.490018
0.609351
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
935058eca3ec4cd9427448bfdab7494f604744cf
2,692
py
Python
model/supplyrequest.py
beda-software/fhir-py-experements
363cfb894fa6f971b9be19340cae1b0a3a4377d8
[ "MIT" ]
null
null
null
model/supplyrequest.py
beda-software/fhir-py-experements
363cfb894fa6f971b9be19340cae1b0a3a4377d8
[ "MIT" ]
null
null
null
model/supplyrequest.py
beda-software/fhir-py-experements
363cfb894fa6f971b9be19340cae1b0a3a4377d8
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Generated from FHIR 4.0.1-9346c8cc45 (http://hl7.org/fhir/StructureDefinition/SupplyRequest) on 2020-02-03. # 2020, SMART Health IT. import sys from dataclasses import dataclass, field from typing import ClassVar, Optional, List from .backboneelement import BackboneElement from .codeableconcept import CodeableConcept from .domainresource import DomainResource from .fhirdate import FHIRDate from .fhirreference import FHIRReference from .identifier import Identifier from .period import Period from .quantity import Quantity from .range import Range from .timing import Timing @dataclass class SupplyRequestParameter(BackboneElement): """ Ordered item details. Specific parameters for the ordered item. For example, the size of the indicated item. """ resource_type: ClassVar[str] = "SupplyRequestParameter" code: Optional[CodeableConcept] = None valueCodeableConcept: Optional[CodeableConcept] = field(default=None, metadata=dict(one_of_many='value',)) valueQuantity: Optional[Quantity] = field(default=None, metadata=dict(one_of_many='value',)) valueRange: Optional[Range] = field(default=None, metadata=dict(one_of_many='value',)) valueBoolean: Optional[bool] = field(default=None, metadata=dict(one_of_many='value',)) @dataclass class SupplyRequest(DomainResource): """ Request for a medication, substance or device. A record of a request for a medication, substance or device used in the healthcare setting. 
""" resource_type: ClassVar[str] = "SupplyRequest" identifier: Optional[List[Identifier]] = None status: Optional[str] = None category: Optional[CodeableConcept] = None priority: Optional[str] = None itemCodeableConcept: CodeableConcept = field(default=None, metadata=dict(one_of_many='item',)) itemReference: FHIRReference = field(default=None, metadata=dict(one_of_many='item',)) quantity: Quantity = None parameter: Optional[List[SupplyRequestParameter]] = None occurrenceDateTime: Optional[FHIRDate] = field(default=None, metadata=dict(one_of_many='occurrence',)) occurrencePeriod: Optional[Period] = field(default=None, metadata=dict(one_of_many='occurrence',)) occurrenceTiming: Optional[Timing] = field(default=None, metadata=dict(one_of_many='occurrence',)) authoredOn: Optional[FHIRDate] = None requester: Optional[FHIRReference] = None supplier: Optional[List[FHIRReference]] = None reasonCode: Optional[List[CodeableConcept]] = None reasonReference: Optional[List[FHIRReference]] = None deliverFrom: Optional[FHIRReference] = None deliverTo: Optional[FHIRReference] = None
42.0625
110
0.755572
305
2,692
6.603279
0.334426
0.053625
0.0715
0.107249
0.246773
0.246773
0.246773
0.209037
0.209037
0
0
0.010363
0.139673
2,692
64
111
42.0625
0.85924
0.158247
0
0.047619
1
0
0.041817
0.009892
0
0
0
0
0
1
0
true
0
0.309524
0
0.952381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
935d67b73154dab10f3b638fbc699e5b3b6adefc
41
py
Python
kindling/templates/settings/celery_development.py
radiosilence/django-kindling
85c4fa6abc678850a40790ec590635f2397de00b
[ "MIT" ]
null
null
null
kindling/templates/settings/celery_development.py
radiosilence/django-kindling
85c4fa6abc678850a40790ec590635f2397de00b
[ "MIT" ]
1
2020-09-25T06:58:55.000Z
2020-09-28T06:50:23.000Z
kindling/templates/settings/celery_development.py
radiosilence/django-kindling
85c4fa6abc678850a40790ec590635f2397de00b
[ "MIT" ]
null
null
null
# Celery settings template: inherit everything from the development
# settings, but force DEBUG off.
from development import *

DEBUG = False
10.25
25
0.756098
5
41
6.2
1
0
0
0
0
0
0
0
0
0
0
0
0.195122
41
3
26
13.666667
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
9360b2a3266db9c22439f86daa4894252253da72
261
py
Python
anime_downloader/extractors/fake_extractor.py
Alsira/anime-downloader
d82b4cfd5c7c6c358d0d8ffd36ce2d5c4a285595
[ "Unlicense" ]
1,077
2020-10-17T15:43:17.000Z
2022-03-31T15:24:29.000Z
anime_downloader/extractors/fake_extractor.py
Alsira/anime-downloader
d82b4cfd5c7c6c358d0d8ffd36ce2d5c4a285595
[ "Unlicense" ]
509
2018-06-01T13:07:56.000Z
2020-10-17T13:34:39.000Z
anime_downloader/extractors/fake_extractor.py
Alsira/anime-downloader
d82b4cfd5c7c6c358d0d8ffd36ce2d5c4a285595
[ "Unlicense" ]
255
2018-05-27T03:52:11.000Z
2020-10-12T17:27:38.000Z
from anime_downloader.extractors.base_extractor import BaseExtractor


class AnimeVideo(BaseExtractor):
    """Extractor for sources whose page URL is already the stream URL."""

    def _get_data(self):
        # Fall back to the stream URL itself when no referer was recorded.
        referer = self._referer if self._referer else self.url
        return {
            'stream_url': self.url,
            'referer': referer
        }
26.1
68
0.666667
29
261
5.758621
0.689655
0.083832
0
0
0
0
0
0
0
0
0
0
0.252874
261
9
69
29
0.85641
0
0
0
0
0
0.065134
0
0
0
0
0
0
1
0.142857
false
0
0.142857
0.142857
0.571429
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
9361e96e6b5e988a3c7e02e9eb5b370347edc1d3
1,551
py
Python
astropy/tests/disable_internet.py
jbkalmbach/astropy
88ae8c615533efd1e60de4aded204943f66f881c
[ "BSD-3-Clause" ]
1
2022-03-02T17:07:20.000Z
2022-03-02T17:07:20.000Z
astropy/tests/disable_internet.py
jbkalmbach/astropy
88ae8c615533efd1e60de4aded204943f66f881c
[ "BSD-3-Clause" ]
11
2017-12-18T16:27:29.000Z
2018-08-29T14:54:22.000Z
astropy/tests/disable_internet.py
jbkalmbach/astropy
88ae8c615533efd1e60de4aded204943f66f881c
[ "BSD-3-Clause" ]
1
2018-08-02T09:33:21.000Z
2018-08-02T09:33:21.000Z
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ This is retained only for backwards compatibility. Affiliated packages should no longer import ``disable_internet`` from ``astropy.tests``. It is now available from ``pytest_remotedata``. However, this is not the recommended mechanism for controlling access to remote data in tests. Instead, packages should make use of decorators provided by the pytest_remotedata plugin: - ``@pytest.mark.remote_data`` for tests that require remote data access - ``@pytest.mark.internet_off`` for tests that should only run when remote data access is disabled. Remote data access for the test suite is controlled by the ``--remote-data`` command line flag. This is either passed to ``pytest`` directly or to the ``setup.py test`` command. TODO: This module should eventually be removed once backwards compatibility is no longer supported. """ from warnings import warn from ..utils.exceptions import AstropyDeprecationWarning warn("The ``disable_internet`` module is no longer provided by astropy. It " "is now available as ``pytest_remotedata.disable_internet``. However, " "developers are encouraged to avoid using this module directly. See " "<https://docs.astropy.org/en/latest/whatsnew/3.0.html#pytest-plugins> " "for more information.", AstropyDeprecationWarning) try: # This should only be necessary during testing, in which case the test # package must be installed anyway. from pytest_remotedata.disable_internet import * except ImportError: pass
45.617647
77
0.768536
218
1,551
5.422018
0.518349
0.050761
0.040609
0.027073
0
0
0
0
0
0
0
0.002287
0.154094
1,551
33
78
47
0.898628
0.638943
0
0
0
0.090909
0.538182
0.070909
0
0
0
0.030303
0
1
0
true
0.090909
0.363636
0
0.363636
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
1
1
0
0
0
0
4
936408a06540d5d25477a371f86a922457ea10ad
182
py
Python
main/multiply-two-strings/multiply-two-strings.py
EliahKagan/old-practice-snapshot
1b53897eac6902f8d867c8f154ce2a489abb8133
[ "0BSD" ]
null
null
null
main/multiply-two-strings/multiply-two-strings.py
EliahKagan/old-practice-snapshot
1b53897eac6902f8d867c8f154ce2a489abb8133
[ "0BSD" ]
null
null
null
main/multiply-two-strings/multiply-two-strings.py
EliahKagan/old-practice-snapshot
1b53897eac6902f8d867c8f154ce2a489abb8133
[ "0BSD" ]
null
null
null
# Your task is to complete this function.
# The function must strictly return a string, otherwise the answer
# won't be printed.
def multiplyStrings(str1, str2):
    """Multiply two integers given as decimal strings; return the product as a string."""
    product = int(str1) * int(str2)
    return str(product)
36.4
70
0.763736
28
182
4.964286
0.821429
0
0
0
0
0
0
0
0
0
0
0.026316
0.164835
182
4
71
45.5
0.888158
0.587912
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
fa72ccf6e1f01dbb7a991d724c35a726e899d486
1,621
py
Python
torchlife/losses.py
sachinruk/torchlife2
fc5217a9f48ff22f7c26126fa9a473f63c328bc0
[ "Apache-2.0" ]
10
2019-10-21T01:19:18.000Z
2022-03-26T17:03:07.000Z
torchlife/losses.py
sachinruk/torchlife2
fc5217a9f48ff22f7c26126fa9a473f63c328bc0
[ "Apache-2.0" ]
4
2020-11-30T04:53:30.000Z
2022-02-26T06:18:42.000Z
torchlife/losses.py
sachinruk/torchlife2
fc5217a9f48ff22f7c26126fa9a473f63c328bc0
[ "Apache-2.0" ]
1
2021-06-23T15:24:03.000Z
2021-06-23T15:24:03.000Z
# AUTOGENERATED! DO NOT EDIT! File to edit: 95_Losses.ipynb (unless otherwise specified). __all__ = ['Loss', 'LossType', 'AFTLoss', 'aft_loss', 'HazardLoss', 'hazard_loss'] # Cell from abc import ABC, abstractmethod from typing import Callable, Tuple import torch # Cell class Loss(ABC): @abstractmethod def __call__(event:torch.Tensor, *args): pass # Cell LossType = Callable[[torch.Tensor, torch.Tensor, torch.Tensor], torch.Tensor] # Cell class AFTLoss(Loss): @staticmethod def __call__(event:torch.Tensor, log_pdf: torch.Tensor, log_icdf: torch.Tensor) -> torch.Tensor: lik = event * log_pdf + (1 - event) * log_icdf return -lik.mean() # Cell def _aft_loss( log_pdf: torch.Tensor, log_cdf: torch.Tensor, e: torch.Tensor ) -> torch.Tensor: lik = e * log_pdf + (1 - e) * log_cdf return -lik.mean() def aft_loss(log_prob, e): log_pdf, log_cdf = log_prob return _aft_loss(log_pdf, log_cdf, e) # Cell class HazardLoss(Loss): @staticmethod def __call__(event: torch.Tensor, logλ: torch.Tensor, Λ: torch.Tensor) -> torch.Tensor: log_lik = event * logλ - Λ return -log_lik.mean() # Cell def _hazard_loss(logλ: torch.Tensor, Λ: torch.Tensor, e: torch.Tensor) -> torch.Tensor: log_lik = e * logλ - Λ return -log_lik.mean() def hazard_loss( hazard: Tuple[torch.Tensor, torch.Tensor], e: torch.Tensor ) -> torch.Tensor: """ parameters: - hazard: log hazard and Cumulative hazard - e: torch.Tensor of 1 if death event occured and 0 otherwise """ logλ, Λ = hazard return _hazard_loss(logλ, Λ, e)
27.016667
100
0.669957
230
1,621
4.517391
0.243478
0.275265
0.138595
0.190568
0.407122
0.319538
0.173244
0
0
0
0
0.004655
0.204812
1,621
60
101
27.016667
0.801396
0.148057
0
0.228571
1
0
0.035529
0
0
0
0
0
0
1
0.2
false
0.028571
0.085714
0
0.542857
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
fa8bfdd43fa8170c8fefb81e61066ab0f0503372
143
py
Python
main.py
dclavijo45/mvc-flask-template
4904047ad4cd0679ce634b0a92ec5fcdd6152f15
[ "MIT" ]
null
null
null
main.py
dclavijo45/mvc-flask-template
4904047ad4cd0679ce634b0a92ec5fcdd6152f15
[ "MIT" ]
null
null
null
main.py
dclavijo45/mvc-flask-template
4904047ad4cd0679ce634b0a92ec5fcdd6152f15
[ "MIT" ]
null
null
null
from config import PORT, HOST, DEBUG
from __init__ import app

if __name__ == "__main__":
    # bool(DEBUG) normalizes whatever truthy/falsy value config provides
    # before handing it to Flask's debug flag.
    app.run(host=HOST, port=PORT, debug=bool(DEBUG))
23.833333
52
0.727273
22
143
4.181818
0.590909
0
0
0
0
0
0
0
0
0
0
0
0.153846
143
5
53
28.6
0.760331
0
0
0
0
0
0.055944
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
fa9d6081796d11c9cc63c348ffd0e2b0d508df23
219
py
Python
tests/deprecation_rules/deprecated_django_simplejson/checked_file.py
iwoca/django-seven
c7be98b73c139c9e74a9be94a0f20a723c739c80
[ "BSD-3-Clause" ]
9
2016-05-25T22:33:17.000Z
2021-05-29T18:38:07.000Z
tests/deprecation_rules/deprecated_django_simplejson/checked_file.py
iwoca/django-upgrade-tools
c7be98b73c139c9e74a9be94a0f20a723c739c80
[ "BSD-3-Clause" ]
3
2016-06-19T21:17:06.000Z
2016-07-20T20:26:14.000Z
tests/deprecation_rules/deprecated_django_simplejson/checked_file.py
iwoca/django-upgrade-tools
c7be98b73c139c9e74a9be94a0f20a723c739c80
[ "BSD-3-Clause" ]
1
2016-08-11T07:27:30.000Z
2016-08-11T07:27:30.000Z
# Old django json import from django.utils import simplejson import django.utils.simplejson # New correct json import (standard python one, as simplejson speedups have been included in python standard lib) import json
31.285714
113
0.817352
32
219
5.59375
0.59375
0.111732
0
0
0
0
0
0
0
0
0
0
0.146119
219
6
114
36.5
0.957219
0.611872
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
faaead3a1606caf23ac36900fa9dfdab952fa7dd
161
py
Python
venv/lib/python3.8/site-packages/uno/form.py
LachlanAttwood/ConvertMe
da544b5cacead3213dab76e9e716222b011d7688
[ "MIT" ]
null
null
null
venv/lib/python3.8/site-packages/uno/form.py
LachlanAttwood/ConvertMe
da544b5cacead3213dab76e9e716222b011d7688
[ "MIT" ]
3
2015-11-09T19:25:22.000Z
2016-06-07T15:02:03.000Z
uno/form.py
elbow-jason/Uno
4ad07d7b84e5b6e3e2b2c89db69448906f24b4e4
[ "MIT" ]
null
null
null
from base import UnoBaseForm


class UnoForm(UnoBaseForm):
    """Thin wrapper that defers all construction to UnoBaseForm."""

    def __init__(self, *args, **kwargs):
        # Pass every argument straight through to the base form.
        # (Two-argument super() kept for Python 2 compatibility.)
        super(UnoForm, self).__init__(*args, **kwargs)
12.384615
54
0.664596
18
161
5.5
0.666667
0.20202
0
0
0
0
0
0
0
0
0
0
0.204969
161
12
55
13.416667
0.773438
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
87d7bb7b90dee2cb217e4e3bc3369bd9c1df6aff
1,123
py
Python
plugins/flytekit-kf-tensorflow/flytekitplugins/kftensorflow/models.py
ggydush-fn/flytekit
6530601c2538a5d804127a97f63291730b1ba1d8
[ "Apache-2.0" ]
1
2021-11-11T10:10:10.000Z
2021-11-11T10:10:10.000Z
plugins/flytekit-kf-tensorflow/flytekitplugins/kftensorflow/models.py
ggydush-fn/flytekit
6530601c2538a5d804127a97f63291730b1ba1d8
[ "Apache-2.0" ]
null
null
null
plugins/flytekit-kf-tensorflow/flytekitplugins/kftensorflow/models.py
ggydush-fn/flytekit
6530601c2538a5d804127a97f63291730b1ba1d8
[ "Apache-2.0" ]
null
null
null
from flyteidl.plugins import tensorflow_pb2 as _tensorflow_task from flytekit.models import common as _common class TensorFlowJob(_common.FlyteIdlEntity): def __init__(self, workers_count, ps_replicas_count, chief_replicas_count): self._workers_count = workers_count self._ps_replicas_count = ps_replicas_count self._chief_replicas_count = chief_replicas_count @property def workers_count(self): return self._workers_count @property def ps_replicas_count(self): return self._ps_replicas_count @property def chief_replicas_count(self): return self._chief_replicas_count def to_flyte_idl(self): return _tensorflow_task.DistributedTensorflowTrainingTask( workers=self.workers_count, ps_replicas=self.ps_replicas_count, chief_replicas=self.chief_replicas_count ) @classmethod def from_flyte_idl(cls, pb2_object): return cls( workers_count=pb2_object.workers, ps_replicas_count=pb2_object.ps_replicas, chief_replicas_count=pb2_object.chief_replicas, )
31.194444
116
0.732858
135
1,123
5.622222
0.22963
0.239789
0.13834
0.102767
0.247694
0
0
0
0
0
0
0.005643
0.211042
1,123
35
117
32.085714
0.851016
0
0
0.111111
0
0
0
0
0
0
0
0
0
1
0.222222
false
0
0.074074
0.185185
0.518519
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
e2106c0d2cbcb318a290c8c53a41a564246a1ce1
134
py
Python
cloud/sensing_bus/publisher/apps.py
pedrocruz/sensing_bus
f56087f548be284b27bef6ba6de2e1bbf38d9f06
[ "Apache-2.0" ]
5
2017-01-04T01:47:52.000Z
2018-11-03T15:33:19.000Z
cloud/sensing_bus/publisher/apps.py
pedrocruz/sensing_bus
f56087f548be284b27bef6ba6de2e1bbf38d9f06
[ "Apache-2.0" ]
null
null
null
cloud/sensing_bus/publisher/apps.py
pedrocruz/sensing_bus
f56087f548be284b27bef6ba6de2e1bbf38d9f06
[ "Apache-2.0" ]
4
2017-01-04T13:24:55.000Z
2018-11-22T13:50:24.000Z
from __future__ import unicode_literals

from django.apps import AppConfig


class PublisherConfig(AppConfig):
    """Django application configuration for the publisher app."""

    name = 'publisher'
16.75
39
0.798507
15
134
6.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.149254
134
7
40
19.142857
0.894737
0
0
0
0
0
0.067164
0
0
0
0
0
0
1
0
false
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
355bae0d7abefbc2fc952b3177cc49644de4f4a0
305
py
Python
mantis/adapters/postgresqladapter.py
Clivern/Mantis
5f80ef49d7e25b9368513bb059c4c3e712e87346
[ "MIT" ]
2
2015-12-17T23:32:12.000Z
2016-04-16T12:49:01.000Z
mantis/adapters/postgresqladapter.py
Clivern/Mantis
5f80ef49d7e25b9368513bb059c4c3e712e87346
[ "MIT" ]
1
2017-01-17T15:58:12.000Z
2017-01-17T15:58:12.000Z
mantis/adapters/postgresqladapter.py
Clivern/Mantis
5f80ef49d7e25b9368513bb059c4c3e712e87346
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Mantis ~~~~~~ A Minimalist ORM for Python :copyright: (c) 2016 by Clivern (hello@clivern.com). :license: MIT, see LICENSE for more details. """ from __future__ import print_function class PostgreSQLAdapter(object): """PostgreSQL Adapter""" pass
17.941176
56
0.636066
35
305
5.4
0.914286
0
0
0
0
0
0
0
0
0
0
0.021097
0.222951
305
16
57
19.0625
0.776371
0.6
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
3599cde1718b8e4134ec05079ecc3ecc8e59ac8e
697
py
Python
mleus/nlp/legacy_libs_wrapper.py
AhmedHani/mleus
301eaa6419a6d25acf49b467f2e35dc3f9776d8a
[ "BSD-3-Clause" ]
null
null
null
mleus/nlp/legacy_libs_wrapper.py
AhmedHani/mleus
301eaa6419a6d25acf49b467f2e35dc3f9776d8a
[ "BSD-3-Clause" ]
null
null
null
mleus/nlp/legacy_libs_wrapper.py
AhmedHani/mleus
301eaa6419a6d25acf49b467f2e35dc3f9776d8a
[ "BSD-3-Clause" ]
null
null
null
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer


class SklearnWrapper(object):
    """Convenience wrapper around scikit-learn text-feature utilities."""

    def __init__(self, corpus):
        # corpus: a document (str) or list of documents, kept for later use.
        self.corpus = corpus
        self.tfidf_vectorizer = TfidfVectorizer()

    def train_tfidf(self, corpus):
        """Fit the TF-IDF vectorizer on `corpus` (a document or a list of documents)."""
        if isinstance(corpus, list):
            self.tfidf_vectorizer.fit(corpus)
        else:
            self.tfidf_vectorizer.fit([corpus])

    def get_tfidf_vectors(self, text):
        """Return TF-IDF vectors for `text`.

        A list of documents yields a list of vectors; a single document
        yields one vector.
        """
        if isinstance(text, list):
            return self.tfidf_vectorizer.transform(text).tolist()
        # BUG FIX: transform() expects an iterable of documents; passing a
        # bare string made each *character* a separate document.  Wrap the
        # single document in a list, mirroring train_tfidf above.
        return self.tfidf_vectorizer.transform([text]).tolist()[0]

    def get_important_words(self, text, n_words=20):
        # Not implemented yet.
        pass
25.814815
68
0.659971
81
697
5.481481
0.444444
0.101351
0.213964
0.099099
0.324324
0.198198
0.198198
0
0
0
0
0.005714
0.246772
697
26
69
26.807692
0.84
0
0
0.111111
0
0
0
0
0
0
0
0
0
1
0.222222
false
0.055556
0.166667
0
0.555556
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
35ca854dfa2889149d5b4c0813be067f55472a72
64
py
Python
apps/user/tests.py
liyao2598330/echidna
145c1345ea8ee25cfcc5d3eff867ae06ddea39e8
[ "MIT" ]
null
null
null
apps/user/tests.py
liyao2598330/echidna
145c1345ea8ee25cfcc5d3eff867ae06ddea39e8
[ "MIT" ]
null
null
null
apps/user/tests.py
liyao2598330/echidna
145c1345ea8ee25cfcc5d3eff867ae06ddea39e8
[ "MIT" ]
1
2020-10-19T14:13:41.000Z
2020-10-19T14:13:41.000Z
from django.test import TestCase

# Create your tests here.
# (Removed a stray no-op `list` expression statement that had been left
# in the module body by accident.)
16
32
0.796875
10
64
5.1
1
0
0
0
0
0
0
0
0
0
0
0
0.15625
64
3
33
21.333333
0.944444
0.359375
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
ea1725f591fb75bceb6eed9b76a28bc731d0e866
117
py
Python
bark_ml/library_wrappers/lib_fqf_iqn_qrdqn/model/__init__.py
bark-simulator/rl
84f9c74b60becbc4bc758e19b201d85a21880717
[ "MIT" ]
null
null
null
bark_ml/library_wrappers/lib_fqf_iqn_qrdqn/model/__init__.py
bark-simulator/rl
84f9c74b60becbc4bc758e19b201d85a21880717
[ "MIT" ]
null
null
null
bark_ml/library_wrappers/lib_fqf_iqn_qrdqn/model/__init__.py
bark-simulator/rl
84f9c74b60becbc4bc758e19b201d85a21880717
[ "MIT" ]
null
null
null
from .fqf import FQF from .iqn import IQN from .qrdqn import QRDQN from .imitation import Imitation, PolicyImitation
23.4
49
0.811966
17
117
5.588235
0.411765
0
0
0
0
0
0
0
0
0
0
0
0.145299
117
4
50
29.25
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
ea488733b9160af7339923182e9df5568d92ab8a
176
py
Python
alttprbot/alttprgen/randomizer/ffr.py
floresmatthew/sahasrahbot
a3fcc2aba9cd204331ce612ecf269d8a48a1ebc4
[ "MIT" ]
null
null
null
alttprbot/alttprgen/randomizer/ffr.py
floresmatthew/sahasrahbot
a3fcc2aba9cd204331ce612ecf269d8a48a1ebc4
[ "MIT" ]
null
null
null
alttprbot/alttprgen/randomizer/ffr.py
floresmatthew/sahasrahbot
a3fcc2aba9cd204331ce612ecf269d8a48a1ebc4
[ "MIT" ]
null
null
null
import random def roll_ffr(flags): seed = ('%008x' % random.randrange(16**8)).upper() return seed, f"https://finalfantasyrandomizer.com/Randomize?s={seed}&f={flags}"
25.142857
83
0.681818
24
176
4.958333
0.791667
0.084034
0
0
0
0
0
0
0
0
0
0.038961
0.125
176
6
84
29.333333
0.733766
0
0
0
0
0
0.386364
0
0
0
0
0
0
1
0.25
false
0
0.25
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
ea4f8fe67d5d17f38b4be8c9cd108188215f35bb
146
py
Python
src/Product.py
mamorukudo0927/CompePython
7e069a7eb7c0a3cca70cabe5a88687a406425377
[ "MIT" ]
null
null
null
src/Product.py
mamorukudo0927/CompePython
7e069a7eb7c0a3cca70cabe5a88687a406425377
[ "MIT" ]
null
null
null
src/Product.py
mamorukudo0927/CompePython
7e069a7eb7c0a3cca70cabe5a88687a406425377
[ "MIT" ]
null
null
null
# 1行に複数の文字なので、split()関数でList化→map()関数で数値に変換。 a,b = map(int, input().split()) # 条件演算子でそのまま判定結果を返して出力。 print('Even' if (a * b) % 2 == 0 else 'Odd')
36.5
45
0.650685
22
146
4.363636
0.818182
0.041667
0
0
0
0
0
0
0
0
0
0.023622
0.130137
146
4
45
36.5
0.724409
0.438356
0
0
0
0
0.0875
0
0
0
0
0
0
1
0
true
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
57863d718e773ba12d5779ee483edbeacefb4bcc
196
py
Python
musicTheory/run.py
dgole/audioReactiveFadeCandy
aa54325052efb6b1a09741ea4d26b824a1183c5e
[ "MIT" ]
1
2019-06-24T21:57:27.000Z
2019-06-24T21:57:27.000Z
musicTheory/run.py
dgole/audioReactiveFadeCandy
aa54325052efb6b1a09741ea4d26b824a1183c5e
[ "MIT" ]
1
2019-06-25T01:12:11.000Z
2019-06-25T01:12:11.000Z
musicTheory/run.py
dgole/audioReactiveFadeCandy
aa54325052efb6b1a09741ea4d26b824a1183c5e
[ "MIT" ]
null
null
null
import numpy as np import sys sys.path.append("../ar/") import fastopc, time import functionLib as lib import micStream import patterns as patterns #import Tkinter as tk patterns.beatDetection()
17.818182
27
0.790816
29
196
5.344828
0.586207
0
0
0
0
0
0
0
0
0
0
0
0.132653
196
10
28
19.6
0.911765
0.102041
0
0
0
0
0.034286
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
57b5379ab759ee27326e9c7e736fcd0126ab8ccc
3,725
py
Python
python/maya/site-packages/pymel-1.0.5/extras/completion/py/maya/test/brief/api/voxelizeMesh.py
CountZer0/PipelineConstructionSet
0aa73a8a63c72989b2d1c677efd78dad4388d335
[ "BSD-3-Clause" ]
21
2015-04-27T05:01:36.000Z
2021-11-22T13:45:14.000Z
python/maya/site-packages/pymel-1.0.5/extras/completion/py/maya/test/brief/api/voxelizeMesh.py
0xb1dd1e/PipelineConstructionSet
621349da1b6d1437e95d0c9e48ee9f36d59f19fd
[ "BSD-3-Clause" ]
null
null
null
python/maya/site-packages/pymel-1.0.5/extras/completion/py/maya/test/brief/api/voxelizeMesh.py
0xb1dd1e/PipelineConstructionSet
621349da1b6d1437e95d0c9e48ee9f36d59f19fd
[ "BSD-3-Clause" ]
7
2015-04-11T11:37:19.000Z
2020-05-22T09:49:04.000Z
""" start: voxelize meshes v.2 this script turns the selected meshes into cubes over the current timeline range. From Maya-Python mailing list http://zoomy.net/2010/02/25/voxelize-meshes-script-v2 http://groups.google.com/group/python_inside_maya/browse_thread/thread/3f973f5bddf35e15# TODO: Implement Dean's critique of this module the overall algorithm being used to voxelize the mesh is flawed. It shoots rays along the *edges* of the voxels then places the *centers* of the cubes it creates at the intersection points, That guarantees a halo of half-empty voxels around the mesh. Also, the shooting of rays is pretty hit-or-miss. There could be lots of geometry within a given voxel but none of it happens to lie along the ray, leading to that voxel being incorrectly discarded. A better algorithm would be to iterate over the mesh's triangles and clip them against the voxel boundaries, which is trivial given that the voxels are axis-aligned. """ import pymel.core.context as context import pymel.core.rendering as rendering import maya.cmds as cmds import pymel.util as util import pymel.core.runtime as runtime import pymel.api as api import pymel.core.system as system import pymel.core.uitypes as ui import pymel.core.uitypes as uitypes import pymel.core.nodetypes as nodetypes import pymel.core.nodetypes as nt import pymel.core.animation as animation import pymel.core.datatypes as dt import pymel.core.language as language import maya.OpenMaya as om import pymel.core.windows as windows import pymel.core.modeling as modeling import pymel.core.effects as effects from pymel.core.general import * from pymel.core.windows import * from pymel.core.system import * from pymel.core.animation import * from pymel.core.context import * from pymel.core.modeling import * from pymel.core.other import * from pymel.core.rendering import * from pymel.core.effects import * from pymel.core.language import Env from pymel.core.language import callbacks from pymel.core.language import 
MelConversionError from pymel.core.language import MelError from pymel.core.language import Mel from pymel.core.language import evalNoSelectNotify from pymel.core.language import Catch from pymel.core.language import getProcArguments from pymel.core.language import pythonToMel from pymel.core.language import stackTrace from pymel.core.language import resourceManager from pymel.core.language import isValidMelType from pymel.core.language import getMelType from pymel.core.language import conditionExists from pymel.core.language import OptionVarList from pymel.core.language import MelUnknownProcedureError from pymel.core.language import getLastError from pymel.core.language import MelArgumentError from pymel.core.language import evalEcho from pymel.core.language import getMelGlobal from pymel.core.language import OptionVarDict from pymel.core.language import MelGlobals from pymel.core.language import scriptJob from pymel.core.language import python from pymel.core.language import pythonToMelCmd from pymel.core.language import MelSyntaxError from pymel.core.language import setMelGlobal from pymel.core.language import waitCursor def voxelize(cubeSize=None): """ voxelize the currently selected mesh \param cubeSize float size of voxels """ pass def promptNumber(): pass def roundToFraction(input, fraction): """ # round to nearest fraction in decimal form: 1, .5, .25 """ pass def rayIntersect(fnMesh, point, direction=(0.0, 0.0, -1.0)): """ # shoot a ray from point in direction and return all hits within the mesh """ pass def makeProgBar(length): pass MELTYPES = [] optionVar = {} catch = None env = None _MeshIsectAccelParams = None
29.8
88
0.797315
552
3,725
5.373188
0.347826
0.154754
0.162171
0.198247
0.288604
0
0
0
0
0
0
0.009063
0.14094
3,725
124
89
30.040323
0.917813
0.309799
0
0.071429
0
0
0
0
0
0
0
0.008065
0
1
0.071429
false
0.071429
0.785714
0
0.857143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
57ca2551cc241f8ec8869ed20724d96a535db85e
47
py
Python
config.py
tencia/experiments
2be1cfa68c507cc31bb50d28500a0b2350457208
[ "MIT" ]
3
2016-01-14T23:15:06.000Z
2016-11-21T23:21:28.000Z
config.py
tencia/experiments
2be1cfa68c507cc31bb50d28500a0b2350457208
[ "MIT" ]
null
null
null
config.py
tencia/experiments
2be1cfa68c507cc31bb50d28500a0b2350457208
[ "MIT" ]
null
null
null
external_data = '/home/tencia/Documents/data/'
23.5
46
0.765957
6
47
5.833333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.06383
47
1
47
47
0.795455
0
0
0
0
0
0.595745
0.595745
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
57dd2d374d9be05a4cfd528c8e54c2ed47d770f8
184
py
Python
nrk/modules/moderation/_load.py
Slimmerd/DiscordBotPY
08c177cea8a1f3482f632288a971bded03b349b3
[ "Apache-2.0" ]
null
null
null
nrk/modules/moderation/_load.py
Slimmerd/DiscordBotPY
08c177cea8a1f3482f632288a971bded03b349b3
[ "Apache-2.0" ]
null
null
null
nrk/modules/moderation/_load.py
Slimmerd/DiscordBotPY
08c177cea8a1f3482f632288a971bded03b349b3
[ "Apache-2.0" ]
null
null
null
import hikari import lightbulb moderation_plugin = lightbulb.Plugin('Moderation') moderation_plugin.add_checks( lightbulb.has_guild_permissions(hikari.Permissions.MANAGE_GUILD) )
23
68
0.842391
21
184
7.095238
0.52381
0.214765
0
0
0
0
0
0
0
0
0
0
0.081522
184
7
69
26.285714
0.881657
0
0
0
0
0
0.054348
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
17d39d6d8170fea23db6939834c98d22236a2bd4
102
py
Python
io2.py
pschlump/haskell_3015_nov_21
067ea7d6b5cbac5f4853a0b42aa053d82ea6aa36
[ "MIT" ]
1
2017-11-21T22:16:42.000Z
2017-11-21T22:16:42.000Z
io2.py
pschlump/haskell_3015_nov_21
067ea7d6b5cbac5f4853a0b42aa053d82ea6aa36
[ "MIT" ]
null
null
null
io2.py
pschlump/haskell_3015_nov_21
067ea7d6b5cbac5f4853a0b42aa053d82ea6aa36
[ "MIT" ]
null
null
null
import sys for i in range ( 1, len(sys.argv) ): print ( sys.argv[i] + " ", end='' ) print ( "" )
17
39
0.509804
16
102
3.25
0.6875
0.269231
0
0
0
0
0
0
0
0
0
0.013333
0.264706
102
5
40
20.4
0.68
0
0
0
0
0
0.009804
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.5
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
17f7a71ceac91484d717ffe1b39d00b0e9298ea6
296
py
Python
TP3/1. Battleship/Jugador.py
FdelMazo/TDA1
a61dc6abe0a30b129c3b0f8b2df6f74331e08dc7
[ "MIT" ]
2
2020-10-10T17:34:16.000Z
2021-11-20T18:40:16.000Z
TP3/1. Battleship/Jugador.py
FdelMazo/7529rw-TDA
a61dc6abe0a30b129c3b0f8b2df6f74331e08dc7
[ "MIT" ]
null
null
null
TP3/1. Battleship/Jugador.py
FdelMazo/7529rw-TDA
a61dc6abe0a30b129c3b0f8b2df6f74331e08dc7
[ "MIT" ]
null
null
null
class Jugador(): def __init__(self, nombre): self.nombre = nombre def __str__(self): return self.nombre def elegirTargetsDeLaPartida(self, partida): """Recibe el estado del juego, NO LO MODIFICA (dummy/copy/simulacion) Devuelve todos los turnos a jugar""" raise NotImplementedError
26.909091
71
0.75
38
296
5.631579
0.763158
0.140187
0
0
0
0
0
0
0
0
0
0
0.155405
296
11
72
26.909091
0.856
0.337838
0
0
0
0
0
0
0
0
0
0.090909
0
1
0.428571
false
0
0
0.142857
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
1
1
0
0
4
aa211bdf21acbd49886ecf96c3ac0a3f1c66e001
914
py
Python
node/mirmap/__init__.py
RNAEDITINGPLUS/main
f608578defb122a1782cff39c5a9a60be0a900df
[ "Apache-2.0" ]
4
2018-01-17T17:03:42.000Z
2020-06-11T05:20:59.000Z
node/mirmap/__init__.py
RNAEDITINGPLUS/main
f608578defb122a1782cff39c5a9a60be0a900df
[ "Apache-2.0" ]
null
null
null
node/mirmap/__init__.py
RNAEDITINGPLUS/main
f608578defb122a1782cff39c5a9a60be0a900df
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright (C) 2011-2012 Charles E. Vejnar # # This is free software, licensed under the GNU General Public License v3. # See /LICENSE for more information. # """:class:`mm` and :class:`mmPP` base classes of :mod:`miRmap` that inherit their methods from all the modules. Each module define the methods for one category.""" from . import model from . import prob_binomial from . import report from . import targetscan from . import evolution from . import prob_exact from . import thermo class mm(evolution.mmEvolution, model.mmModel, prob_binomial.mmProbBinomial, prob_exact.mmProbExact, report.mmReport, thermo.mmThermo, targetscan.mmTargetScan): """miRNA and mRNA containing class.""" pass class mmPP(model.mmModel, prob_binomial.mmProbBinomial, report.mmReport, targetscan.mmTargetScan): """miRNA and mRNA containing class with pure Python methods only.""" pass
32.642857
163
0.751641
122
914
5.590164
0.581967
0.102639
0.041056
0.070381
0.255132
0.143695
0.143695
0
0
0
0
0.01287
0.149891
914
27
164
33.851852
0.864865
0.466083
0
0.181818
0
0
0
0
0
0
0
0
0
1
0
true
0.181818
0.636364
0
0.818182
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
aa336dcd5c6efc8749be84f7219a52bed708569f
104
py
Python
ipyselect2/_version.py
opentradesolutions/ipyselect2
68accd9523b8f85aaeb9f7e11bf0a12daa422f58
[ "MIT" ]
3
2019-12-25T17:55:45.000Z
2020-06-08T15:25:41.000Z
ipyselect2/_version.py
opentradesolutions/ipyselect2
68accd9523b8f85aaeb9f7e11bf0a12daa422f58
[ "MIT" ]
9
2019-10-07T12:09:35.000Z
2022-01-22T09:24:25.000Z
ipyselect2/_version.py
opentradesolutions/ipyselect2
68accd9523b8f85aaeb9f7e11bf0a12daa422f58
[ "MIT" ]
3
2019-11-04T03:17:04.000Z
2020-03-20T02:33:14.000Z
version_info = (0, 2, 1) __version__ = '%s.%s.%s' % (version_info[0], version_info[1], version_info[2])
34.666667
78
0.653846
18
104
3.333333
0.333333
0.733333
0.4
0
0
0
0
0
0
0
0
0.065217
0.115385
104
2
79
52
0.586957
0
0
0
0
0
0.076923
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
aa461ad127e45be20178fd70ea20f1a09701795f
5,744
py
Python
build_automation/content_management/migrations/0012_all_kinds_of_tags.py
mattjurenka/DLMS
0a69796b1b9940b37ee4ea7bc375a41dd63ec817
[ "MIT" ]
2
2018-08-02T23:38:32.000Z
2019-12-20T10:54:37.000Z
build_automation/content_management/migrations/0012_all_kinds_of_tags.py
mattjurenka/DLMS
0a69796b1b9940b37ee4ea7bc375a41dd63ec817
[ "MIT" ]
28
2018-02-23T21:20:31.000Z
2018-05-02T22:38:31.000Z
build_automation/content_management/migrations/0012_all_kinds_of_tags.py
mattjurenka/DLMS
0a69796b1b9940b37ee4ea7bc375a41dd63ec817
[ "MIT" ]
3
2019-11-16T03:54:48.000Z
2021-09-10T18:53:20.000Z
# Generated by Django 2.0.2 on 2018-04-06 23:45 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('content_management', '0011_directory_individual_files'), ] operations = [ migrations.CreateModel( name='Cataloger', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), ('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Coverage', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), ('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Creator', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), ('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Keyword', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), ('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Language', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), ('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Subject', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), 
('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Workarea', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50, unique=True)), ('description', models.CharField(max_length=200, null=True)), ], options={ 'ordering': ['name'], }, ), migrations.RemoveField( model_name='filtercriteria', name='directory', ), migrations.RemoveField( model_name='filtercriteria', name='left_criteria', ), migrations.RemoveField( model_name='filtercriteria', name='parent', ), migrations.RemoveField( model_name='filtercriteria', name='right_criteria', ), migrations.RemoveField( model_name='filtercriteria', name='tag', ), migrations.RemoveField( model_name='tag', name='parent', ), migrations.RemoveField( model_name='content', name='tag', ), migrations.DeleteModel( name='FilterCriteria', ), migrations.DeleteModel( name='Tag', ), migrations.AddField( model_name='content', name='cataloger', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='content_management.Cataloger'), ), migrations.AddField( model_name='content', name='coverage', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='content_management.Coverage'), ), migrations.AddField( model_name='content', name='creators', field=models.ManyToManyField(to='content_management.Creator'), ), migrations.AddField( model_name='content', name='keywords', field=models.ManyToManyField(to='content_management.Keyword'), ), migrations.AddField( model_name='content', name='language', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='content_management.Language'), ), migrations.AddField( model_name='content', name='subjects', field=models.ManyToManyField(to='content_management.Subject'), ), migrations.AddField( model_name='content', name='workareas', 
field=models.ManyToManyField(to='content_management.Workarea'), ), ]
35.677019
128
0.527681
488
5,744
6.071721
0.157787
0.070874
0.085049
0.113399
0.823827
0.823827
0.612555
0.574755
0.574755
0.574755
0
0.014237
0.339659
5,744
160
129
35.9
0.766939
0.007834
0
0.727273
1
0
0.134281
0.038266
0
0
0
0
0
1
0
false
0
0.012987
0
0.032468
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
35113fe1c9cfb11fdefea6ca4abb5c31e2f10ae5
289
py
Python
tests/test_dispatcher.py
openstack/deb-python-crank
dc2d95a6179e6029427342e9f56ed39a8e23634e
[ "MIT" ]
10
2016-09-14T21:58:44.000Z
2019-01-28T21:56:27.000Z
tests/test_dispatcher.py
openstack/deb-python-crank
dc2d95a6179e6029427342e9f56ed39a8e23634e
[ "MIT" ]
null
null
null
tests/test_dispatcher.py
openstack/deb-python-crank
dc2d95a6179e6029427342e9f56ed39a8e23634e
[ "MIT" ]
null
null
null
from nose.tools import raises from crank.dispatcher import * class TestDispatcher: def setup(self): self.dispatcher = Dispatcher() def test_create(self): pass @raises(NotImplementedError) def test_dispatch(self): self.dispatcher._dispatch(1,2)
18.0625
38
0.681661
33
289
5.878788
0.575758
0.082474
0.185567
0
0
0
0
0
0
0
0
0.009009
0.231834
289
15
39
19.266667
0.864865
0
0
0
0
0
0
0
0
0
0
0
0
1
0.3
false
0.1
0.2
0
0.6
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
35219710bed6bbab2c21262a14bd2f22f47875b6
226
py
Python
lyricpsych/extractors/__init__.py
mmc-tudelft/lyricpsych
29f5c1a64c914b09d1400df1732164d10c013312
[ "Apache-2.0" ]
null
null
null
lyricpsych/extractors/__init__.py
mmc-tudelft/lyricpsych
29f5c1a64c914b09d1400df1732164d10c013312
[ "Apache-2.0" ]
null
null
null
lyricpsych/extractors/__init__.py
mmc-tudelft/lyricpsych
29f5c1a64c914b09d1400df1732164d10c013312
[ "Apache-2.0" ]
null
null
null
from .inventory_score import InventoryScore from .linguistic_features import LinguisticFeature from .liwc import LIWC from .topic_model import TopicModel __all__ = ['InventoryScore', 'LinguisticFeature', 'LIWC', 'TopicModel']
37.666667
71
0.823009
24
226
7.458333
0.541667
0
0
0
0
0
0
0
0
0
0
0
0.097345
226
6
71
37.666667
0.877451
0
0
0
0
0
0.198238
0
0
0
0
0
0
1
0
false
0
0.8
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
103725de8076b49061ab5a579c2da43a5e4dcf2c
85
py
Python
src/webex-teams/list-rooms.py
fernando28024/git-clone-https-github.com-CiscoDevNet-devasc-code-examples
589dbd5d34f67f9b823159b731844432977b6490
[ "BSD-3-Clause" ]
43
2020-08-01T03:01:53.000Z
2022-02-17T12:43:27.000Z
webex-teams/list-rooms.py
abredes-devasc/devasc-labs
7c0e449151af3369d2ce91282e0bdfd6986282ca
[ "CC0-1.0" ]
2
2021-04-20T17:13:39.000Z
2021-09-23T23:35:12.000Z
webex-teams/list-rooms.py
abredes-devasc/devasc-labs
7c0e449151af3369d2ce91282e0bdfd6986282ca
[ "CC0-1.0" ]
14
2020-08-02T00:07:43.000Z
2022-03-15T22:25:39.000Z
# Fill in this file with the rooms/spaces listing code from the Webex Teams exercise
42.5
84
0.8
15
85
4.533333
0.933333
0
0
0
0
0
0
0
0
0
0
0
0.176471
85
1
85
85
0.971429
0.964706
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
10586bae00c7e1f623146d7d2b75bea601af0a33
140
py
Python
qal/common/tests/__init__.py
OptimalBPM/qal
4d7a31c0d68042b4110e1fa3e733711e0fdd473e
[ "Unlicense" ]
3
2016-05-02T14:35:55.000Z
2021-08-31T14:19:15.000Z
qal/common/tests/__init__.py
OptimalBPM/qal
4d7a31c0d68042b4110e1fa3e733711e0fdd473e
[ "Unlicense" ]
null
null
null
qal/common/tests/__init__.py
OptimalBPM/qal
4d7a31c0d68042b4110e1fa3e733711e0fdd473e
[ "Unlicense" ]
1
2018-03-18T13:19:52.000Z
2018-03-18T13:19:52.000Z
""" Tests for qal.common :copyright: Copyright 2010-2014 by Nicklas Boerjesson :license: BSD, see LICENSE for details. """
20
57
0.657143
17
140
5.411765
0.823529
0
0
0
0
0
0
0
0
0
0
0.075472
0.242857
140
6
58
23.333333
0.792453
0.821429
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
107a94f4c1967c4ad0b6bd16ec1a33dcf3940a8b
237
py
Python
django_backend/backend/base/views/__init__.py
holg/django_backend
6cef76a378664e6621619862e6db476788a58992
[ "BSD-3-Clause" ]
3
2015-09-10T07:10:49.000Z
2021-03-16T07:17:58.000Z
django_backend/backend/base/views/__init__.py
holg/django_backend
6cef76a378664e6621619862e6db476788a58992
[ "BSD-3-Clause" ]
10
2015-09-09T13:40:24.000Z
2021-02-27T09:12:23.000Z
django_backend/backend/base/views/__init__.py
holg/django_backend
6cef76a378664e6621619862e6db476788a58992
[ "BSD-3-Clause" ]
5
2016-06-12T08:20:38.000Z
2021-02-27T09:02:30.000Z
from .create import BackendCreateView from .list import BackendListView from .read import BackendReadView from .update import BackendUpdateView from .delete import BackendDeleteView from .select import BackendSelectView, SelectViewMixin
33.857143
54
0.864979
25
237
8.2
0.6
0
0
0
0
0
0
0
0
0
0
0
0.105485
237
6
55
39.5
0.966981
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
10a578b4128791a42d5969b5d0a99f737dd25ed6
112,856
py
Python
sdk/python/pulumi_google_native/appengine/v1beta/_inputs.py
AaronFriel/pulumi-google-native
75d1cda425e33d4610348972cd70bddf35f1770d
[ "Apache-2.0" ]
44
2021-04-18T23:00:48.000Z
2022-02-14T17:43:15.000Z
sdk/python/pulumi_google_native/appengine/v1beta/_inputs.py
AaronFriel/pulumi-google-native
75d1cda425e33d4610348972cd70bddf35f1770d
[ "Apache-2.0" ]
354
2021-04-16T16:48:39.000Z
2022-03-31T17:16:39.000Z
sdk/python/pulumi_google_native/appengine/v1beta/_inputs.py
AaronFriel/pulumi-google-native
75d1cda425e33d4610348972cd70bddf35f1770d
[ "Apache-2.0" ]
8
2021-04-24T17:46:51.000Z
2022-01-05T10:40:21.000Z
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from ._enums import * __all__ = [ 'ApiConfigHandlerArgs', 'ApiEndpointHandlerArgs', 'AutomaticScalingArgs', 'BasicScalingArgs', 'BuildInfoArgs', 'CertificateRawDataArgs', 'CloudBuildOptionsArgs', 'ContainerInfoArgs', 'CpuUtilizationArgs', 'CustomMetricArgs', 'DeploymentArgs', 'DiskUtilizationArgs', 'EndpointsApiServiceArgs', 'EntrypointArgs', 'ErrorHandlerArgs', 'FeatureSettingsArgs', 'HealthCheckArgs', 'IdentityAwareProxyArgs', 'LibraryArgs', 'LivenessCheckArgs', 'ManualScalingArgs', 'NetworkUtilizationArgs', 'NetworkArgs', 'ReadinessCheckArgs', 'RequestUtilizationArgs', 'ResourcesArgs', 'ScriptHandlerArgs', 'SslSettingsArgs', 'StandardSchedulerSettingsArgs', 'StaticFilesHandlerArgs', 'UrlDispatchRuleArgs', 'UrlMapArgs', 'VolumeArgs', 'VpcAccessConnectorArgs', 'ZipInfoArgs', ] @pulumi.input_type class ApiConfigHandlerArgs: def __init__(__self__, *, auth_fail_action: Optional[pulumi.Input['ApiConfigHandlerAuthFailAction']] = None, login: Optional[pulumi.Input['ApiConfigHandlerLogin']] = None, script: Optional[pulumi.Input[str]] = None, security_level: Optional[pulumi.Input['ApiConfigHandlerSecurityLevel']] = None, url: Optional[pulumi.Input[str]] = None): """ Google Cloud Endpoints (https://cloud.google.com/appengine/docs/python/endpoints/) configuration for API handlers. :param pulumi.Input['ApiConfigHandlerAuthFailAction'] auth_fail_action: Action to take when users access resources that require authentication. Defaults to redirect. :param pulumi.Input['ApiConfigHandlerLogin'] login: Level of login required to access this resource. Defaults to optional. :param pulumi.Input[str] script: Path to the script from the application root directory. 
:param pulumi.Input['ApiConfigHandlerSecurityLevel'] security_level: Security (HTTPS) enforcement for this URL. :param pulumi.Input[str] url: URL to serve the endpoint at. """ if auth_fail_action is not None: pulumi.set(__self__, "auth_fail_action", auth_fail_action) if login is not None: pulumi.set(__self__, "login", login) if script is not None: pulumi.set(__self__, "script", script) if security_level is not None: pulumi.set(__self__, "security_level", security_level) if url is not None: pulumi.set(__self__, "url", url) @property @pulumi.getter(name="authFailAction") def auth_fail_action(self) -> Optional[pulumi.Input['ApiConfigHandlerAuthFailAction']]: """ Action to take when users access resources that require authentication. Defaults to redirect. """ return pulumi.get(self, "auth_fail_action") @auth_fail_action.setter def auth_fail_action(self, value: Optional[pulumi.Input['ApiConfigHandlerAuthFailAction']]): pulumi.set(self, "auth_fail_action", value) @property @pulumi.getter def login(self) -> Optional[pulumi.Input['ApiConfigHandlerLogin']]: """ Level of login required to access this resource. Defaults to optional. """ return pulumi.get(self, "login") @login.setter def login(self, value: Optional[pulumi.Input['ApiConfigHandlerLogin']]): pulumi.set(self, "login", value) @property @pulumi.getter def script(self) -> Optional[pulumi.Input[str]]: """ Path to the script from the application root directory. """ return pulumi.get(self, "script") @script.setter def script(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "script", value) @property @pulumi.getter(name="securityLevel") def security_level(self) -> Optional[pulumi.Input['ApiConfigHandlerSecurityLevel']]: """ Security (HTTPS) enforcement for this URL. 
""" return pulumi.get(self, "security_level") @security_level.setter def security_level(self, value: Optional[pulumi.Input['ApiConfigHandlerSecurityLevel']]): pulumi.set(self, "security_level", value) @property @pulumi.getter def url(self) -> Optional[pulumi.Input[str]]: """ URL to serve the endpoint at. """ return pulumi.get(self, "url") @url.setter def url(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "url", value) @pulumi.input_type class ApiEndpointHandlerArgs: def __init__(__self__, *, script_path: Optional[pulumi.Input[str]] = None): """ Uses Google Cloud Endpoints to handle requests. :param pulumi.Input[str] script_path: Path to the script from the application root directory. """ if script_path is not None: pulumi.set(__self__, "script_path", script_path) @property @pulumi.getter(name="scriptPath") def script_path(self) -> Optional[pulumi.Input[str]]: """ Path to the script from the application root directory. """ return pulumi.get(self, "script_path") @script_path.setter def script_path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "script_path", value) @pulumi.input_type class AutomaticScalingArgs: def __init__(__self__, *, cool_down_period: Optional[pulumi.Input[str]] = None, cpu_utilization: Optional[pulumi.Input['CpuUtilizationArgs']] = None, custom_metrics: Optional[pulumi.Input[Sequence[pulumi.Input['CustomMetricArgs']]]] = None, disk_utilization: Optional[pulumi.Input['DiskUtilizationArgs']] = None, max_concurrent_requests: Optional[pulumi.Input[int]] = None, max_idle_instances: Optional[pulumi.Input[int]] = None, max_pending_latency: Optional[pulumi.Input[str]] = None, max_total_instances: Optional[pulumi.Input[int]] = None, min_idle_instances: Optional[pulumi.Input[int]] = None, min_pending_latency: Optional[pulumi.Input[str]] = None, min_total_instances: Optional[pulumi.Input[int]] = None, network_utilization: Optional[pulumi.Input['NetworkUtilizationArgs']] = None, request_utilization: 
Optional[pulumi.Input['RequestUtilizationArgs']] = None, standard_scheduler_settings: Optional[pulumi.Input['StandardSchedulerSettingsArgs']] = None): """ Automatic scaling is based on request rate, response latencies, and other application metrics. :param pulumi.Input[str] cool_down_period: The time period that the Autoscaler (https://cloud.google.com/compute/docs/autoscaler/) should wait before it starts collecting information from a new instance. This prevents the autoscaler from collecting information when the instance is initializing, during which the collected usage would not be reliable. Only applicable in the App Engine flexible environment. :param pulumi.Input['CpuUtilizationArgs'] cpu_utilization: Target scaling by CPU usage. :param pulumi.Input[Sequence[pulumi.Input['CustomMetricArgs']]] custom_metrics: Target scaling by user-provided metrics. Only applicable in the App Engine flexible environment. :param pulumi.Input['DiskUtilizationArgs'] disk_utilization: Target scaling by disk usage. :param pulumi.Input[int] max_concurrent_requests: Number of concurrent requests an automatic scaling instance can accept before the scheduler spawns a new instance.Defaults to a runtime-specific value. :param pulumi.Input[int] max_idle_instances: Maximum number of idle instances that should be maintained for this version. :param pulumi.Input[str] max_pending_latency: Maximum amount of time that a request should wait in the pending queue before starting a new instance to handle it. :param pulumi.Input[int] max_total_instances: Maximum number of instances that should be started to handle requests for this version. :param pulumi.Input[int] min_idle_instances: Minimum number of idle instances that should be maintained for this version. Only applicable for the default version of a service. :param pulumi.Input[str] min_pending_latency: Minimum amount of time a request should wait in the pending queue before starting a new instance to handle it. 
:param pulumi.Input[int] min_total_instances: Minimum number of running instances that should be maintained for this version. :param pulumi.Input['NetworkUtilizationArgs'] network_utilization: Target scaling by network usage. :param pulumi.Input['RequestUtilizationArgs'] request_utilization: Target scaling by request utilization. :param pulumi.Input['StandardSchedulerSettingsArgs'] standard_scheduler_settings: Scheduler settings for standard environment. """ if cool_down_period is not None: pulumi.set(__self__, "cool_down_period", cool_down_period) if cpu_utilization is not None: pulumi.set(__self__, "cpu_utilization", cpu_utilization) if custom_metrics is not None: pulumi.set(__self__, "custom_metrics", custom_metrics) if disk_utilization is not None: pulumi.set(__self__, "disk_utilization", disk_utilization) if max_concurrent_requests is not None: pulumi.set(__self__, "max_concurrent_requests", max_concurrent_requests) if max_idle_instances is not None: pulumi.set(__self__, "max_idle_instances", max_idle_instances) if max_pending_latency is not None: pulumi.set(__self__, "max_pending_latency", max_pending_latency) if max_total_instances is not None: pulumi.set(__self__, "max_total_instances", max_total_instances) if min_idle_instances is not None: pulumi.set(__self__, "min_idle_instances", min_idle_instances) if min_pending_latency is not None: pulumi.set(__self__, "min_pending_latency", min_pending_latency) if min_total_instances is not None: pulumi.set(__self__, "min_total_instances", min_total_instances) if network_utilization is not None: pulumi.set(__self__, "network_utilization", network_utilization) if request_utilization is not None: pulumi.set(__self__, "request_utilization", request_utilization) if standard_scheduler_settings is not None: pulumi.set(__self__, "standard_scheduler_settings", standard_scheduler_settings) @property @pulumi.getter(name="coolDownPeriod") def cool_down_period(self) -> Optional[pulumi.Input[str]]: """ The time period that the 
Autoscaler (https://cloud.google.com/compute/docs/autoscaler/) should wait before it starts collecting information from a new instance. This prevents the autoscaler from collecting information when the instance is initializing, during which the collected usage would not be reliable. Only applicable in the App Engine flexible environment. """ return pulumi.get(self, "cool_down_period") @cool_down_period.setter def cool_down_period(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cool_down_period", value) @property @pulumi.getter(name="cpuUtilization") def cpu_utilization(self) -> Optional[pulumi.Input['CpuUtilizationArgs']]: """ Target scaling by CPU usage. """ return pulumi.get(self, "cpu_utilization") @cpu_utilization.setter def cpu_utilization(self, value: Optional[pulumi.Input['CpuUtilizationArgs']]): pulumi.set(self, "cpu_utilization", value) @property @pulumi.getter(name="customMetrics") def custom_metrics(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CustomMetricArgs']]]]: """ Target scaling by user-provided metrics. Only applicable in the App Engine flexible environment. """ return pulumi.get(self, "custom_metrics") @custom_metrics.setter def custom_metrics(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CustomMetricArgs']]]]): pulumi.set(self, "custom_metrics", value) @property @pulumi.getter(name="diskUtilization") def disk_utilization(self) -> Optional[pulumi.Input['DiskUtilizationArgs']]: """ Target scaling by disk usage. """ return pulumi.get(self, "disk_utilization") @disk_utilization.setter def disk_utilization(self, value: Optional[pulumi.Input['DiskUtilizationArgs']]): pulumi.set(self, "disk_utilization", value) @property @pulumi.getter(name="maxConcurrentRequests") def max_concurrent_requests(self) -> Optional[pulumi.Input[int]]: """ Number of concurrent requests an automatic scaling instance can accept before the scheduler spawns a new instance.Defaults to a runtime-specific value. 
""" return pulumi.get(self, "max_concurrent_requests") @max_concurrent_requests.setter def max_concurrent_requests(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "max_concurrent_requests", value) @property @pulumi.getter(name="maxIdleInstances") def max_idle_instances(self) -> Optional[pulumi.Input[int]]: """ Maximum number of idle instances that should be maintained for this version. """ return pulumi.get(self, "max_idle_instances") @max_idle_instances.setter def max_idle_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "max_idle_instances", value) @property @pulumi.getter(name="maxPendingLatency") def max_pending_latency(self) -> Optional[pulumi.Input[str]]: """ Maximum amount of time that a request should wait in the pending queue before starting a new instance to handle it. """ return pulumi.get(self, "max_pending_latency") @max_pending_latency.setter def max_pending_latency(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "max_pending_latency", value) @property @pulumi.getter(name="maxTotalInstances") def max_total_instances(self) -> Optional[pulumi.Input[int]]: """ Maximum number of instances that should be started to handle requests for this version. """ return pulumi.get(self, "max_total_instances") @max_total_instances.setter def max_total_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "max_total_instances", value) @property @pulumi.getter(name="minIdleInstances") def min_idle_instances(self) -> Optional[pulumi.Input[int]]: """ Minimum number of idle instances that should be maintained for this version. Only applicable for the default version of a service. 
""" return pulumi.get(self, "min_idle_instances") @min_idle_instances.setter def min_idle_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "min_idle_instances", value) @property @pulumi.getter(name="minPendingLatency") def min_pending_latency(self) -> Optional[pulumi.Input[str]]: """ Minimum amount of time a request should wait in the pending queue before starting a new instance to handle it. """ return pulumi.get(self, "min_pending_latency") @min_pending_latency.setter def min_pending_latency(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "min_pending_latency", value) @property @pulumi.getter(name="minTotalInstances") def min_total_instances(self) -> Optional[pulumi.Input[int]]: """ Minimum number of running instances that should be maintained for this version. """ return pulumi.get(self, "min_total_instances") @min_total_instances.setter def min_total_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "min_total_instances", value) @property @pulumi.getter(name="networkUtilization") def network_utilization(self) -> Optional[pulumi.Input['NetworkUtilizationArgs']]: """ Target scaling by network usage. """ return pulumi.get(self, "network_utilization") @network_utilization.setter def network_utilization(self, value: Optional[pulumi.Input['NetworkUtilizationArgs']]): pulumi.set(self, "network_utilization", value) @property @pulumi.getter(name="requestUtilization") def request_utilization(self) -> Optional[pulumi.Input['RequestUtilizationArgs']]: """ Target scaling by request utilization. """ return pulumi.get(self, "request_utilization") @request_utilization.setter def request_utilization(self, value: Optional[pulumi.Input['RequestUtilizationArgs']]): pulumi.set(self, "request_utilization", value) @property @pulumi.getter(name="standardSchedulerSettings") def standard_scheduler_settings(self) -> Optional[pulumi.Input['StandardSchedulerSettingsArgs']]: """ Scheduler settings for standard environment. 
""" return pulumi.get(self, "standard_scheduler_settings") @standard_scheduler_settings.setter def standard_scheduler_settings(self, value: Optional[pulumi.Input['StandardSchedulerSettingsArgs']]): pulumi.set(self, "standard_scheduler_settings", value) @pulumi.input_type class BasicScalingArgs: def __init__(__self__, *, idle_timeout: Optional[pulumi.Input[str]] = None, max_instances: Optional[pulumi.Input[int]] = None): """ A service with basic scaling will create an instance when the application receives a request. The instance will be turned down when the app becomes idle. Basic scaling is ideal for work that is intermittent or driven by user activity. :param pulumi.Input[str] idle_timeout: Duration of time after the last request that an instance must wait before the instance is shut down. :param pulumi.Input[int] max_instances: Maximum number of instances to create for this version. """ if idle_timeout is not None: pulumi.set(__self__, "idle_timeout", idle_timeout) if max_instances is not None: pulumi.set(__self__, "max_instances", max_instances) @property @pulumi.getter(name="idleTimeout") def idle_timeout(self) -> Optional[pulumi.Input[str]]: """ Duration of time after the last request that an instance must wait before the instance is shut down. """ return pulumi.get(self, "idle_timeout") @idle_timeout.setter def idle_timeout(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "idle_timeout", value) @property @pulumi.getter(name="maxInstances") def max_instances(self) -> Optional[pulumi.Input[int]]: """ Maximum number of instances to create for this version. """ return pulumi.get(self, "max_instances") @max_instances.setter def max_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "max_instances", value) @pulumi.input_type class BuildInfoArgs: def __init__(__self__, *, cloud_build_id: Optional[pulumi.Input[str]] = None): """ Google Cloud Build information. :param pulumi.Input[str] cloud_build_id: The Google Cloud Build id. 
Example: "f966068f-08b2-42c8-bdfe-74137dff2bf9" """ if cloud_build_id is not None: pulumi.set(__self__, "cloud_build_id", cloud_build_id) @property @pulumi.getter(name="cloudBuildId") def cloud_build_id(self) -> Optional[pulumi.Input[str]]: """ The Google Cloud Build id. Example: "f966068f-08b2-42c8-bdfe-74137dff2bf9" """ return pulumi.get(self, "cloud_build_id") @cloud_build_id.setter def cloud_build_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cloud_build_id", value) @pulumi.input_type class CertificateRawDataArgs: def __init__(__self__, *, private_key: Optional[pulumi.Input[str]] = None, public_certificate: Optional[pulumi.Input[str]] = None): """ An SSL certificate obtained from a certificate authority. :param pulumi.Input[str] private_key: Unencrypted PEM encoded RSA private key. This field is set once on certificate creation and then encrypted. The key size must be 2048 bits or fewer. Must include the header and footer. Example: -----BEGIN RSA PRIVATE KEY----- -----END RSA PRIVATE KEY----- @InputOnly :param pulumi.Input[str] public_certificate: PEM encoded x.509 public key certificate. This field is set once on certificate creation. Must include the header and footer. Example: -----BEGIN CERTIFICATE----- -----END CERTIFICATE----- """ if private_key is not None: pulumi.set(__self__, "private_key", private_key) if public_certificate is not None: pulumi.set(__self__, "public_certificate", public_certificate) @property @pulumi.getter(name="privateKey") def private_key(self) -> Optional[pulumi.Input[str]]: """ Unencrypted PEM encoded RSA private key. This field is set once on certificate creation and then encrypted. The key size must be 2048 bits or fewer. Must include the header and footer. 
Example: -----BEGIN RSA PRIVATE KEY----- -----END RSA PRIVATE KEY----- @InputOnly """ return pulumi.get(self, "private_key") @private_key.setter def private_key(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "private_key", value) @property @pulumi.getter(name="publicCertificate") def public_certificate(self) -> Optional[pulumi.Input[str]]: """ PEM encoded x.509 public key certificate. This field is set once on certificate creation. Must include the header and footer. Example: -----BEGIN CERTIFICATE----- -----END CERTIFICATE----- """ return pulumi.get(self, "public_certificate") @public_certificate.setter def public_certificate(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "public_certificate", value) @pulumi.input_type class CloudBuildOptionsArgs: def __init__(__self__, *, app_yaml_path: Optional[pulumi.Input[str]] = None, cloud_build_timeout: Optional[pulumi.Input[str]] = None): """ Options for the build operations performed as a part of the version deployment. Only applicable for App Engine flexible environment when creating a version using source code directly. :param pulumi.Input[str] app_yaml_path: Path to the yaml file used in deployment, used to determine runtime configuration details.Required for flexible environment builds.See https://cloud.google.com/appengine/docs/standard/python/config/appref for more details. :param pulumi.Input[str] cloud_build_timeout: The Cloud Build timeout used as part of any dependent builds performed by version creation. Defaults to 10 minutes. 
""" if app_yaml_path is not None: pulumi.set(__self__, "app_yaml_path", app_yaml_path) if cloud_build_timeout is not None: pulumi.set(__self__, "cloud_build_timeout", cloud_build_timeout) @property @pulumi.getter(name="appYamlPath") def app_yaml_path(self) -> Optional[pulumi.Input[str]]: """ Path to the yaml file used in deployment, used to determine runtime configuration details.Required for flexible environment builds.See https://cloud.google.com/appengine/docs/standard/python/config/appref for more details. """ return pulumi.get(self, "app_yaml_path") @app_yaml_path.setter def app_yaml_path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "app_yaml_path", value) @property @pulumi.getter(name="cloudBuildTimeout") def cloud_build_timeout(self) -> Optional[pulumi.Input[str]]: """ The Cloud Build timeout used as part of any dependent builds performed by version creation. Defaults to 10 minutes. """ return pulumi.get(self, "cloud_build_timeout") @cloud_build_timeout.setter def cloud_build_timeout(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "cloud_build_timeout", value) @pulumi.input_type class ContainerInfoArgs: def __init__(__self__, *, image: Optional[pulumi.Input[str]] = None): """ Docker image that is used to create a container and start a VM instance for the version that you deploy. Only applicable for instances running in the App Engine flexible environment. :param pulumi.Input[str] image: URI to the hosted container image in Google Container Registry. The URI must be fully qualified and include a tag or digest. Examples: "gcr.io/my-project/image:tag" or "gcr.io/my-project/image@digest" """ if image is not None: pulumi.set(__self__, "image", image) @property @pulumi.getter def image(self) -> Optional[pulumi.Input[str]]: """ URI to the hosted container image in Google Container Registry. The URI must be fully qualified and include a tag or digest. 
Examples: "gcr.io/my-project/image:tag" or "gcr.io/my-project/image@digest" """ return pulumi.get(self, "image") @image.setter def image(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "image", value) @pulumi.input_type class CpuUtilizationArgs: def __init__(__self__, *, aggregation_window_length: Optional[pulumi.Input[str]] = None, target_utilization: Optional[pulumi.Input[float]] = None): """ Target scaling by CPU usage. :param pulumi.Input[str] aggregation_window_length: Period of time over which CPU utilization is calculated. :param pulumi.Input[float] target_utilization: Target CPU utilization ratio to maintain when scaling. Must be between 0 and 1. """ if aggregation_window_length is not None: pulumi.set(__self__, "aggregation_window_length", aggregation_window_length) if target_utilization is not None: pulumi.set(__self__, "target_utilization", target_utilization) @property @pulumi.getter(name="aggregationWindowLength") def aggregation_window_length(self) -> Optional[pulumi.Input[str]]: """ Period of time over which CPU utilization is calculated. """ return pulumi.get(self, "aggregation_window_length") @aggregation_window_length.setter def aggregation_window_length(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "aggregation_window_length", value) @property @pulumi.getter(name="targetUtilization") def target_utilization(self) -> Optional[pulumi.Input[float]]: """ Target CPU utilization ratio to maintain when scaling. Must be between 0 and 1. 
""" return pulumi.get(self, "target_utilization") @target_utilization.setter def target_utilization(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "target_utilization", value) @pulumi.input_type class CustomMetricArgs: def __init__(__self__, *, filter: Optional[pulumi.Input[str]] = None, metric_name: Optional[pulumi.Input[str]] = None, single_instance_assignment: Optional[pulumi.Input[float]] = None, target_type: Optional[pulumi.Input[str]] = None, target_utilization: Optional[pulumi.Input[float]] = None): """ Allows autoscaling based on Stackdriver metrics. :param pulumi.Input[str] filter: Allows filtering on the metric's fields. :param pulumi.Input[str] metric_name: The name of the metric. :param pulumi.Input[float] single_instance_assignment: May be used instead of target_utilization when an instance can handle a specific amount of work/resources and the metric value is equal to the current amount of work remaining. The autoscaler will try to keep the number of instances equal to the metric value divided by single_instance_assignment. :param pulumi.Input[str] target_type: The type of the metric. Must be a string representing a Stackdriver metric type e.g. GAGUE, DELTA_PER_SECOND, etc. :param pulumi.Input[float] target_utilization: The target value for the metric. """ if filter is not None: pulumi.set(__self__, "filter", filter) if metric_name is not None: pulumi.set(__self__, "metric_name", metric_name) if single_instance_assignment is not None: pulumi.set(__self__, "single_instance_assignment", single_instance_assignment) if target_type is not None: pulumi.set(__self__, "target_type", target_type) if target_utilization is not None: pulumi.set(__self__, "target_utilization", target_utilization) @property @pulumi.getter def filter(self) -> Optional[pulumi.Input[str]]: """ Allows filtering on the metric's fields. 
""" return pulumi.get(self, "filter") @filter.setter def filter(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "filter", value) @property @pulumi.getter(name="metricName") def metric_name(self) -> Optional[pulumi.Input[str]]: """ The name of the metric. """ return pulumi.get(self, "metric_name") @metric_name.setter def metric_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "metric_name", value) @property @pulumi.getter(name="singleInstanceAssignment") def single_instance_assignment(self) -> Optional[pulumi.Input[float]]: """ May be used instead of target_utilization when an instance can handle a specific amount of work/resources and the metric value is equal to the current amount of work remaining. The autoscaler will try to keep the number of instances equal to the metric value divided by single_instance_assignment. """ return pulumi.get(self, "single_instance_assignment") @single_instance_assignment.setter def single_instance_assignment(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "single_instance_assignment", value) @property @pulumi.getter(name="targetType") def target_type(self) -> Optional[pulumi.Input[str]]: """ The type of the metric. Must be a string representing a Stackdriver metric type e.g. GAGUE, DELTA_PER_SECOND, etc. """ return pulumi.get(self, "target_type") @target_type.setter def target_type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "target_type", value) @property @pulumi.getter(name="targetUtilization") def target_utilization(self) -> Optional[pulumi.Input[float]]: """ The target value for the metric. 
""" return pulumi.get(self, "target_utilization") @target_utilization.setter def target_utilization(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "target_utilization", value) @pulumi.input_type class DeploymentArgs: def __init__(__self__, *, build: Optional[pulumi.Input['BuildInfoArgs']] = None, cloud_build_options: Optional[pulumi.Input['CloudBuildOptionsArgs']] = None, container: Optional[pulumi.Input['ContainerInfoArgs']] = None, files: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, zip: Optional[pulumi.Input['ZipInfoArgs']] = None): """ Code and application artifacts used to deploy a version to App Engine. :param pulumi.Input['BuildInfoArgs'] build: Google Cloud Build build information. Only applicable for instances running in the App Engine flexible environment. :param pulumi.Input['CloudBuildOptionsArgs'] cloud_build_options: Options for any Google Cloud Build builds created as a part of this deployment.These options will only be used if a new build is created, such as when deploying to the App Engine flexible environment using files or zip. :param pulumi.Input['ContainerInfoArgs'] container: The Docker image for the container that runs the version. Only applicable for instances running in the App Engine flexible environment. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] files: Manifest of the files stored in Google Cloud Storage that are included as part of this version. All files must be readable using the credentials supplied with this call. :param pulumi.Input['ZipInfoArgs'] zip: The zip file for this deployment, if this is a zip deployment. 
""" if build is not None: pulumi.set(__self__, "build", build) if cloud_build_options is not None: pulumi.set(__self__, "cloud_build_options", cloud_build_options) if container is not None: pulumi.set(__self__, "container", container) if files is not None: pulumi.set(__self__, "files", files) if zip is not None: pulumi.set(__self__, "zip", zip) @property @pulumi.getter def build(self) -> Optional[pulumi.Input['BuildInfoArgs']]: """ Google Cloud Build build information. Only applicable for instances running in the App Engine flexible environment. """ return pulumi.get(self, "build") @build.setter def build(self, value: Optional[pulumi.Input['BuildInfoArgs']]): pulumi.set(self, "build", value) @property @pulumi.getter(name="cloudBuildOptions") def cloud_build_options(self) -> Optional[pulumi.Input['CloudBuildOptionsArgs']]: """ Options for any Google Cloud Build builds created as a part of this deployment.These options will only be used if a new build is created, such as when deploying to the App Engine flexible environment using files or zip. """ return pulumi.get(self, "cloud_build_options") @cloud_build_options.setter def cloud_build_options(self, value: Optional[pulumi.Input['CloudBuildOptionsArgs']]): pulumi.set(self, "cloud_build_options", value) @property @pulumi.getter def container(self) -> Optional[pulumi.Input['ContainerInfoArgs']]: """ The Docker image for the container that runs the version. Only applicable for instances running in the App Engine flexible environment. """ return pulumi.get(self, "container") @container.setter def container(self, value: Optional[pulumi.Input['ContainerInfoArgs']]): pulumi.set(self, "container", value) @property @pulumi.getter def files(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Manifest of the files stored in Google Cloud Storage that are included as part of this version. All files must be readable using the credentials supplied with this call. 
""" return pulumi.get(self, "files") @files.setter def files(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "files", value) @property @pulumi.getter def zip(self) -> Optional[pulumi.Input['ZipInfoArgs']]: """ The zip file for this deployment, if this is a zip deployment. """ return pulumi.get(self, "zip") @zip.setter def zip(self, value: Optional[pulumi.Input['ZipInfoArgs']]): pulumi.set(self, "zip", value) @pulumi.input_type class DiskUtilizationArgs: def __init__(__self__, *, target_read_bytes_per_second: Optional[pulumi.Input[int]] = None, target_read_ops_per_second: Optional[pulumi.Input[int]] = None, target_write_bytes_per_second: Optional[pulumi.Input[int]] = None, target_write_ops_per_second: Optional[pulumi.Input[int]] = None): """ Target scaling by disk usage. Only applicable in the App Engine flexible environment. :param pulumi.Input[int] target_read_bytes_per_second: Target bytes read per second. :param pulumi.Input[int] target_read_ops_per_second: Target ops read per seconds. :param pulumi.Input[int] target_write_bytes_per_second: Target bytes written per second. :param pulumi.Input[int] target_write_ops_per_second: Target ops written per second. """ if target_read_bytes_per_second is not None: pulumi.set(__self__, "target_read_bytes_per_second", target_read_bytes_per_second) if target_read_ops_per_second is not None: pulumi.set(__self__, "target_read_ops_per_second", target_read_ops_per_second) if target_write_bytes_per_second is not None: pulumi.set(__self__, "target_write_bytes_per_second", target_write_bytes_per_second) if target_write_ops_per_second is not None: pulumi.set(__self__, "target_write_ops_per_second", target_write_ops_per_second) @property @pulumi.getter(name="targetReadBytesPerSecond") def target_read_bytes_per_second(self) -> Optional[pulumi.Input[int]]: """ Target bytes read per second. 
""" return pulumi.get(self, "target_read_bytes_per_second") @target_read_bytes_per_second.setter def target_read_bytes_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_read_bytes_per_second", value) @property @pulumi.getter(name="targetReadOpsPerSecond") def target_read_ops_per_second(self) -> Optional[pulumi.Input[int]]: """ Target ops read per seconds. """ return pulumi.get(self, "target_read_ops_per_second") @target_read_ops_per_second.setter def target_read_ops_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_read_ops_per_second", value) @property @pulumi.getter(name="targetWriteBytesPerSecond") def target_write_bytes_per_second(self) -> Optional[pulumi.Input[int]]: """ Target bytes written per second. """ return pulumi.get(self, "target_write_bytes_per_second") @target_write_bytes_per_second.setter def target_write_bytes_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_write_bytes_per_second", value) @property @pulumi.getter(name="targetWriteOpsPerSecond") def target_write_ops_per_second(self) -> Optional[pulumi.Input[int]]: """ Target ops written per second. """ return pulumi.get(self, "target_write_ops_per_second") @target_write_ops_per_second.setter def target_write_ops_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_write_ops_per_second", value) @pulumi.input_type class EndpointsApiServiceArgs: def __init__(__self__, *, config_id: Optional[pulumi.Input[str]] = None, disable_trace_sampling: Optional[pulumi.Input[bool]] = None, name: Optional[pulumi.Input[str]] = None, rollout_strategy: Optional[pulumi.Input['EndpointsApiServiceRolloutStrategy']] = None): """ Cloud Endpoints (https://cloud.google.com/endpoints) configuration. The Endpoints API Service provides tooling for serving Open API and gRPC endpoints via an NGINX proxy. 
Only valid for App Engine Flexible environment deployments.The fields here refer to the name and configuration ID of a "service" resource in the Service Management API (https://cloud.google.com/service-management/overview). :param pulumi.Input[str] config_id: Endpoints service configuration ID as specified by the Service Management API. For example "2016-09-19r1".By default, the rollout strategy for Endpoints is RolloutStrategy.FIXED. This means that Endpoints starts up with a particular configuration ID. When a new configuration is rolled out, Endpoints must be given the new configuration ID. The config_id field is used to give the configuration ID and is required in this case.Endpoints also has a rollout strategy called RolloutStrategy.MANAGED. When using this, Endpoints fetches the latest configuration and does not need the configuration ID. In this case, config_id must be omitted. :param pulumi.Input[bool] disable_trace_sampling: Enable or disable trace sampling. By default, this is set to false for enabled. :param pulumi.Input[str] name: Endpoints service name which is the name of the "service" resource in the Service Management API. For example "myapi.endpoints.myproject.cloud.goog" :param pulumi.Input['EndpointsApiServiceRolloutStrategy'] rollout_strategy: Endpoints rollout strategy. If FIXED, config_id must be specified. If MANAGED, config_id must be omitted. """ if config_id is not None: pulumi.set(__self__, "config_id", config_id) if disable_trace_sampling is not None: pulumi.set(__self__, "disable_trace_sampling", disable_trace_sampling) if name is not None: pulumi.set(__self__, "name", name) if rollout_strategy is not None: pulumi.set(__self__, "rollout_strategy", rollout_strategy) @property @pulumi.getter(name="configId") def config_id(self) -> Optional[pulumi.Input[str]]: """ Endpoints service configuration ID as specified by the Service Management API. For example "2016-09-19r1".By default, the rollout strategy for Endpoints is RolloutStrategy.FIXED. 
This means that Endpoints starts up with a particular configuration ID. When a new configuration is rolled out, Endpoints must be given the new configuration ID. The config_id field is used to give the configuration ID and is required in this case.Endpoints also has a rollout strategy called RolloutStrategy.MANAGED. When using this, Endpoints fetches the latest configuration and does not need the configuration ID. In this case, config_id must be omitted. """ return pulumi.get(self, "config_id") @config_id.setter def config_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "config_id", value) @property @pulumi.getter(name="disableTraceSampling") def disable_trace_sampling(self) -> Optional[pulumi.Input[bool]]: """ Enable or disable trace sampling. By default, this is set to false for enabled. """ return pulumi.get(self, "disable_trace_sampling") @disable_trace_sampling.setter def disable_trace_sampling(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "disable_trace_sampling", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Endpoints service name which is the name of the "service" resource in the Service Management API. For example "myapi.endpoints.myproject.cloud.goog" """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="rolloutStrategy") def rollout_strategy(self) -> Optional[pulumi.Input['EndpointsApiServiceRolloutStrategy']]: """ Endpoints rollout strategy. If FIXED, config_id must be specified. If MANAGED, config_id must be omitted. """ return pulumi.get(self, "rollout_strategy") @rollout_strategy.setter def rollout_strategy(self, value: Optional[pulumi.Input['EndpointsApiServiceRolloutStrategy']]): pulumi.set(self, "rollout_strategy", value) @pulumi.input_type class EntrypointArgs: def __init__(__self__, *, shell: Optional[pulumi.Input[str]] = None): """ The entrypoint for the application. 
:param pulumi.Input[str] shell: The format should be a shell command that can be fed to bash -c. """ if shell is not None: pulumi.set(__self__, "shell", shell) @property @pulumi.getter def shell(self) -> Optional[pulumi.Input[str]]: """ The format should be a shell command that can be fed to bash -c. """ return pulumi.get(self, "shell") @shell.setter def shell(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "shell", value) @pulumi.input_type class ErrorHandlerArgs: def __init__(__self__, *, error_code: Optional[pulumi.Input['ErrorHandlerErrorCode']] = None, mime_type: Optional[pulumi.Input[str]] = None, static_file: Optional[pulumi.Input[str]] = None): """ Custom static error page to be served when an error occurs. :param pulumi.Input['ErrorHandlerErrorCode'] error_code: Error condition this handler applies to. :param pulumi.Input[str] mime_type: MIME type of file. Defaults to text/html. :param pulumi.Input[str] static_file: Static file content to be served for this error. """ if error_code is not None: pulumi.set(__self__, "error_code", error_code) if mime_type is not None: pulumi.set(__self__, "mime_type", mime_type) if static_file is not None: pulumi.set(__self__, "static_file", static_file) @property @pulumi.getter(name="errorCode") def error_code(self) -> Optional[pulumi.Input['ErrorHandlerErrorCode']]: """ Error condition this handler applies to. """ return pulumi.get(self, "error_code") @error_code.setter def error_code(self, value: Optional[pulumi.Input['ErrorHandlerErrorCode']]): pulumi.set(self, "error_code", value) @property @pulumi.getter(name="mimeType") def mime_type(self) -> Optional[pulumi.Input[str]]: """ MIME type of file. Defaults to text/html. 
""" return pulumi.get(self, "mime_type") @mime_type.setter def mime_type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "mime_type", value) @property @pulumi.getter(name="staticFile") def static_file(self) -> Optional[pulumi.Input[str]]: """ Static file content to be served for this error. """ return pulumi.get(self, "static_file") @static_file.setter def static_file(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "static_file", value) @pulumi.input_type class FeatureSettingsArgs: def __init__(__self__, *, split_health_checks: Optional[pulumi.Input[bool]] = None, use_container_optimized_os: Optional[pulumi.Input[bool]] = None): """ The feature specific settings to be used in the application. These define behaviors that are user configurable. :param pulumi.Input[bool] split_health_checks: Boolean value indicating if split health checks should be used instead of the legacy health checks. At an app.yaml level, this means defaulting to 'readiness_check' and 'liveness_check' values instead of 'health_check' ones. Once the legacy 'health_check' behavior is deprecated, and this value is always true, this setting can be removed. :param pulumi.Input[bool] use_container_optimized_os: If true, use Container-Optimized OS (https://cloud.google.com/container-optimized-os/) base image for VMs, rather than a base Debian image. """ if split_health_checks is not None: pulumi.set(__self__, "split_health_checks", split_health_checks) if use_container_optimized_os is not None: pulumi.set(__self__, "use_container_optimized_os", use_container_optimized_os) @property @pulumi.getter(name="splitHealthChecks") def split_health_checks(self) -> Optional[pulumi.Input[bool]]: """ Boolean value indicating if split health checks should be used instead of the legacy health checks. At an app.yaml level, this means defaulting to 'readiness_check' and 'liveness_check' values instead of 'health_check' ones. 
Once the legacy 'health_check' behavior is deprecated, and this value is always true, this setting can be removed. """ return pulumi.get(self, "split_health_checks") @split_health_checks.setter def split_health_checks(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "split_health_checks", value) @property @pulumi.getter(name="useContainerOptimizedOs") def use_container_optimized_os(self) -> Optional[pulumi.Input[bool]]: """ If true, use Container-Optimized OS (https://cloud.google.com/container-optimized-os/) base image for VMs, rather than a base Debian image. """ return pulumi.get(self, "use_container_optimized_os") @use_container_optimized_os.setter def use_container_optimized_os(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "use_container_optimized_os", value) @pulumi.input_type class HealthCheckArgs: def __init__(__self__, *, check_interval: Optional[pulumi.Input[str]] = None, disable_health_check: Optional[pulumi.Input[bool]] = None, healthy_threshold: Optional[pulumi.Input[int]] = None, host: Optional[pulumi.Input[str]] = None, restart_threshold: Optional[pulumi.Input[int]] = None, timeout: Optional[pulumi.Input[str]] = None, unhealthy_threshold: Optional[pulumi.Input[int]] = None): """ Health checking configuration for VM instances. Unhealthy instances are killed and replaced with new instances. Only applicable for instances in App Engine flexible environment. :param pulumi.Input[str] check_interval: Interval between health checks. :param pulumi.Input[bool] disable_health_check: Whether to explicitly disable health checks for this instance. :param pulumi.Input[int] healthy_threshold: Number of consecutive successful health checks required before receiving traffic. :param pulumi.Input[str] host: Host header to send when performing an HTTP health check. Example: "myapp.appspot.com" :param pulumi.Input[int] restart_threshold: Number of consecutive failed health checks required before an instance is restarted. 
:param pulumi.Input[str] timeout: Time before the health check is considered failed. :param pulumi.Input[int] unhealthy_threshold: Number of consecutive failed health checks required before removing traffic. """ if check_interval is not None: pulumi.set(__self__, "check_interval", check_interval) if disable_health_check is not None: pulumi.set(__self__, "disable_health_check", disable_health_check) if healthy_threshold is not None: pulumi.set(__self__, "healthy_threshold", healthy_threshold) if host is not None: pulumi.set(__self__, "host", host) if restart_threshold is not None: pulumi.set(__self__, "restart_threshold", restart_threshold) if timeout is not None: pulumi.set(__self__, "timeout", timeout) if unhealthy_threshold is not None: pulumi.set(__self__, "unhealthy_threshold", unhealthy_threshold) @property @pulumi.getter(name="checkInterval") def check_interval(self) -> Optional[pulumi.Input[str]]: """ Interval between health checks. """ return pulumi.get(self, "check_interval") @check_interval.setter def check_interval(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "check_interval", value) @property @pulumi.getter(name="disableHealthCheck") def disable_health_check(self) -> Optional[pulumi.Input[bool]]: """ Whether to explicitly disable health checks for this instance. """ return pulumi.get(self, "disable_health_check") @disable_health_check.setter def disable_health_check(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "disable_health_check", value) @property @pulumi.getter(name="healthyThreshold") def healthy_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive successful health checks required before receiving traffic. 
""" return pulumi.get(self, "healthy_threshold") @healthy_threshold.setter def healthy_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "healthy_threshold", value) @property @pulumi.getter def host(self) -> Optional[pulumi.Input[str]]: """ Host header to send when performing an HTTP health check. Example: "myapp.appspot.com" """ return pulumi.get(self, "host") @host.setter def host(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "host", value) @property @pulumi.getter(name="restartThreshold") def restart_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive failed health checks required before an instance is restarted. """ return pulumi.get(self, "restart_threshold") @restart_threshold.setter def restart_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "restart_threshold", value) @property @pulumi.getter def timeout(self) -> Optional[pulumi.Input[str]]: """ Time before the health check is considered failed. """ return pulumi.get(self, "timeout") @timeout.setter def timeout(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "timeout", value) @property @pulumi.getter(name="unhealthyThreshold") def unhealthy_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive failed health checks required before removing traffic. """ return pulumi.get(self, "unhealthy_threshold") @unhealthy_threshold.setter def unhealthy_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "unhealthy_threshold", value) @pulumi.input_type class IdentityAwareProxyArgs: def __init__(__self__, *, enabled: Optional[pulumi.Input[bool]] = None, oauth2_client_id: Optional[pulumi.Input[str]] = None, oauth2_client_secret: Optional[pulumi.Input[str]] = None): """ Identity-Aware Proxy :param pulumi.Input[bool] enabled: Whether the serving infrastructure will authenticate and authorize all incoming requests.If true, the oauth2_client_id and oauth2_client_secret fields must be non-empty. 
:param pulumi.Input[str] oauth2_client_id: OAuth2 client ID to use for the authentication flow. :param pulumi.Input[str] oauth2_client_secret: OAuth2 client secret to use for the authentication flow.For security reasons, this value cannot be retrieved via the API. Instead, the SHA-256 hash of the value is returned in the oauth2_client_secret_sha256 field.@InputOnly """ if enabled is not None: pulumi.set(__self__, "enabled", enabled) if oauth2_client_id is not None: pulumi.set(__self__, "oauth2_client_id", oauth2_client_id) if oauth2_client_secret is not None: pulumi.set(__self__, "oauth2_client_secret", oauth2_client_secret) @property @pulumi.getter def enabled(self) -> Optional[pulumi.Input[bool]]: """ Whether the serving infrastructure will authenticate and authorize all incoming requests.If true, the oauth2_client_id and oauth2_client_secret fields must be non-empty. """ return pulumi.get(self, "enabled") @enabled.setter def enabled(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "enabled", value) @property @pulumi.getter(name="oauth2ClientId") def oauth2_client_id(self) -> Optional[pulumi.Input[str]]: """ OAuth2 client ID to use for the authentication flow. """ return pulumi.get(self, "oauth2_client_id") @oauth2_client_id.setter def oauth2_client_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "oauth2_client_id", value) @property @pulumi.getter(name="oauth2ClientSecret") def oauth2_client_secret(self) -> Optional[pulumi.Input[str]]: """ OAuth2 client secret to use for the authentication flow.For security reasons, this value cannot be retrieved via the API. 
Instead, the SHA-256 hash of the value is returned in the oauth2_client_secret_sha256 field.@InputOnly """ return pulumi.get(self, "oauth2_client_secret") @oauth2_client_secret.setter def oauth2_client_secret(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "oauth2_client_secret", value) @pulumi.input_type class LibraryArgs: def __init__(__self__, *, name: Optional[pulumi.Input[str]] = None, version: Optional[pulumi.Input[str]] = None): """ Third-party Python runtime library that is required by the application. :param pulumi.Input[str] name: Name of the library. Example: "django". :param pulumi.Input[str] version: Version of the library to select, or "latest". """ if name is not None: pulumi.set(__self__, "name", name) if version is not None: pulumi.set(__self__, "version", version) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Name of the library. Example: "django". """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter def version(self) -> Optional[pulumi.Input[str]]: """ Version of the library to select, or "latest". """ return pulumi.get(self, "version") @version.setter def version(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "version", value) @pulumi.input_type class LivenessCheckArgs: def __init__(__self__, *, check_interval: Optional[pulumi.Input[str]] = None, failure_threshold: Optional[pulumi.Input[int]] = None, host: Optional[pulumi.Input[str]] = None, initial_delay: Optional[pulumi.Input[str]] = None, path: Optional[pulumi.Input[str]] = None, success_threshold: Optional[pulumi.Input[int]] = None, timeout: Optional[pulumi.Input[str]] = None): """ Health checking configuration for VM instances. Unhealthy instances are killed and replaced with new instances. :param pulumi.Input[str] check_interval: Interval between health checks. 
:param pulumi.Input[int] failure_threshold: Number of consecutive failed checks required before considering the VM unhealthy. :param pulumi.Input[str] host: Host header to send when performing a HTTP Liveness check. Example: "myapp.appspot.com" :param pulumi.Input[str] initial_delay: The initial delay before starting to execute the checks. :param pulumi.Input[str] path: The request path. :param pulumi.Input[int] success_threshold: Number of consecutive successful checks required before considering the VM healthy. :param pulumi.Input[str] timeout: Time before the check is considered failed. """ if check_interval is not None: pulumi.set(__self__, "check_interval", check_interval) if failure_threshold is not None: pulumi.set(__self__, "failure_threshold", failure_threshold) if host is not None: pulumi.set(__self__, "host", host) if initial_delay is not None: pulumi.set(__self__, "initial_delay", initial_delay) if path is not None: pulumi.set(__self__, "path", path) if success_threshold is not None: pulumi.set(__self__, "success_threshold", success_threshold) if timeout is not None: pulumi.set(__self__, "timeout", timeout) @property @pulumi.getter(name="checkInterval") def check_interval(self) -> Optional[pulumi.Input[str]]: """ Interval between health checks. """ return pulumi.get(self, "check_interval") @check_interval.setter def check_interval(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "check_interval", value) @property @pulumi.getter(name="failureThreshold") def failure_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive failed checks required before considering the VM unhealthy. """ return pulumi.get(self, "failure_threshold") @failure_threshold.setter def failure_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "failure_threshold", value) @property @pulumi.getter def host(self) -> Optional[pulumi.Input[str]]: """ Host header to send when performing a HTTP Liveness check. 
Example: "myapp.appspot.com" """ return pulumi.get(self, "host") @host.setter def host(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "host", value) @property @pulumi.getter(name="initialDelay") def initial_delay(self) -> Optional[pulumi.Input[str]]: """ The initial delay before starting to execute the checks. """ return pulumi.get(self, "initial_delay") @initial_delay.setter def initial_delay(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "initial_delay", value) @property @pulumi.getter def path(self) -> Optional[pulumi.Input[str]]: """ The request path. """ return pulumi.get(self, "path") @path.setter def path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "path", value) @property @pulumi.getter(name="successThreshold") def success_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive successful checks required before considering the VM healthy. """ return pulumi.get(self, "success_threshold") @success_threshold.setter def success_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "success_threshold", value) @property @pulumi.getter def timeout(self) -> Optional[pulumi.Input[str]]: """ Time before the check is considered failed. """ return pulumi.get(self, "timeout") @timeout.setter def timeout(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "timeout", value) @pulumi.input_type class ManualScalingArgs: def __init__(__self__, *, instances: Optional[pulumi.Input[int]] = None): """ A service with manual scaling runs continuously, allowing you to perform complex initialization and rely on the state of its memory over time. :param pulumi.Input[int] instances: Number of instances to assign to the service at the start. This number can later be altered by using the Modules API (https://cloud.google.com/appengine/docs/python/modules/functions) set_num_instances() function. 
""" if instances is not None: pulumi.set(__self__, "instances", instances) @property @pulumi.getter def instances(self) -> Optional[pulumi.Input[int]]: """ Number of instances to assign to the service at the start. This number can later be altered by using the Modules API (https://cloud.google.com/appengine/docs/python/modules/functions) set_num_instances() function. """ return pulumi.get(self, "instances") @instances.setter def instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "instances", value) @pulumi.input_type class NetworkUtilizationArgs: def __init__(__self__, *, target_received_bytes_per_second: Optional[pulumi.Input[int]] = None, target_received_packets_per_second: Optional[pulumi.Input[int]] = None, target_sent_bytes_per_second: Optional[pulumi.Input[int]] = None, target_sent_packets_per_second: Optional[pulumi.Input[int]] = None): """ Target scaling by network usage. Only applicable in the App Engine flexible environment. :param pulumi.Input[int] target_received_bytes_per_second: Target bytes received per second. :param pulumi.Input[int] target_received_packets_per_second: Target packets received per second. :param pulumi.Input[int] target_sent_bytes_per_second: Target bytes sent per second. :param pulumi.Input[int] target_sent_packets_per_second: Target packets sent per second. 
""" if target_received_bytes_per_second is not None: pulumi.set(__self__, "target_received_bytes_per_second", target_received_bytes_per_second) if target_received_packets_per_second is not None: pulumi.set(__self__, "target_received_packets_per_second", target_received_packets_per_second) if target_sent_bytes_per_second is not None: pulumi.set(__self__, "target_sent_bytes_per_second", target_sent_bytes_per_second) if target_sent_packets_per_second is not None: pulumi.set(__self__, "target_sent_packets_per_second", target_sent_packets_per_second) @property @pulumi.getter(name="targetReceivedBytesPerSecond") def target_received_bytes_per_second(self) -> Optional[pulumi.Input[int]]: """ Target bytes received per second. """ return pulumi.get(self, "target_received_bytes_per_second") @target_received_bytes_per_second.setter def target_received_bytes_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_received_bytes_per_second", value) @property @pulumi.getter(name="targetReceivedPacketsPerSecond") def target_received_packets_per_second(self) -> Optional[pulumi.Input[int]]: """ Target packets received per second. """ return pulumi.get(self, "target_received_packets_per_second") @target_received_packets_per_second.setter def target_received_packets_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_received_packets_per_second", value) @property @pulumi.getter(name="targetSentBytesPerSecond") def target_sent_bytes_per_second(self) -> Optional[pulumi.Input[int]]: """ Target bytes sent per second. """ return pulumi.get(self, "target_sent_bytes_per_second") @target_sent_bytes_per_second.setter def target_sent_bytes_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_sent_bytes_per_second", value) @property @pulumi.getter(name="targetSentPacketsPerSecond") def target_sent_packets_per_second(self) -> Optional[pulumi.Input[int]]: """ Target packets sent per second. 
""" return pulumi.get(self, "target_sent_packets_per_second") @target_sent_packets_per_second.setter def target_sent_packets_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_sent_packets_per_second", value) @pulumi.input_type class NetworkArgs: def __init__(__self__, *, forwarded_ports: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None, instance_ip_mode: Optional[pulumi.Input['NetworkInstanceIpMode']] = None, instance_tag: Optional[pulumi.Input[str]] = None, name: Optional[pulumi.Input[str]] = None, session_affinity: Optional[pulumi.Input[bool]] = None, subnetwork_name: Optional[pulumi.Input[str]] = None): """ Extra network settings. Only applicable in the App Engine flexible environment. :param pulumi.Input[Sequence[pulumi.Input[str]]] forwarded_ports: List of ports, or port pairs, to forward from the virtual machine to the application container. Only applicable in the App Engine flexible environment. :param pulumi.Input['NetworkInstanceIpMode'] instance_ip_mode: The IP mode for instances. Only applicable in the App Engine flexible environment. :param pulumi.Input[str] instance_tag: Tag to apply to the instance during creation. Only applicable in the App Engine flexible environment. :param pulumi.Input[str] name: Google Compute Engine network where the virtual machines are created. Specify the short name, not the resource path.Defaults to default. :param pulumi.Input[bool] session_affinity: Enable session affinity. Only applicable in the App Engine flexible environment. :param pulumi.Input[str] subnetwork_name: Google Cloud Platform sub-network where the virtual machines are created. Specify the short name, not the resource path.If a subnetwork name is specified, a network name will also be required unless it is for the default network. If the network that the instance is being created in is a Legacy network, then the IP address is allocated from the IPv4Range. 
If the network that the instance is being created in is an auto Subnet Mode Network, then only network name should be specified (not the subnetwork_name) and the IP address is created from the IPCidrRange of the subnetwork that exists in that zone for that network. If the network that the instance is being created in is a custom Subnet Mode Network, then the subnetwork_name must be specified and the IP address is created from the IPCidrRange of the subnetwork.If specified, the subnetwork must exist in the same region as the App Engine flexible environment application. """ if forwarded_ports is not None: pulumi.set(__self__, "forwarded_ports", forwarded_ports) if instance_ip_mode is not None: pulumi.set(__self__, "instance_ip_mode", instance_ip_mode) if instance_tag is not None: pulumi.set(__self__, "instance_tag", instance_tag) if name is not None: pulumi.set(__self__, "name", name) if session_affinity is not None: pulumi.set(__self__, "session_affinity", session_affinity) if subnetwork_name is not None: pulumi.set(__self__, "subnetwork_name", subnetwork_name) @property @pulumi.getter(name="forwardedPorts") def forwarded_ports(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]: """ List of ports, or port pairs, to forward from the virtual machine to the application container. Only applicable in the App Engine flexible environment. """ return pulumi.get(self, "forwarded_ports") @forwarded_ports.setter def forwarded_ports(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]): pulumi.set(self, "forwarded_ports", value) @property @pulumi.getter(name="instanceIpMode") def instance_ip_mode(self) -> Optional[pulumi.Input['NetworkInstanceIpMode']]: """ The IP mode for instances. Only applicable in the App Engine flexible environment. 
""" return pulumi.get(self, "instance_ip_mode") @instance_ip_mode.setter def instance_ip_mode(self, value: Optional[pulumi.Input['NetworkInstanceIpMode']]): pulumi.set(self, "instance_ip_mode", value) @property @pulumi.getter(name="instanceTag") def instance_tag(self) -> Optional[pulumi.Input[str]]: """ Tag to apply to the instance during creation. Only applicable in the App Engine flexible environment. """ return pulumi.get(self, "instance_tag") @instance_tag.setter def instance_tag(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "instance_tag", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Google Compute Engine network where the virtual machines are created. Specify the short name, not the resource path.Defaults to default. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="sessionAffinity") def session_affinity(self) -> Optional[pulumi.Input[bool]]: """ Enable session affinity. Only applicable in the App Engine flexible environment. """ return pulumi.get(self, "session_affinity") @session_affinity.setter def session_affinity(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "session_affinity", value) @property @pulumi.getter(name="subnetworkName") def subnetwork_name(self) -> Optional[pulumi.Input[str]]: """ Google Cloud Platform sub-network where the virtual machines are created. Specify the short name, not the resource path.If a subnetwork name is specified, a network name will also be required unless it is for the default network. If the network that the instance is being created in is a Legacy network, then the IP address is allocated from the IPv4Range. 
If the network that the instance is being created in is an auto Subnet Mode Network, then only network name should be specified (not the subnetwork_name) and the IP address is created from the IPCidrRange of the subnetwork that exists in that zone for that network. If the network that the instance is being created in is a custom Subnet Mode Network, then the subnetwork_name must be specified and the IP address is created from the IPCidrRange of the subnetwork.If specified, the subnetwork must exist in the same region as the App Engine flexible environment application. """ return pulumi.get(self, "subnetwork_name") @subnetwork_name.setter def subnetwork_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "subnetwork_name", value) @pulumi.input_type class ReadinessCheckArgs: def __init__(__self__, *, app_start_timeout: Optional[pulumi.Input[str]] = None, check_interval: Optional[pulumi.Input[str]] = None, failure_threshold: Optional[pulumi.Input[int]] = None, host: Optional[pulumi.Input[str]] = None, path: Optional[pulumi.Input[str]] = None, success_threshold: Optional[pulumi.Input[int]] = None, timeout: Optional[pulumi.Input[str]] = None): """ Readiness checking configuration for VM instances. Unhealthy instances are removed from traffic rotation. :param pulumi.Input[str] app_start_timeout: A maximum time limit on application initialization, measured from moment the application successfully replies to a healthcheck until it is ready to serve traffic. :param pulumi.Input[str] check_interval: Interval between health checks. :param pulumi.Input[int] failure_threshold: Number of consecutive failed checks required before removing traffic. :param pulumi.Input[str] host: Host header to send when performing a HTTP Readiness check. Example: "myapp.appspot.com" :param pulumi.Input[str] path: The request path. :param pulumi.Input[int] success_threshold: Number of consecutive successful checks required before receiving traffic. 
:param pulumi.Input[str] timeout: Time before the check is considered failed. """ if app_start_timeout is not None: pulumi.set(__self__, "app_start_timeout", app_start_timeout) if check_interval is not None: pulumi.set(__self__, "check_interval", check_interval) if failure_threshold is not None: pulumi.set(__self__, "failure_threshold", failure_threshold) if host is not None: pulumi.set(__self__, "host", host) if path is not None: pulumi.set(__self__, "path", path) if success_threshold is not None: pulumi.set(__self__, "success_threshold", success_threshold) if timeout is not None: pulumi.set(__self__, "timeout", timeout) @property @pulumi.getter(name="appStartTimeout") def app_start_timeout(self) -> Optional[pulumi.Input[str]]: """ A maximum time limit on application initialization, measured from moment the application successfully replies to a healthcheck until it is ready to serve traffic. """ return pulumi.get(self, "app_start_timeout") @app_start_timeout.setter def app_start_timeout(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "app_start_timeout", value) @property @pulumi.getter(name="checkInterval") def check_interval(self) -> Optional[pulumi.Input[str]]: """ Interval between health checks. """ return pulumi.get(self, "check_interval") @check_interval.setter def check_interval(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "check_interval", value) @property @pulumi.getter(name="failureThreshold") def failure_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive failed checks required before removing traffic. """ return pulumi.get(self, "failure_threshold") @failure_threshold.setter def failure_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "failure_threshold", value) @property @pulumi.getter def host(self) -> Optional[pulumi.Input[str]]: """ Host header to send when performing a HTTP Readiness check. 
Example: "myapp.appspot.com" """ return pulumi.get(self, "host") @host.setter def host(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "host", value) @property @pulumi.getter def path(self) -> Optional[pulumi.Input[str]]: """ The request path. """ return pulumi.get(self, "path") @path.setter def path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "path", value) @property @pulumi.getter(name="successThreshold") def success_threshold(self) -> Optional[pulumi.Input[int]]: """ Number of consecutive successful checks required before receiving traffic. """ return pulumi.get(self, "success_threshold") @success_threshold.setter def success_threshold(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "success_threshold", value) @property @pulumi.getter def timeout(self) -> Optional[pulumi.Input[str]]: """ Time before the check is considered failed. """ return pulumi.get(self, "timeout") @timeout.setter def timeout(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "timeout", value) @pulumi.input_type class RequestUtilizationArgs: def __init__(__self__, *, target_concurrent_requests: Optional[pulumi.Input[int]] = None, target_request_count_per_second: Optional[pulumi.Input[int]] = None): """ Target scaling by request utilization. Only applicable in the App Engine flexible environment. :param pulumi.Input[int] target_concurrent_requests: Target number of concurrent requests. :param pulumi.Input[int] target_request_count_per_second: Target requests per second. """ if target_concurrent_requests is not None: pulumi.set(__self__, "target_concurrent_requests", target_concurrent_requests) if target_request_count_per_second is not None: pulumi.set(__self__, "target_request_count_per_second", target_request_count_per_second) @property @pulumi.getter(name="targetConcurrentRequests") def target_concurrent_requests(self) -> Optional[pulumi.Input[int]]: """ Target number of concurrent requests. 
""" return pulumi.get(self, "target_concurrent_requests") @target_concurrent_requests.setter def target_concurrent_requests(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_concurrent_requests", value) @property @pulumi.getter(name="targetRequestCountPerSecond") def target_request_count_per_second(self) -> Optional[pulumi.Input[int]]: """ Target requests per second. """ return pulumi.get(self, "target_request_count_per_second") @target_request_count_per_second.setter def target_request_count_per_second(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "target_request_count_per_second", value) @pulumi.input_type class ResourcesArgs: def __init__(__self__, *, cpu: Optional[pulumi.Input[float]] = None, disk_gb: Optional[pulumi.Input[float]] = None, kms_key_reference: Optional[pulumi.Input[str]] = None, memory_gb: Optional[pulumi.Input[float]] = None, volumes: Optional[pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]]] = None): """ Machine resources for a version. :param pulumi.Input[float] cpu: Number of CPU cores needed. :param pulumi.Input[float] disk_gb: Disk size (GB) needed. :param pulumi.Input[str] kms_key_reference: The name of the encryption key that is stored in Google Cloud KMS. Only should be used by Cloud Composer to encrypt the vm disk :param pulumi.Input[float] memory_gb: Memory (GB) needed. :param pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]] volumes: User specified volumes. """ if cpu is not None: pulumi.set(__self__, "cpu", cpu) if disk_gb is not None: pulumi.set(__self__, "disk_gb", disk_gb) if kms_key_reference is not None: pulumi.set(__self__, "kms_key_reference", kms_key_reference) if memory_gb is not None: pulumi.set(__self__, "memory_gb", memory_gb) if volumes is not None: pulumi.set(__self__, "volumes", volumes) @property @pulumi.getter def cpu(self) -> Optional[pulumi.Input[float]]: """ Number of CPU cores needed. 
""" return pulumi.get(self, "cpu") @cpu.setter def cpu(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "cpu", value) @property @pulumi.getter(name="diskGb") def disk_gb(self) -> Optional[pulumi.Input[float]]: """ Disk size (GB) needed. """ return pulumi.get(self, "disk_gb") @disk_gb.setter def disk_gb(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "disk_gb", value) @property @pulumi.getter(name="kmsKeyReference") def kms_key_reference(self) -> Optional[pulumi.Input[str]]: """ The name of the encryption key that is stored in Google Cloud KMS. Only should be used by Cloud Composer to encrypt the vm disk """ return pulumi.get(self, "kms_key_reference") @kms_key_reference.setter def kms_key_reference(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "kms_key_reference", value) @property @pulumi.getter(name="memoryGb") def memory_gb(self) -> Optional[pulumi.Input[float]]: """ Memory (GB) needed. """ return pulumi.get(self, "memory_gb") @memory_gb.setter def memory_gb(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "memory_gb", value) @property @pulumi.getter def volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]]]: """ User specified volumes. """ return pulumi.get(self, "volumes") @volumes.setter def volumes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]]]): pulumi.set(self, "volumes", value) @pulumi.input_type class ScriptHandlerArgs: def __init__(__self__, *, script_path: Optional[pulumi.Input[str]] = None): """ Executes a script to handle the request that matches the URL pattern. :param pulumi.Input[str] script_path: Path to the script from the application root directory. """ if script_path is not None: pulumi.set(__self__, "script_path", script_path) @property @pulumi.getter(name="scriptPath") def script_path(self) -> Optional[pulumi.Input[str]]: """ Path to the script from the application root directory. 
""" return pulumi.get(self, "script_path") @script_path.setter def script_path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "script_path", value) @pulumi.input_type class SslSettingsArgs: def __init__(__self__, *, certificate_id: Optional[pulumi.Input[str]] = None, ssl_management_type: Optional[pulumi.Input['SslSettingsSslManagementType']] = None): """ SSL configuration for a DomainMapping resource. :param pulumi.Input[str] certificate_id: ID of the AuthorizedCertificate resource configuring SSL for the application. Clearing this field will remove SSL support.By default, a managed certificate is automatically created for every domain mapping. To omit SSL support or to configure SSL manually, specify SslManagementType.MANUAL on a CREATE or UPDATE request. You must be authorized to administer the AuthorizedCertificate resource to manually map it to a DomainMapping resource. Example: 12345. :param pulumi.Input['SslSettingsSslManagementType'] ssl_management_type: SSL management type for this domain. If AUTOMATIC, a managed certificate is automatically provisioned. If MANUAL, certificate_id must be manually specified in order to configure SSL for this domain. """ if certificate_id is not None: pulumi.set(__self__, "certificate_id", certificate_id) if ssl_management_type is not None: pulumi.set(__self__, "ssl_management_type", ssl_management_type) @property @pulumi.getter(name="certificateId") def certificate_id(self) -> Optional[pulumi.Input[str]]: """ ID of the AuthorizedCertificate resource configuring SSL for the application. Clearing this field will remove SSL support.By default, a managed certificate is automatically created for every domain mapping. To omit SSL support or to configure SSL manually, specify SslManagementType.MANUAL on a CREATE or UPDATE request. You must be authorized to administer the AuthorizedCertificate resource to manually map it to a DomainMapping resource. Example: 12345. 
""" return pulumi.get(self, "certificate_id") @certificate_id.setter def certificate_id(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "certificate_id", value) @property @pulumi.getter(name="sslManagementType") def ssl_management_type(self) -> Optional[pulumi.Input['SslSettingsSslManagementType']]: """ SSL management type for this domain. If AUTOMATIC, a managed certificate is automatically provisioned. If MANUAL, certificate_id must be manually specified in order to configure SSL for this domain. """ return pulumi.get(self, "ssl_management_type") @ssl_management_type.setter def ssl_management_type(self, value: Optional[pulumi.Input['SslSettingsSslManagementType']]): pulumi.set(self, "ssl_management_type", value) @pulumi.input_type class StandardSchedulerSettingsArgs: def __init__(__self__, *, max_instances: Optional[pulumi.Input[int]] = None, min_instances: Optional[pulumi.Input[int]] = None, target_cpu_utilization: Optional[pulumi.Input[float]] = None, target_throughput_utilization: Optional[pulumi.Input[float]] = None): """ Scheduler settings for standard environment. :param pulumi.Input[int] max_instances: Maximum number of instances to run for this version. Set to zero to disable max_instances configuration. :param pulumi.Input[int] min_instances: Minimum number of instances to run for this version. Set to zero to disable min_instances configuration. :param pulumi.Input[float] target_cpu_utilization: Target CPU utilization ratio to maintain when scaling. 
:param pulumi.Input[float] target_throughput_utilization: Target throughput utilization ratio to maintain when scaling """ if max_instances is not None: pulumi.set(__self__, "max_instances", max_instances) if min_instances is not None: pulumi.set(__self__, "min_instances", min_instances) if target_cpu_utilization is not None: pulumi.set(__self__, "target_cpu_utilization", target_cpu_utilization) if target_throughput_utilization is not None: pulumi.set(__self__, "target_throughput_utilization", target_throughput_utilization) @property @pulumi.getter(name="maxInstances") def max_instances(self) -> Optional[pulumi.Input[int]]: """ Maximum number of instances to run for this version. Set to zero to disable max_instances configuration. """ return pulumi.get(self, "max_instances") @max_instances.setter def max_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "max_instances", value) @property @pulumi.getter(name="minInstances") def min_instances(self) -> Optional[pulumi.Input[int]]: """ Minimum number of instances to run for this version. Set to zero to disable min_instances configuration. """ return pulumi.get(self, "min_instances") @min_instances.setter def min_instances(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "min_instances", value) @property @pulumi.getter(name="targetCpuUtilization") def target_cpu_utilization(self) -> Optional[pulumi.Input[float]]: """ Target CPU utilization ratio to maintain when scaling. 
""" return pulumi.get(self, "target_cpu_utilization") @target_cpu_utilization.setter def target_cpu_utilization(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "target_cpu_utilization", value) @property @pulumi.getter(name="targetThroughputUtilization") def target_throughput_utilization(self) -> Optional[pulumi.Input[float]]: """ Target throughput utilization ratio to maintain when scaling """ return pulumi.get(self, "target_throughput_utilization") @target_throughput_utilization.setter def target_throughput_utilization(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "target_throughput_utilization", value) @pulumi.input_type class StaticFilesHandlerArgs: def __init__(__self__, *, application_readable: Optional[pulumi.Input[bool]] = None, expiration: Optional[pulumi.Input[str]] = None, http_headers: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, mime_type: Optional[pulumi.Input[str]] = None, path: Optional[pulumi.Input[str]] = None, require_matching_file: Optional[pulumi.Input[bool]] = None, upload_path_regex: Optional[pulumi.Input[str]] = None): """ Files served directly to the user for a given URL, such as images, CSS stylesheets, or JavaScript source files. Static file handlers describe which files in the application directory are static files, and which URLs serve them. :param pulumi.Input[bool] application_readable: Whether files should also be uploaded as code data. By default, files declared in static file handlers are uploaded as static data and are only served to end users; they cannot be read by the application. If enabled, uploads are charged against both your code and static data storage resource quotas. :param pulumi.Input[str] expiration: Time a static file served by this handler should be cached by web proxies and browsers. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] http_headers: HTTP headers to use for all responses from these URLs. 
:param pulumi.Input[str] mime_type: MIME type used to serve all files served by this handler.Defaults to file-specific MIME types, which are derived from each file's filename extension. :param pulumi.Input[str] path: Path to the static files matched by the URL pattern, from the application root directory. The path can refer to text matched in groupings in the URL pattern. :param pulumi.Input[bool] require_matching_file: Whether this handler should match the request if the file referenced by the handler does not exist. :param pulumi.Input[str] upload_path_regex: Regular expression that matches the file paths for all files that should be referenced by this handler. """ if application_readable is not None: pulumi.set(__self__, "application_readable", application_readable) if expiration is not None: pulumi.set(__self__, "expiration", expiration) if http_headers is not None: pulumi.set(__self__, "http_headers", http_headers) if mime_type is not None: pulumi.set(__self__, "mime_type", mime_type) if path is not None: pulumi.set(__self__, "path", path) if require_matching_file is not None: pulumi.set(__self__, "require_matching_file", require_matching_file) if upload_path_regex is not None: pulumi.set(__self__, "upload_path_regex", upload_path_regex) @property @pulumi.getter(name="applicationReadable") def application_readable(self) -> Optional[pulumi.Input[bool]]: """ Whether files should also be uploaded as code data. By default, files declared in static file handlers are uploaded as static data and are only served to end users; they cannot be read by the application. If enabled, uploads are charged against both your code and static data storage resource quotas. 
""" return pulumi.get(self, "application_readable") @application_readable.setter def application_readable(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "application_readable", value) @property @pulumi.getter def expiration(self) -> Optional[pulumi.Input[str]]: """ Time a static file served by this handler should be cached by web proxies and browsers. """ return pulumi.get(self, "expiration") @expiration.setter def expiration(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "expiration", value) @property @pulumi.getter(name="httpHeaders") def http_headers(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ HTTP headers to use for all responses from these URLs. """ return pulumi.get(self, "http_headers") @http_headers.setter def http_headers(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "http_headers", value) @property @pulumi.getter(name="mimeType") def mime_type(self) -> Optional[pulumi.Input[str]]: """ MIME type used to serve all files served by this handler.Defaults to file-specific MIME types, which are derived from each file's filename extension. """ return pulumi.get(self, "mime_type") @mime_type.setter def mime_type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "mime_type", value) @property @pulumi.getter def path(self) -> Optional[pulumi.Input[str]]: """ Path to the static files matched by the URL pattern, from the application root directory. The path can refer to text matched in groupings in the URL pattern. """ return pulumi.get(self, "path") @path.setter def path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "path", value) @property @pulumi.getter(name="requireMatchingFile") def require_matching_file(self) -> Optional[pulumi.Input[bool]]: """ Whether this handler should match the request if the file referenced by the handler does not exist. 
""" return pulumi.get(self, "require_matching_file") @require_matching_file.setter def require_matching_file(self, value: Optional[pulumi.Input[bool]]): pulumi.set(self, "require_matching_file", value) @property @pulumi.getter(name="uploadPathRegex") def upload_path_regex(self) -> Optional[pulumi.Input[str]]: """ Regular expression that matches the file paths for all files that should be referenced by this handler. """ return pulumi.get(self, "upload_path_regex") @upload_path_regex.setter def upload_path_regex(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "upload_path_regex", value) @pulumi.input_type class UrlDispatchRuleArgs: def __init__(__self__, *, domain: Optional[pulumi.Input[str]] = None, path: Optional[pulumi.Input[str]] = None, service: Optional[pulumi.Input[str]] = None): """ Rules to match an HTTP request and dispatch that request to a service. :param pulumi.Input[str] domain: Domain name to match against. The wildcard "*" is supported if specified before a period: "*.".Defaults to matching all domains: "*". :param pulumi.Input[str] path: Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.The sum of the lengths of the domain and path may not exceed 100 characters. :param pulumi.Input[str] service: Resource ID of a service in this application that should serve the matched request. The service must already exist. Example: default. """ if domain is not None: pulumi.set(__self__, "domain", domain) if path is not None: pulumi.set(__self__, "path", path) if service is not None: pulumi.set(__self__, "service", service) @property @pulumi.getter def domain(self) -> Optional[pulumi.Input[str]]: """ Domain name to match against. The wildcard "*" is supported if specified before a period: "*.".Defaults to matching all domains: "*". 
""" return pulumi.get(self, "domain") @domain.setter def domain(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "domain", value) @property @pulumi.getter def path(self) -> Optional[pulumi.Input[str]]: """ Pathname within the host. Must start with a "/". A single "*" can be included at the end of the path.The sum of the lengths of the domain and path may not exceed 100 characters. """ return pulumi.get(self, "path") @path.setter def path(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "path", value) @property @pulumi.getter def service(self) -> Optional[pulumi.Input[str]]: """ Resource ID of a service in this application that should serve the matched request. The service must already exist. Example: default. """ return pulumi.get(self, "service") @service.setter def service(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "service", value) @pulumi.input_type class UrlMapArgs: def __init__(__self__, *, api_endpoint: Optional[pulumi.Input['ApiEndpointHandlerArgs']] = None, auth_fail_action: Optional[pulumi.Input['UrlMapAuthFailAction']] = None, login: Optional[pulumi.Input['UrlMapLogin']] = None, redirect_http_response_code: Optional[pulumi.Input['UrlMapRedirectHttpResponseCode']] = None, script: Optional[pulumi.Input['ScriptHandlerArgs']] = None, security_level: Optional[pulumi.Input['UrlMapSecurityLevel']] = None, static_files: Optional[pulumi.Input['StaticFilesHandlerArgs']] = None, url_regex: Optional[pulumi.Input[str]] = None): """ URL pattern and description of how the URL should be handled. App Engine can handle URLs by executing application code or by serving static files uploaded with the version, such as images, CSS, or JavaScript. :param pulumi.Input['ApiEndpointHandlerArgs'] api_endpoint: Uses API Endpoints to handle requests. :param pulumi.Input['UrlMapAuthFailAction'] auth_fail_action: Action to take when users access resources that require authentication. Defaults to redirect. 
:param pulumi.Input['UrlMapLogin'] login: Level of login required to access this resource. Not supported for Node.js in the App Engine standard environment. :param pulumi.Input['UrlMapRedirectHttpResponseCode'] redirect_http_response_code: 30x code to use when performing redirects for the secure field. Defaults to 302. :param pulumi.Input['ScriptHandlerArgs'] script: Executes a script to handle the requests that match this URL pattern. Only the auto value is supported for Node.js in the App Engine standard environment, for example "script": "auto". :param pulumi.Input['UrlMapSecurityLevel'] security_level: Security (HTTPS) enforcement for this URL. :param pulumi.Input['StaticFilesHandlerArgs'] static_files: Returns the contents of a file, such as an image, as the response. :param pulumi.Input[str] url_regex: URL prefix. Uses regular expression syntax, which means regexp special characters must be escaped, but should not contain groupings. All URLs that begin with this prefix are handled by this handler, using the portion of the URL after the prefix as part of the file path. """ if api_endpoint is not None: pulumi.set(__self__, "api_endpoint", api_endpoint) if auth_fail_action is not None: pulumi.set(__self__, "auth_fail_action", auth_fail_action) if login is not None: pulumi.set(__self__, "login", login) if redirect_http_response_code is not None: pulumi.set(__self__, "redirect_http_response_code", redirect_http_response_code) if script is not None: pulumi.set(__self__, "script", script) if security_level is not None: pulumi.set(__self__, "security_level", security_level) if static_files is not None: pulumi.set(__self__, "static_files", static_files) if url_regex is not None: pulumi.set(__self__, "url_regex", url_regex) @property @pulumi.getter(name="apiEndpoint") def api_endpoint(self) -> Optional[pulumi.Input['ApiEndpointHandlerArgs']]: """ Uses API Endpoints to handle requests. 
""" return pulumi.get(self, "api_endpoint") @api_endpoint.setter def api_endpoint(self, value: Optional[pulumi.Input['ApiEndpointHandlerArgs']]): pulumi.set(self, "api_endpoint", value) @property @pulumi.getter(name="authFailAction") def auth_fail_action(self) -> Optional[pulumi.Input['UrlMapAuthFailAction']]: """ Action to take when users access resources that require authentication. Defaults to redirect. """ return pulumi.get(self, "auth_fail_action") @auth_fail_action.setter def auth_fail_action(self, value: Optional[pulumi.Input['UrlMapAuthFailAction']]): pulumi.set(self, "auth_fail_action", value) @property @pulumi.getter def login(self) -> Optional[pulumi.Input['UrlMapLogin']]: """ Level of login required to access this resource. Not supported for Node.js in the App Engine standard environment. """ return pulumi.get(self, "login") @login.setter def login(self, value: Optional[pulumi.Input['UrlMapLogin']]): pulumi.set(self, "login", value) @property @pulumi.getter(name="redirectHttpResponseCode") def redirect_http_response_code(self) -> Optional[pulumi.Input['UrlMapRedirectHttpResponseCode']]: """ 30x code to use when performing redirects for the secure field. Defaults to 302. """ return pulumi.get(self, "redirect_http_response_code") @redirect_http_response_code.setter def redirect_http_response_code(self, value: Optional[pulumi.Input['UrlMapRedirectHttpResponseCode']]): pulumi.set(self, "redirect_http_response_code", value) @property @pulumi.getter def script(self) -> Optional[pulumi.Input['ScriptHandlerArgs']]: """ Executes a script to handle the requests that match this URL pattern. Only the auto value is supported for Node.js in the App Engine standard environment, for example "script": "auto". 
""" return pulumi.get(self, "script") @script.setter def script(self, value: Optional[pulumi.Input['ScriptHandlerArgs']]): pulumi.set(self, "script", value) @property @pulumi.getter(name="securityLevel") def security_level(self) -> Optional[pulumi.Input['UrlMapSecurityLevel']]: """ Security (HTTPS) enforcement for this URL. """ return pulumi.get(self, "security_level") @security_level.setter def security_level(self, value: Optional[pulumi.Input['UrlMapSecurityLevel']]): pulumi.set(self, "security_level", value) @property @pulumi.getter(name="staticFiles") def static_files(self) -> Optional[pulumi.Input['StaticFilesHandlerArgs']]: """ Returns the contents of a file, such as an image, as the response. """ return pulumi.get(self, "static_files") @static_files.setter def static_files(self, value: Optional[pulumi.Input['StaticFilesHandlerArgs']]): pulumi.set(self, "static_files", value) @property @pulumi.getter(name="urlRegex") def url_regex(self) -> Optional[pulumi.Input[str]]: """ URL prefix. Uses regular expression syntax, which means regexp special characters must be escaped, but should not contain groupings. All URLs that begin with this prefix are handled by this handler, using the portion of the URL after the prefix as part of the file path. """ return pulumi.get(self, "url_regex") @url_regex.setter def url_regex(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "url_regex", value) @pulumi.input_type class VolumeArgs: def __init__(__self__, *, name: Optional[pulumi.Input[str]] = None, size_gb: Optional[pulumi.Input[float]] = None, volume_type: Optional[pulumi.Input[str]] = None): """ Volumes mounted within the app container. Only applicable in the App Engine flexible environment. :param pulumi.Input[str] name: Unique name for the volume. :param pulumi.Input[float] size_gb: Volume size in gigabytes. :param pulumi.Input[str] volume_type: Underlying volume type, e.g. 'tmpfs'. 
""" if name is not None: pulumi.set(__self__, "name", name) if size_gb is not None: pulumi.set(__self__, "size_gb", size_gb) if volume_type is not None: pulumi.set(__self__, "volume_type", volume_type) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Unique name for the volume. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @property @pulumi.getter(name="sizeGb") def size_gb(self) -> Optional[pulumi.Input[float]]: """ Volume size in gigabytes. """ return pulumi.get(self, "size_gb") @size_gb.setter def size_gb(self, value: Optional[pulumi.Input[float]]): pulumi.set(self, "size_gb", value) @property @pulumi.getter(name="volumeType") def volume_type(self) -> Optional[pulumi.Input[str]]: """ Underlying volume type, e.g. 'tmpfs'. """ return pulumi.get(self, "volume_type") @volume_type.setter def volume_type(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "volume_type", value) @pulumi.input_type class VpcAccessConnectorArgs: def __init__(__self__, *, egress_setting: Optional[pulumi.Input['VpcAccessConnectorEgressSetting']] = None, name: Optional[pulumi.Input[str]] = None): """ VPC access connector specification. :param pulumi.Input['VpcAccessConnectorEgressSetting'] egress_setting: The egress setting for the connector, controlling what traffic is diverted through it. :param pulumi.Input[str] name: Full Serverless VPC Access Connector name e.g. /projects/my-project/locations/us-central1/connectors/c1. """ if egress_setting is not None: pulumi.set(__self__, "egress_setting", egress_setting) if name is not None: pulumi.set(__self__, "name", name) @property @pulumi.getter(name="egressSetting") def egress_setting(self) -> Optional[pulumi.Input['VpcAccessConnectorEgressSetting']]: """ The egress setting for the connector, controlling what traffic is diverted through it. 
""" return pulumi.get(self, "egress_setting") @egress_setting.setter def egress_setting(self, value: Optional[pulumi.Input['VpcAccessConnectorEgressSetting']]): pulumi.set(self, "egress_setting", value) @property @pulumi.getter def name(self) -> Optional[pulumi.Input[str]]: """ Full Serverless VPC Access Connector name e.g. /projects/my-project/locations/us-central1/connectors/c1. """ return pulumi.get(self, "name") @name.setter def name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "name", value) @pulumi.input_type class ZipInfoArgs: def __init__(__self__, *, files_count: Optional[pulumi.Input[int]] = None, source_url: Optional[pulumi.Input[str]] = None): """ The zip file information for a zip deployment. :param pulumi.Input[int] files_count: An estimate of the number of files in a zip for a zip deployment. If set, must be greater than or equal to the actual number of files. Used for optimizing performance; if not provided, deployment may be slow. :param pulumi.Input[str] source_url: URL of the zip file to deploy from. Must be a URL to a resource in Google Cloud Storage in the form 'http(s)://storage.googleapis.com//'. """ if files_count is not None: pulumi.set(__self__, "files_count", files_count) if source_url is not None: pulumi.set(__self__, "source_url", source_url) @property @pulumi.getter(name="filesCount") def files_count(self) -> Optional[pulumi.Input[int]]: """ An estimate of the number of files in a zip for a zip deployment. If set, must be greater than or equal to the actual number of files. Used for optimizing performance; if not provided, deployment may be slow. """ return pulumi.get(self, "files_count") @files_count.setter def files_count(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "files_count", value) @property @pulumi.getter(name="sourceUrl") def source_url(self) -> Optional[pulumi.Input[str]]: """ URL of the zip file to deploy from. 
Must be a URL to a resource in Google Cloud Storage in the form 'http(s)://storage.googleapis.com//'. """ return pulumi.get(self, "source_url") @source_url.setter def source_url(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "source_url", value)
46.828216
981
0.683863
13,901
112,856
5.374649
0.059061
0.084657
0.09918
0.050353
0.814664
0.712928
0.642873
0.566515
0.541887
0.497464
0
0.001759
0.218987
112,856
2,409
982
46.847655
0.845885
0.336189
0
0.366022
1
0
0.132001
0.048705
0
0
0
0
0
1
0.203729
false
0
0.004144
0
0.321823
0
0
0
0
null
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
10ada9dfa44f7e1ddefd490b672280b438bd0726
300
py
Python
desktop/core/ext-py/django-extensions-0.5/django_extensions/tests/__init__.py
thinker0/hue
ee5aecc3db442e962584d3151c0f2eab397d6707
[ "Apache-2.0" ]
19
2015-05-01T19:59:03.000Z
2021-12-09T08:03:16.000Z
desktop/core/ext-py/django-extensions-0.5/django_extensions/tests/__init__.py
jesman/hue
21edfc1b790510e512216ab5cc8aeb1a84255de3
[ "Apache-2.0" ]
1
2018-01-03T15:26:49.000Z
2018-01-03T15:26:49.000Z
desktop/core/ext-py/django-extensions-0.5/django_extensions/tests/__init__.py
jesman/hue
21edfc1b790510e512216ab5cc8aeb1a84255de3
[ "Apache-2.0" ]
30
2015-03-25T19:40:07.000Z
2021-05-28T22:59:26.000Z
from django.db import models from django_extensions.tests.utils import UTILS_TESTS try: from django_extensions.tests.encrypted_fields import EncryptedFieldsTestCase from django_extensions.tests.models import Secret except ImportError: pass __test__ = { 'UTILS_TESTS': UTILS_TESTS, }
25
80
0.803333
37
300
6.216216
0.459459
0.173913
0.26087
0.326087
0
0
0
0
0
0
0
0
0.143333
300
11
81
27.272727
0.894942
0
0
0
0
0
0.036667
0
0
0
0
0
0
1
0
false
0.1
0.5
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
4
10b40932ab2ef5da7bf0661d04d9fdaba4defdaf
332
py
Python
vogais.py
RodFernandes/Python_USP_Curso_Ciencias_da_Computacao_1
9bbcb2ccf4ca4330c8b00b5b0920c307be1f15ae
[ "Apache-2.0" ]
null
null
null
vogais.py
RodFernandes/Python_USP_Curso_Ciencias_da_Computacao_1
9bbcb2ccf4ca4330c8b00b5b0920c307be1f15ae
[ "Apache-2.0" ]
null
null
null
vogais.py
RodFernandes/Python_USP_Curso_Ciencias_da_Computacao_1
9bbcb2ccf4ca4330c8b00b5b0920c307be1f15ae
[ "Apache-2.0" ]
1
2020-01-23T13:37:19.000Z
2020-01-23T13:37:19.000Z
def vogal(letra): if(letra=='a') or (letra=='A'): return True if (letra=='e') or (letra=='E'): return True if (letra=='i') or (letra=='I'): return True if (letra=='o') or (letra=='O'): return True if (letra=='u') or (letra=='U'): return True else: return False
25.538462
36
0.475904
46
332
3.434783
0.304348
0.221519
0.303797
0.43038
0
0
0
0
0
0
0
0
0.316265
332
13
37
25.538462
0.696035
0
0
0.384615
0
0
0.03003
0
0
0
0
0
0
1
0.076923
false
0
0
0
0.538462
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
10bd1d4166237fb7edc99ef59e4092f86bb24ab9
169
py
Python
gevent_flask.py
brandonxiang/weapp-handsome-backend
13c82dbefc155c50d7e4b5684e87859803bcdd36
[ "MIT" ]
33
2016-07-07T04:48:34.000Z
2020-02-26T08:03:07.000Z
gevent_flask.py
brandonxiang/weapp-handsome-backend
13c82dbefc155c50d7e4b5684e87859803bcdd36
[ "MIT" ]
5
2021-03-19T16:03:23.000Z
2022-03-12T00:54:55.000Z
gevent_flask.py
yuyf-fsd/pyMap_webapp
0023a23cc9d73f9d1a1f7ea73a326439a7e82490
[ "MIT" ]
8
2017-08-01T02:35:20.000Z
2021-09-29T07:00:42.000Z
from gevent import monkey monkey.patch_all() from gevent.wsgi import WSGIServer from app import app http_server = WSGIServer(('',5000),app) http_server.serve_forever()
21.125
39
0.798817
25
169
5.24
0.56
0.152672
0.198473
0
0
0
0
0
0
0
0
0.02649
0.106509
169
8
40
21.125
0.84106
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
52a4c5093e94deb4d43e4b87ed2c834cc1dc9f61
407
py
Python
FSJ_django20_project/FSJ/forms/__init__.py
CMPUT401FSJ/FSJAwards
01f630e5060d70eecf7cb8f35b576f8799e2d7af
[ "MIT" ]
6
2018-02-03T21:37:14.000Z
2020-11-20T19:07:20.000Z
FSJ_django20_project/FSJ/forms/__init__.py
CMPUT401FSJ/FSJAwards
01f630e5060d70eecf7cb8f35b576f8799e2d7af
[ "MIT" ]
145
2018-02-01T02:38:17.000Z
2018-06-06T16:22:05.000Z
FSJ_django20_project/FSJ/forms/__init__.py
CMPUT401FSJ/FSJAwards
01f630e5060d70eecf7cb8f35b576f8799e2d7af
[ "MIT" ]
4
2018-05-04T22:04:29.000Z
2020-10-01T11:45:15.000Z
from .forms_student import * from .forms_adjudicator import * from .forms_coordinator import * from .forms_award import * from .forms_yearofstudy import * from .forms_committee import * from .forms_program import ProgramForm from .forms_application import * from .forms_fileupload import * from .forms_comment import * from .forms_signup import * from .forms_ranking import * from .forms_datechange import *
31.307692
38
0.813268
53
407
6
0.320755
0.367925
0.518868
0
0
0
0
0
0
0
0
0
0.125307
407
13
39
31.307692
0.893258
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
52cdff69e52620fcd02a0bc33db2b378f33c19b4
24,639
py
Python
torpy/dirs.py
recurrence/torpy
44f56de5c0350a379dd097442cca8153127c852d
[ "Apache-2.0" ]
230
2019-07-18T14:22:28.000Z
2022-03-28T10:12:37.000Z
torpy/dirs.py
recurrence/torpy
44f56de5c0350a379dd097442cca8153127c852d
[ "Apache-2.0" ]
37
2019-07-26T16:20:12.000Z
2022-03-03T23:24:04.000Z
torpy/dirs.py
recurrence/torpy
44f56de5c0350a379dd097442cca8153127c852d
[ "Apache-2.0" ]
49
2019-08-08T11:48:20.000Z
2022-03-11T21:07:06.000Z
# flake8: noqa # tor ref src\app\config\auth_dirs.inc AUTHORITY_DIRS = """ "moria1 orport=9101 " "v3ident=D586D18309DED4CD6D57C18FDB97EFA96D330566 " "128.31.0.39:9131 9695 DFC3 5FFE B861 329B 9F1A B04C 4639 7020 CE31", "tor26 orport=443 " "v3ident=14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4 " "ipv6=[2001:858:2:2:aabb:0:563b:1526]:443 " "86.59.21.38:80 847B 1F85 0344 D787 6491 A548 92F9 0493 4E4E B85D", "dizum orport=443 " "v3ident=E8A9C45EDE6D711294FADF8E7951F4DE6CA56B58 " "45.66.33.45:80 7EA6 EAD6 FD83 083C 538F 4403 8BBF A077 587D D755", "Serge orport=9001 bridge " "66.111.2.131:9030 BA44 A889 E64B 93FA A2B1 14E0 2C2A 279A 8555 C533", "gabelmoo orport=443 " "v3ident=ED03BB616EB2F60BEC80151114BB25CEF515B226 " "ipv6=[2001:638:a000:4140::ffff:189]:443 " "131.188.40.189:80 F204 4413 DAC2 E02E 3D6B CF47 35A1 9BCA 1DE9 7281", "dannenberg orport=443 " "v3ident=0232AF901C31A04EE9848595AF9BB7620D4C5B2E " "ipv6=[2001:678:558:1000::244]:443 " "193.23.244.244:80 7BE6 83E6 5D48 1413 21C5 ED92 F075 C553 64AC 7123", "maatuska orport=80 " "v3ident=49015F787433103580E3B66A1707A00E60F2D15B " "ipv6=[2001:67c:289c::9]:80 " "171.25.193.9:443 BD6A 8292 55CB 08E6 6FBE 7D37 4836 3586 E46B 3810", "Faravahar orport=443 " "v3ident=EFCBE720AB3A82B99F9E953CD5BF50F7EEFC7B97 " "154.35.175.225:80 CF6D 0AAF B385 BE71 B8E1 11FC 5CFF 4B47 9237 33BC", "longclaw orport=443 " "v3ident=23D15D965BC35114467363C165C4F724B64B4F66 " "199.58.81.140:80 74A9 1064 6BCE EFBC D2E8 74FC 1DC9 9743 0F96 8145", "bastet orport=443 " "v3ident=27102BC123E7AF1D4741AE047E160C91ADC76B21 " "ipv6=[2620:13:4000:6000::1000:118]:443 " "204.13.164.118:80 24E2 F139 121D 4394 C54B 5BCC 368B 3B41 1857 C413", """ FALLBACK_DIRS = """ /* type=fallback */ /* version=3.0.0 */ /* timestamp=20200723133610 */ /* source=offer-list */ /* ===== */ /* Offer list excluded 1807 of 1978 candidates. */ /* Checked IPv4 DirPorts served a consensus within 15.0s. 
*/ /* Final Count: 144 (Eligible 171, Target 447 (2239 * 0.20), Max 200) Excluded: 27 (Same Operator 15, Failed/Skipped Download 6, Excess 6) Bandwidth Range: 0.6 - 96.1 MByte/s */ /* Onionoo Source: details Date: 2020-07-23 13:00:00 Version: 8.0 URL: https:onionoo.torproject.orgdetails?fieldsfingerprint%2Cnickname%2Ccontact%2Clast_changed_address_or_port%2Cconsensus_weight%2Cadvertised_bandwidth%2Cor_addresses%2Cdir_address%2Crecommended_version%2Cflags%2Ceffective_family%2Cplatform&typerelay&first_seen_days90-&last_seen_days-0&flagV2Dir&order-consensus_weight%2Cfirst_seen */ /* Onionoo Source: uptime Date: 2020-07-23 13:00:00 Version: 8.0 URL: https:onionoo.torproject.orguptime?typerelay&first_seen_days90-&last_seen_days-0&flagV2Dir&order-consensus_weight%2Cfirst_seen */ /* ===== */ "185.225.17.3:80 orport=443 id=0338F9F55111FE8E3570E7DE117EF3AF999CC1D7" " ipv6=[2a0a:c800:1:5::3]:443" /* nickname=Nebuchadnezzar */ /* extrainfo=0 */ /* ===== */ , "81.7.10.193:9002 orport=993 id=03C3069E814E296EB18776EB61B1ECB754ED89FE" /* nickname=Ichotolot61 */ /* extrainfo=1 */ /* ===== */ , "163.172.149.155:80 orport=443 id=0B85617241252517E8ECF2CFC7F4C1A32DCD153F" /* nickname=niij02 */ /* extrainfo=0 */ /* ===== */ , "5.200.21.144:80 orport=443 id=0C039F35C2E40DCB71CD8A07E97C7FD7787D42D6" /* nickname=libel */ /* extrainfo=0 */ /* ===== */ , "81.7.18.7:9030 orport=9001 id=0C475BA4D3AA3C289B716F95954CAD616E50C4E5" /* nickname=Freebird32 */ /* extrainfo=1 */ /* ===== */ , "193.234.15.60:80 orport=443 id=0F6E5CA4BF5565D9AA9FDDCA165AFC6A5305763D" " ipv6=[2a00:1c20:4089:1234:67bc:79f3:61c0:6e49]:443" /* nickname=jaures3 */ /* extrainfo=0 */ /* ===== */ , "93.177.67.71:9030 orport=8080 id=113143469021882C3A4B82F084F8125B08EE471E" " ipv6=[2a03:4000:38:559::2]:8080" /* nickname=parasol */ /* extrainfo=0 */ /* ===== */ , "37.120.174.249:80 orport=443 id=11DF0017A43AF1F08825CD5D973297F81AB00FF3" " ipv6=[2a03:4000:6:724c:df98:15f9:b34d:443]:443" /* nickname=gGDHjdcC6zAlM8k08lX */ /* 
extrainfo=0 */ /* ===== */ , "193.11.114.43:9030 orport=9001 id=12AD30E5D25AA67F519780E2111E611A455FDC89" " ipv6=[2001:6b0:30:1000::99]:9050" /* nickname=mdfnet1 */ /* extrainfo=0 */ /* ===== */ , "37.157.195.87:8030 orport=443 id=12FD624EE73CEF37137C90D38B2406A66F68FAA2" /* nickname=thanatosCZ */ /* extrainfo=0 */ /* ===== */ , "193.234.15.61:80 orport=443 id=158581827034DEF1BAB1FC248D180165452E53D3" " ipv6=[2a00:1c20:4089:1234:2712:a3d0:666b:88a6]:443" /* nickname=bakunin3 */ /* extrainfo=0 */ /* ===== */ , "51.15.78.0:9030 orport=9001 id=15BE17C99FACE24470D40AF782D6A9C692AB36D6" " ipv6=[2001:bc8:1824:c4b::1]:9001" /* nickname=rofltor07 */ /* extrainfo=0 */ /* ===== */ , "204.11.50.131:9030 orport=9001 id=185F2A57B0C4620582602761097D17DB81654F70" /* nickname=BoingBoing */ /* extrainfo=0 */ /* ===== */ , "50.7.74.171:9030 orport=9001 id=1CD17CB202063C51C7DAD3BACEF87ECE81C2350F" " ipv6=[2001:49f0:d002:2::51]:443" /* nickname=theia1 */ /* extrainfo=0 */ /* ===== */ , "199.184.246.250:80 orport=443 id=1F6ABD086F40B890A33C93CC4606EE68B31C9556" " ipv6=[2620:124:1009:1::171]:443" /* nickname=dao */ /* extrainfo=0 */ /* ===== */ , "212.47.229.2:9030 orport=9001 id=20462CBA5DA4C2D963567D17D0B7249718114A68" " ipv6=[2001:bc8:47ac:23a::1]:9001" /* nickname=scaletor */ /* extrainfo=0 */ /* ===== */ , "77.247.181.164:80 orport=443 id=204DFD2A2C6A0DC1FA0EACB495218E0B661704FD" /* nickname=HaveHeart */ /* extrainfo=0 */ /* ===== */ , "163.172.176.167:80 orport=443 id=230A8B2A8BA861210D9B4BA97745AEC217A94207" /* nickname=niij01 */ /* extrainfo=0 */ /* ===== */ , "193.234.15.57:80 orport=443 id=24D0491A2ADAAB52C17625FBC926D84477AEA322" " ipv6=[2a00:1c20:4089:1234:7825:2c5d:1ecd:c66f]:443" /* nickname=bakunin */ /* extrainfo=0 */ /* ===== */ , "185.220.101.137:20137 orport=10137 id=28F4F392F8F19E3FBDE09616D9DB8143A1E2DDD3" " ipv6=[2a0b:f4c2:1::137]:10137" /* nickname=niftycottonmouse */ /* extrainfo=0 */ /* ===== */ , "138.201.250.33:9012 orport=9011 
id=2BA2C8E96B2590E1072AECE2BDB5C48921BF8510" /* nickname=storm */ /* extrainfo=0 */ /* ===== */ , "5.181.50.99:80 orport=443 id=2BB85DC5BD3C6F0D81A4F2B5882176C6BF7ECF5A" " ipv6=[2a03:4000:3f:16c:3851:6bff:fe07:bd2]:443" /* nickname=AlanTuring */ /* extrainfo=0 */ /* ===== */ , "97.74.237.196:9030 orport=9001 id=2F0F32AB1E5B943CA7D062C03F18960C86E70D94" /* nickname=Minotaur */ /* extrainfo=0 */ /* ===== */ , "94.230.208.147:8080 orport=8443 id=311A4533F7A2415F42346A6C8FA77E6FD279594C" " ipv6=[2a02:418:6017::147]:8443" /* nickname=DigiGesTor3e2 */ /* extrainfo=0 */ /* ===== */ , "109.105.109.162:52860 orport=60784 id=32EE911D968BE3E016ECA572BB1ED0A9EE43FC2F" " ipv6=[2001:948:7:2::163]:5001" /* nickname=ndnr1 */ /* extrainfo=0 */ /* ===== */ , "185.100.84.212:80 orport=443 id=330CD3DB6AD266DC70CDB512B036957D03D9BC59" " ipv6=[2a06:1700:0:7::1]:443" /* nickname=TeamTardis */ /* extrainfo=0 */ /* ===== */ , "64.79.152.132:80 orport=443 id=375DCBB2DBD94E5263BC0C015F0C9E756669617E" /* nickname=ebola */ /* extrainfo=0 */ /* ===== */ , "198.50.191.95:80 orport=443 id=39F096961ED2576975C866D450373A9913AFDC92" /* nickname=shhovh */ /* extrainfo=0 */ /* ===== */ , "50.7.74.174:9030 orport=9001 id=3AFDAAD91A15B4C6A7686A53AA8627CA871FF491" " ipv6=[2001:49f0:d002:2::57]:443" /* nickname=theia7 */ /* extrainfo=0 */ /* ===== */ , "212.83.154.33:8888 orport=443 id=3C79699D4FBC37DE1A212D5033B56DAE079AC0EF" " ipv6=[2001:bc8:31d3:1dd::1]:443" /* nickname=bauruine203 */ /* extrainfo=0 */ /* ===== */ , "51.38.65.160:9030 orport=9001 id=3CB4193EF4E239FCEDC4DC43468E0B0D6B67ACC3" " ipv6=[2001:41d0:801:2000::f6e]:9001" /* nickname=rofltor10 */ /* extrainfo=0 */ /* ===== */ , "95.216.211.81:80 orport=443 id=3CCF9573F59137E52787D9C322AC19D2BD090B70" " ipv6=[2a01:4f9:c010:4dfa::1]:443" /* nickname=BurningMan */ /* extrainfo=0 */ /* ===== */ , "217.79.179.177:9030 orport=9001 id=3E53D3979DB07EFD736661C934A1DED14127B684" " ipv6=[2001:4ba0:fff9:131:6c4f::90d3]:9001" /* nickname=Unnamed */ /* 
extrainfo=0 */ /* ===== */ , "66.111.2.16:9030 orport=9001 id=3F092986E9B87D3FDA09B71FA3A602378285C77A" " ipv6=[2610:1c0:0:5::16]:9001" /* nickname=NYCBUG1 */ /* extrainfo=0 */ /* ===== */ , "185.100.85.101:9030 orport=9001 id=4061C553CA88021B8302F0814365070AAE617270" /* nickname=TorExitRomania */ /* extrainfo=0 */ /* ===== */ , "163.172.157.213:8080 orport=443 id=4623A9EC53BFD83155929E56D6F7B55B5E718C24" /* nickname=Cotopaxi */ /* extrainfo=0 */ /* ===== */ , "193.70.43.76:9030 orport=9001 id=484A10BA2B8D48A5F0216674C8DD50EF27BC32F3" /* nickname=Aerodynamik03 */ /* extrainfo=0 */ /* ===== */ , "109.70.100.4:80 orport=443 id=4BFC9C631A93FF4BA3AA84BC6931B4310C38A263" " ipv6=[2a03:e600:100::4]:443" /* nickname=karotte */ /* extrainfo=0 */ /* ===== */ , "81.7.13.84:80 orport=443 id=4EB55679FA91363B97372554F8DC7C63F4E5B101" " ipv6=[2a02:180:1:1::5b8f:538c]:443" /* nickname=torpidsDEisppro */ /* extrainfo=0 */ /* ===== */ , "108.53.208.157:80 orport=443 id=4F0DB7E687FC7C0AE55C8F243DA8B0EB27FBF1F2" /* nickname=Binnacle */ /* extrainfo=1 */ /* ===== */ , "5.9.158.75:9030 orport=9001 id=509EAB4C5D10C9A9A24B4EA0CE402C047A2D64E6" " ipv6=[2a01:4f8:190:514a::2]:9001" /* nickname=zwiebeltoralf2 */ /* extrainfo=1 */ /* ===== */ , "69.30.215.42:80 orport=443 id=510176C07005D47B23E6796F02C93241A29AA0E9" " ipv6=[2604:4300:a:2e:21b:21ff:fe11:392]:443" /* nickname=torpidsUSwholesale */ /* extrainfo=0 */ /* ===== */ , "176.223.141.106:80 orport=443 id=5262556D44A7F2434990FDE1AE7973C67DF49E58" /* nickname=Theoden */ /* extrainfo=0 */ /* ===== */ , "85.25.159.65:995 orport=80 id=52BFADA8BEAA01BA46C8F767F83C18E2FE50C1B9" /* nickname=BeastieJoy63 */ /* extrainfo=0 */ /* ===== */ , "193.234.15.59:80 orport=443 id=562434D987CF49D45649B76ADCA993BEA8F78471" " ipv6=[2a00:1c20:4089:1234:bff6:e1bb:1ce3:8dc6]:443" /* nickname=bakunin2 */ /* extrainfo=0 */ /* ===== */ , "89.234.157.254:80 orport=443 id=578E007E5E4535FBFEF7758D8587B07B4C8C5D06" " ipv6=[2001:67c:2608::1]:443" /* nickname=marylou1 */ 
/* extrainfo=0 */ /* ===== */ , "172.98.193.43:80 orport=443 id=5E56738E7F97AA81DEEF59AF28494293DFBFCCDF" /* nickname=Backplane */ /* extrainfo=0 */ /* ===== */ , "163.172.139.104:8080 orport=443 id=68F175CCABE727AA2D2309BCD8789499CEE36ED7" /* nickname=Pichincha */ /* extrainfo=0 */ /* ===== */ , "95.217.16.212:80 orport=443 id=6A7551EEE18F78A9813096E82BF84F740D32B911" " ipv6=[2a01:4f9:c010:609a::1]:443" /* nickname=TorMachine */ /* extrainfo=0 */ /* ===== */ , "78.156.110.135:9093 orport=9092 id=7262B9D2EDE0B6A266C4B43D6202209BF6BBA888" /* nickname=SkynetRenegade */ /* extrainfo=0 */ /* ===== */ , "85.235.250.88:80 orport=443 id=72B2B12A3F60408BDBC98C6DF53988D3A0B3F0EE" " ipv6=[2a01:3a0:1:1900:85:235:250:88]:443" /* nickname=TykRelay01 */ /* extrainfo=0 */ /* ===== */ , "178.17.170.23:9030 orport=9001 id=742C45F2D9004AADE0077E528A4418A6A81BC2BA" " ipv6=[2a00:1dc0:caff:7d::8254]:9001" /* nickname=TorExitMoldova2 */ /* extrainfo=0 */ /* ===== */ , "81.7.14.31:9001 orport=443 id=7600680249A22080ECC6173FBBF64D6FCF330A61" /* nickname=Ichotolot62 */ /* extrainfo=1 */ /* ===== */ , "62.171.144.155:80 orport=443 id=7614EF326635DA810638E2F5D449D10AE2BB7158" " ipv6=[2a02:c207:3004:8874::1]:443" /* nickname=Nicenstein */ /* extrainfo=0 */ /* ===== */ , "77.247.181.166:80 orport=443 id=77131D7E2EC1CA9B8D737502256DA9103599CE51" /* nickname=CriticalMass */ /* extrainfo=0 */ /* ===== */ , "5.196.23.64:9030 orport=9001 id=775B0FAFDE71AADC23FFC8782B7BEB1D5A92733E" /* nickname=Aerodynamik01 */ /* extrainfo=0 */ /* ===== */ , "185.244.193.141:9030 orport=9001 id=79509683AB4C8DDAF90A120C69A4179C6CD5A387" " ipv6=[2a03:4000:27:192:24:12:1984:4]:9001" /* nickname=DerDickeReloaded */ /* extrainfo=0 */ /* ===== */ , "82.223.21.74:9030 orport=9001 id=7A32C9519D80CA458FC8B034A28F5F6815649A98" " ipv6=[2001:ba0:1800:6c::1]:9001" /* nickname=silentrocket */ /* extrainfo=0 */ /* ===== */ , "51.254.136.195:80 orport=443 id=7BB70F8585DFC27E75D692970C0EEB0F22983A63" /* nickname=torproxy02 */ /* 
extrainfo=0 */ /* ===== */ , "77.247.181.162:80 orport=443 id=7BFB908A3AA5B491DA4CA72CCBEE0E1F2A939B55" /* nickname=sofia */ /* extrainfo=0 */ /* ===== */ , "193.11.114.45:9031 orport=9002 id=80AAF8D5956A43C197104CEF2550CD42D165C6FB" /* nickname=mdfnet2 */ /* extrainfo=0 */ /* ===== */ , "51.254.96.208:9030 orport=9001 id=8101421BEFCCF4C271D5483C5AABCAAD245BBB9D" " ipv6=[2001:41d0:401:3100::30dc]:9001" /* nickname=rofltor01 */ /* extrainfo=0 */ /* ===== */ , "152.89.106.147:9030 orport=9001 id=8111FEB45EF2950EB8F84BFD8FF070AB07AEE9DD" " ipv6=[2a03:4000:39:605:c4f2:c9ff:fe64:c215]:9001" /* nickname=TugaOnionMR3 */ /* extrainfo=0 */ /* ===== */ , "192.42.116.16:80 orport=443 id=81B75D534F91BFB7C57AB67DA10BCEF622582AE8" /* nickname=hviv104 */ /* extrainfo=0 */ /* ===== */ , "192.87.28.82:9030 orport=9001 id=844AE9CAD04325E955E2BE1521563B79FE7094B7" " ipv6=[2001:678:230:3028:192:87:28:82]:9001" /* nickname=Smeerboel */ /* extrainfo=0 */ /* ===== */ , "85.228.136.92:9030 orport=443 id=855BC2DABE24C861CD887DB9B2E950424B49FC34" /* nickname=Logforme */ /* extrainfo=0 */ /* ===== */ , "178.254.7.88:8080 orport=8443 id=85A885433E50B1874F11CEC9BE98451E24660976" /* nickname=wr3ck3d0ni0n01 */ /* extrainfo=0 */ /* ===== */ , "163.172.194.53:9030 orport=9001 id=8C00FA7369A7A308F6A137600F0FA07990D9D451" " ipv6=[2001:bc8:225f:142:6c69:7461:7669:73]:9001" /* nickname=GrmmlLitavis */ /* extrainfo=0 */ /* ===== */ , "188.138.102.98:465 orport=443 id=8CAA470B905758742203E3EB45941719FCA9FEEC" /* nickname=BeastieJoy64 */ /* extrainfo=0 */ /* ===== */ , "109.70.100.6:80 orport=443 id=8CF987FF43FB7F3D9AA4C4F3D96FFDF247A9A6C2" " ipv6=[2a03:e600:100::6]:443" /* nickname=zucchini */ /* extrainfo=0 */ /* ===== */ , "5.189.169.190:8030 orport=8080 id=8D79F73DCD91FC4F5017422FAC70074D6DB8DD81" /* nickname=thanatosDE */ /* extrainfo=0 */ /* ===== */ , "80.67.172.162:80 orport=443 id=8E6EDA78D8E3ABA88D877C3E37D6D4F0938C7B9F" " ipv6=[2001:910:1410:600::1]:443" /* nickname=AlGrothendieck */ /* 
extrainfo=0 */ /* ===== */ , "54.37.139.118:9030 orport=9001 id=90A5D1355C4B5840E950EB61E673863A6AE3ACA1" " ipv6=[2001:41d0:601:1100::1b8]:9001" /* nickname=rofltor09 */ /* extrainfo=0 */ /* ===== */ , "96.253.78.108:80 orport=443 id=924B24AFA7F075D059E8EEB284CC400B33D3D036" /* nickname=NSDFreedom */ /* extrainfo=0 */ /* ===== */ , "109.70.100.5:80 orport=443 id=9661AC95717798884F3E3727D360DD98D66727CC" " ipv6=[2a03:e600:100::5]:443" /* nickname=erdapfel */ /* extrainfo=0 */ /* ===== */ , "173.212.254.192:31336 orport=31337 id=99E246DB480B313A3012BC3363093CC26CD209C7" " ipv6=[2a02:c207:3002:3972::1]:31337" /* nickname=ViDiSrv */ /* extrainfo=0 */ /* ===== */ , "188.127.69.60:80 orport=443 id=9B2BC7EFD661072AFADC533BE8DCF1C19D8C2DCC" " ipv6=[2a02:29d0:8008:c0de:bad:beef::]:443" /* nickname=MIGHTYWANG */ /* extrainfo=0 */ /* ===== */ , "185.100.86.128:9030 orport=9001 id=9B31F1F1C1554F9FFB3455911F82E818EF7C7883" " ipv6=[2a06:1700:1::11]:9001" /* nickname=TorExitFinland */ /* extrainfo=0 */ /* ===== */ , "95.142.161.63:80 orport=443 id=9BA84E8C90083676F86C7427C8D105925F13716C" " ipv6=[2001:4b98:dc0:47:216:3eff:fe3d:888c]:443" /* nickname=ekumen */ /* extrainfo=0 */ /* ===== */ , "86.105.212.130:9030 orport=443 id=9C900A7F6F5DD034CFFD192DAEC9CCAA813DB022" /* nickname=firstor2 */ /* extrainfo=0 */ /* ===== */ , "46.28.110.244:80 orport=443 id=9F7D6E6420183C2B76D3CE99624EBC98A21A967E" /* nickname=Nivrim */ /* extrainfo=0 */ /* ===== */ , "46.165.230.5:80 orport=443 id=A0F06C2FADF88D3A39AA3072B406F09D7095AC9E" /* nickname=Dhalgren */ /* extrainfo=1 */ /* ===== */ , "193.234.15.55:80 orport=443 id=A1B28D636A56AAFFE92ADCCA937AA4BD5333BB4C" " ipv6=[2a00:1c20:4089:1234:7b2c:11c5:5221:903e]:443" /* nickname=bakunin4 */ /* extrainfo=0 */ /* ===== */ , "128.31.0.13:80 orport=443 id=A53C46F5B157DD83366D45A8E99A244934A14C46" /* nickname=csailmitexit */ /* extrainfo=0 */ /* ===== */ , "212.47.233.86:9130 orport=9101 id=A68097FE97D3065B1A6F4CE7187D753F8B8513F5" /* 
nickname=olabobamanmu */ /* extrainfo=0 */ /* ===== */ , "163.172.149.122:80 orport=443 id=A9406A006D6E7B5DA30F2C6D4E42A338B5E340B2" /* nickname=niij03 */ /* extrainfo=0 */ /* ===== */ , "176.10.107.180:9030 orport=9001 id=AC2BEDD0BAC72838EA7E6F113F856C4E8018ACDB" /* nickname=schokomilch */ /* extrainfo=0 */ /* ===== */ , "195.154.164.243:80 orport=443 id=AC66FFA4AB35A59EBBF5BF4C70008BF24D8A7A5C" " ipv6=[2001:bc8:399f:f000::1]:993" /* nickname=torpidsFRonline3 */ /* extrainfo=0 */ /* ===== */ , "185.129.62.62:9030 orport=9001 id=ACDD9E85A05B127BA010466C13C8C47212E8A38F" " ipv6=[2a06:d380:0:3700::62]:9001" /* nickname=kramse */ /* extrainfo=0 */ /* ===== */ , "188.40.128.246:9030 orport=9001 id=AD19490C7DBB26D3A68EFC824F67E69B0A96E601" " ipv6=[2a01:4f8:221:1ac1:dead:beef:7005:9001]:9001" /* nickname=sputnik */ /* extrainfo=0 */ /* ===== */ , "176.10.104.240:8080 orport=8443 id=AD86CD1A49573D52A7B6F4A35750F161AAD89C88" /* nickname=DigiGesTor1e2 */ /* extrainfo=0 */ /* ===== */ , "178.17.174.14:9030 orport=9001 id=B06F093A3D4DFAD3E923F4F28A74901BD4F74EB1" " ipv6=[2a00:1dc0:caff:8b::5b9a]:9001" /* nickname=TorExitMoldova */ /* extrainfo=0 */ /* ===== */ , "212.129.62.232:80 orport=443 id=B143D439B72D239A419F8DCE07B8A8EB1B486FA7" /* nickname=wardsback */ /* extrainfo=0 */ /* ===== */ , "109.70.100.2:80 orport=443 id=B27CF1DCEECD50F7992B07D720D7F6BF0EDF9D40" " ipv6=[2a03:e600:100::2]:443" /* nickname=radieschen */ /* extrainfo=0 */ /* ===== */ , "136.243.214.137:80 orport=443 id=B291D30517D23299AD7CEE3E60DFE60D0E3A4664" /* nickname=TorKIT */ /* extrainfo=0 */ /* ===== */ , "93.115.97.242:9030 orport=9001 id=B5212DB685A2A0FCFBAE425738E478D12361710D" /* nickname=firstor */ /* extrainfo=0 */ /* ===== */ , "193.11.114.46:9032 orport=9003 id=B83DC1558F0D34353BB992EF93AFEAFDB226A73E" /* nickname=mdfnet3 */ /* extrainfo=0 */ /* ===== */ , "85.248.227.164:444 orport=9002 id=B84F248233FEA90CAD439F292556A3139F6E1B82" " ipv6=[2a00:1298:8011:212::164]:9004" /* nickname=tollana */ /* 
extrainfo=0 */ /* ===== */ , "51.15.179.153:110 orport=995 id=BB60F5BA113A0B8B44B7B37DE3567FE561E92F78" " ipv6=[2001:bc8:3fec:500:7ea::]:995" /* nickname=Casper04 */ /* extrainfo=0 */ /* ===== */ , "198.96.155.3:8080 orport=5001 id=BCEDF6C193AA687AE471B8A22EBF6BC57C2D285E" /* nickname=gurgle */ /* extrainfo=0 */ /* ===== */ , "128.199.55.207:9030 orport=9001 id=BCEF908195805E03E92CCFE669C48738E556B9C5" " ipv6=[2a03:b0c0:2:d0::158:3001]:9001" /* nickname=EldritchReaper */ /* extrainfo=0 */ /* ===== */ , "213.141.138.174:9030 orport=9001 id=BD552C165E2ED2887D3F1CCE9CFF155DDA2D86E6" /* nickname=Schakalium */ /* extrainfo=0 */ /* ===== */ , "148.251.190.229:9030 orport=9010 id=BF0FB582E37F738CD33C3651125F2772705BB8E8" " ipv6=[2a01:4f8:211:c68::2]:9010" /* nickname=quadhead */ /* extrainfo=0 */ /* ===== */ , "212.47.233.250:9030 orport=9001 id=BF735F669481EE1CCC348F0731551C933D1E2278" " ipv6=[2001:bc8:4400:2b00::1c:629]:9001" /* nickname=freeway */ /* extrainfo=0 */ /* ===== */ , "132.248.241.5:9130 orport=9101 id=C0C4F339046EB824999F711D178472FDF53BE7F5" /* nickname=toritounam2 */ /* extrainfo=0 */ /* ===== */ , "109.70.100.3:80 orport=443 id=C282248597D1C8522A2A7525E61C8B77BBC37614" " ipv6=[2a03:e600:100::3]:443" /* nickname=erbse */ /* extrainfo=0 */ /* ===== */ , "50.7.74.170:9030 orport=9001 id=C36A434DB54C66E1A97A5653858CE36024352C4D" " ipv6=[2001:49f0:d002:2::59]:443" /* nickname=theia9 */ /* extrainfo=0 */ /* ===== */ , "188.138.112.60:1433 orport=1521 id=C414F28FD2BEC1553024299B31D4E726BEB8E788" /* nickname=zebra620 */ /* extrainfo=0 */ /* ===== */ , "178.20.55.18:80 orport=443 id=C656B41AEFB40A141967EBF49D6E69603C9B4A11" /* nickname=marcuse2 */ /* extrainfo=0 */ /* ===== */ , "85.248.227.163:443 orport=9001 id=C793AB88565DDD3C9E4C6F15CCB9D8C7EF964CE9" " ipv6=[2a00:1298:8011:212::163]:9003" /* nickname=ori */ /* extrainfo=0 */ /* ===== */ , "50.7.74.173:80 orport=443 id=C87A4D8B534F78FDF0F4639B55F121401FEF259C" " ipv6=[2001:49f0:d002:2::54]:443" /* 
nickname=theia4 */ /* extrainfo=0 */ /* ===== */ , "176.31.103.150:9030 orport=9001 id=CBD0D1BD110EC52963082D839AC6A89D0AE243E7" /* nickname=UV74S7mjxRcYVrGsAMw */ /* extrainfo=0 */ /* ===== */ , "193.234.15.62:80 orport=443 id=CD0F9AA1A5064430B1DE8E645CBA7A502B27ED5F" " ipv6=[2a00:1c20:4089:1234:a6a4:2926:d0af:dfee]:443" /* nickname=jaures4 */ /* extrainfo=0 */ /* ===== */ , "85.25.213.211:465 orport=80 id=CE47F0356D86CF0A1A2008D97623216D560FB0A8" /* nickname=BeastieJoy61 */ /* extrainfo=0 */ /* ===== */ , "50.7.74.172:80 orport=443 id=D1AFBF3117B308B6D1A7AA762B1315FD86A6B8AF" " ipv6=[2001:49f0:d002:2::52]:443" /* nickname=theia2 */ /* extrainfo=0 */ /* ===== */ , "66.111.2.20:9030 orport=9001 id=D317C7889162E9EC4A1DA1A1095C2A0F377536D9" " ipv6=[2610:1c0:0:5::20]:9001" /* nickname=NYCBUG0 */ /* extrainfo=0 */ /* ===== */ , "5.45.111.149:80 orport=443 id=D405FCCF06ADEDF898DF2F29C9348DCB623031BA" " ipv6=[2a03:4000:6:2388:df98:15f9:b34d:443]:443" /* nickname=gGDHjdcC6zAlM8k08lY */ /* extrainfo=0 */ /* ===== */ , "12.235.151.200:9030 orport=9029 id=D5C33F3E203728EDF8361EA868B2939CCC43FAFB" /* nickname=nx1tor */ /* extrainfo=0 */ /* ===== */ , "212.83.166.62:80 orport=443 id=D7082DB97E7F0481CBF4B88CA5F5683399E196A3" /* nickname=shhop */ /* extrainfo=0 */ /* ===== */ , "54.36.237.163:80 orport=443 id=DB2682153AC0CCAECD2BD1E9EBE99C6815807A1E" /* nickname=GermanCraft2 */ /* extrainfo=0 */ /* ===== */ , "171.25.193.20:80 orport=443 id=DD8BD7307017407FCC36F8D04A688F74A0774C02" " ipv6=[2001:67c:289c::20]:443" /* nickname=DFRI0 */ /* extrainfo=0 */ /* ===== */ , "83.212.99.68:80 orport=443 id=DDBB2A38252ADDA53E4492DDF982CA6CC6E10EC0" " ipv6=[2001:648:2ffc:1225:a800:bff:fe3d:67b5]:443" /* nickname=zouzounella */ /* extrainfo=0 */ /* ===== */ , "166.70.207.2:9130 orport=9101 id=E41B16F7DDF52EBB1DB4268AB2FE340B37AD8904" /* nickname=xmission1 */ /* extrainfo=0 */ /* ===== */ , "185.100.86.182:9030 orport=8080 id=E51620B90DCB310138ED89EDEDD0A5C361AAE24E" /* nickname=NormalCitizen 
*/ /* extrainfo=0 */ /* ===== */ , "212.47.244.38:8080 orport=443 id=E81EF60A73B3809F8964F73766B01BAA0A171E20" /* nickname=Chimborazo */ /* extrainfo=0 */ /* ===== */ , "185.4.132.148:80 orport=443 id=E8D114B3C78D8E6E7FEB1004650DD632C2143C9E" " ipv6=[2a02:c500:2:f0::5492]:443" /* nickname=libreonion1 */ /* extrainfo=0 */ /* ===== */ , "195.154.105.170:9030 orport=9001 id=E947C029087FA1C3499BEF5D4372947C51223D8F" /* nickname=dgplug */ /* extrainfo=0 */ /* ===== */ , "131.188.40.188:1443 orport=11180 id=EBE718E1A49EE229071702964F8DB1F318075FF8" " ipv6=[2001:638:a000:4140::ffff:188]:11180" /* nickname=fluxe4 */ /* extrainfo=1 */ /* ===== */ , "192.87.28.28:9030 orport=9001 id=ED2338CAC2711B3E331392E1ED2831219B794024" " ipv6=[2001:678:230:3028:192:87:28:28]:9001" /* nickname=SEC6xFreeBSD64 */ /* extrainfo=0 */ /* ===== */ , "178.20.55.16:80 orport=443 id=EFAE44728264982224445E96214C15F9075DEE1D" /* nickname=marcuse1 */ /* extrainfo=0 */ /* ===== */ , "217.182.75.181:9030 orport=9001 id=EFEACD781604EB80FBC025EDEDEA2D523AEAAA2F" /* nickname=Aerodynamik02 */ /* extrainfo=0 */ /* ===== */ , "193.234.15.58:80 orport=443 id=F24F8BEA2779A79111F33F6832B062BED306B9CB" " ipv6=[2a00:1c20:4089:1234:cdae:1b3e:cc38:3d45]:443" /* nickname=jaures2 */ /* extrainfo=0 */ /* ===== */ , "129.13.131.140:80 orport=443 id=F2DFE5FA1E4CF54F8E761A6D304B9B4EC69BDAE8" " ipv6=[2a00:1398:5:f604:cafe:cafe:cafe:9001]:443" /* nickname=AlleKochenKaffee */ /* extrainfo=0 */ /* ===== */ , "37.187.102.108:80 orport=443 id=F4263275CF54A6836EE7BD527B1328836A6F06E1" " ipv6=[2001:41d0:a:266c::1]:443" /* nickname=EvilMoe */ /* extrainfo=0 */ /* ===== */ , "5.199.142.236:9030 orport=9001 id=F4C0EDAA0BF0F7EC138746F8FEF1CE26C7860265" /* nickname=tornodenumber9004 */ /* extrainfo=0 */ /* ===== */ , "163.172.154.162:9030 orport=9001 id=F741E5124CB12700DA946B78C9B2DD175D6CD2A1" " ipv6=[2001:bc8:47a0:162a::1]:9001" /* nickname=rofltor06 */ /* extrainfo=0 */ /* ===== */ , "78.47.18.110:443 orport=80 
id=F8D27B163B9247B232A2EEE68DD8B698695C28DE" " ipv6=[2a01:4f8:120:4023::110]:80" /* nickname=fluxe3 */ /* extrainfo=1 */ /* ===== */ , "91.143.88.62:80 orport=443 id=F9246DEF2B653807236DA134F2AEAB103D58ABFE" /* nickname=Freebird31 */ /* extrainfo=1 */ /* ===== */ , "149.56.45.200:9030 orport=9001 id=FE296180018833AF03A8EACD5894A614623D3F76" " ipv6=[2607:5300:201:3000::17d3]:9002" /* nickname=PyotrTorpotkinOne */ /* extrainfo=0 */ /* ===== */ , "62.141.38.69:80 orport=443 id=FF9FC6D130FA26AE3AE8B23688691DC419F0F22E" " ipv6=[2001:4ba0:cafe:ac5::]:443" /* nickname=rinderwahnRelay3L */ /* extrainfo=0 */ /* ===== */ , "193.11.164.243:9030 orport=9001 id=FFA72BD683BC2FCF988356E6BEC1E490F313FB07" " ipv6=[2001:6b0:7:125::243]:9001" /* nickname=Lule */ /* extrainfo=0 */ /* ===== */ , """
29.089728
333
0.695442
2,833
24,639
6.039534
0.333569
0.077148
0.045646
0.047107
0.095441
0.023729
0.018001
0.018001
0.015079
0.015079
0
0.378186
0.093916
24,639
846
334
29.124113
0.388219
0.001989
0
0.346793
0
0.042755
0.997926
0.459592
0
0
0
0
0
1
0
false
0
0
0
0
0.001188
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
52d508b41b37aa6cf6f5b2b26ec5462e875f476d
58
py
Python
src/network/activation.py
jordanjoewatson/natural-language-classifier
f935804c6c2c13098517a015251bf5b30fd12c05
[ "MIT" ]
1
2020-01-23T10:09:52.000Z
2020-01-23T10:09:52.000Z
src/network/activation.py
jordanjoewatson/natural-language-classifier
f935804c6c2c13098517a015251bf5b30fd12c05
[ "MIT" ]
null
null
null
src/network/activation.py
jordanjoewatson/natural-language-classifier
f935804c6c2c13098517a015251bf5b30fd12c05
[ "MIT" ]
null
null
null
def sign(val): if(val >= 0): return 1 else: return -1
14.5
24
0.586207
11
58
3.090909
0.727273
0.411765
0
0
0
0
0
0
0
0
0
0.068182
0.241379
58
3
25
19.333333
0.704545
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
52f5d900177f90aae8d262093b99e53fb1da748c
68
py
Python
ddtrace/contrib/cassandra/patch.py
zhammer/dd-trace-py
4c30f6e36bfa34a63cd9b6884677c977f76d2a01
[ "Apache-2.0", "BSD-3-Clause" ]
5
2020-03-07T01:12:29.000Z
2021-04-21T00:53:19.000Z
ddtrace/contrib/cassandra/patch.py
zhammer/dd-trace-py
4c30f6e36bfa34a63cd9b6884677c977f76d2a01
[ "Apache-2.0", "BSD-3-Clause" ]
4
2019-11-22T20:58:01.000Z
2020-08-17T21:16:13.000Z
ddtrace/contrib/cassandra/patch.py
zhammer/dd-trace-py
4c30f6e36bfa34a63cd9b6884677c977f76d2a01
[ "Apache-2.0", "BSD-3-Clause" ]
3
2020-03-18T16:29:20.000Z
2020-07-20T16:05:10.000Z
from .session import patch, unpatch __all__ = ['patch', 'unpatch']
17
35
0.705882
8
68
5.5
0.75
0.545455
0
0
0
0
0
0
0
0
0
0
0.147059
68
3
36
22.666667
0.758621
0
0
0
0
0
0.176471
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
52fbfe4923f581ee2c55ac18b346c24cdfd5836b
168
py
Python
friends/urls.py
Nijinsha/Inshare
ebbeb904ae20b997df385cad589121bc46d67728
[ "MIT" ]
null
null
null
friends/urls.py
Nijinsha/Inshare
ebbeb904ae20b997df385cad589121bc46d67728
[ "MIT" ]
null
null
null
friends/urls.py
Nijinsha/Inshare
ebbeb904ae20b997df385cad589121bc46d67728
[ "MIT" ]
1
2019-11-21T17:16:49.000Z
2019-11-21T17:16:49.000Z
from django.urls import path from .views import FriendRequestView urlpatterns = [ path('friend_request/', FriendRequestView.as_view(), name="friend_request"), ]
18.666667
80
0.755952
19
168
6.526316
0.684211
0.209677
0
0
0
0
0
0
0
0
0
0
0.130952
168
8
81
21
0.849315
0
0
0
0
0
0.172619
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
5e39fbd98bfcdf641302bf71b0af529ec7d274ca
10,579
py
Python
rocketgram/api/__init__.py
streemline/rocketgram
6f4e69e37c64f51b6bed9ebb6a1413a187d2eee2
[ "MIT" ]
null
null
null
rocketgram/api/__init__.py
streemline/rocketgram
6f4e69e37c64f51b6bed9ebb6a1413a187d2eee2
[ "MIT" ]
1
2022-01-16T13:56:45.000Z
2022-01-16T13:56:45.000Z
rocketgram/api/__init__.py
streemline/rocketgram
6f4e69e37c64f51b6bed9ebb6a1413a187d2eee2
[ "MIT" ]
null
null
null
# Copyright (C) 2015-2022 by Vd. # This file is part of Rocketgram, the modern Telegram bot framework. # Rocketgram is released under the MIT License (see LICENSE). from .add_sticker_to_set import AddStickerToSet from .animation import Animation from .answer_callback_query import AnswerCallbackQuery from .answer_inline_query import AnswerInlineQuery from .answer_pre_checkout_query import AnswerPreCheckoutQuery from .answer_shipping_query import AnswerShippingQuery from .approve_chat_join_request import ApproveChatJoinRequest from .audio import Audio from .ban_chat_member import BanChatMember, KickChatMember from .ban_chat_sender_chat import BanChatSenderChat from .bot_command import BotCommand from .bot_command_scope import BotCommandScope from .bot_command_scope_all_chat_administrators import BotCommandScopeAllChatAdministrators from .bot_command_scope_all_group_chats import BotCommandScopeAllGroupChats from .bot_command_scope_all_private_chats import BotCommandScopeAllPrivateChats from .bot_command_scope_chat import BotCommandScopeChat from .bot_command_scope_chat_administrators import BotCommandScopeChatAdministrators from .bot_command_scope_chat_member import BotCommandScopeChatMember from .bot_command_scope_default import BotCommandScopeDefault from .callback_query import CallbackQuery from .chat import Chat from .chat_action_type import ChatActionType from .chat_invite_link import ChatInviteLink from .chat_join_request import ChatJoinRequest from .chat_location import ChatLocation from .chat_member import ChatMember from .chat_member_status_type import ChatMemberStatusType from .chat_member_updated import ChatMemberUpdated from .chat_permissions import ChatPermissions from .chat_photo import ChatPhoto from .chat_type import ChatType from .chosen_inline_result import ChosenInlineResult from .close import Close from .contact import Contact from .copy_message import CopyMessage from .create_chat_invite_link import CreateChatInviteLink from 
.create_new_sticker_set import CreateNewStickerSet from .decline_chat_join_request import DeclineChatJoinRequest from .delete_chat_photo import DeleteChatPhoto from .delete_chat_sticker_set import DeleteChatStickerSet from .delete_message import DeleteMessage from .delete_my_commands import DeleteMyCommands from .delete_sticker_from_set import DeleteStickerFromSet from .delete_webhook import DeleteWebhook from .dice import Dice from .dice_type import DiceType from .document import Document from .edit_chat_invite_link import EditChatInviteLink from .edit_message_caption import EditMessageCaption from .edit_message_live_location import EditMessageLiveLocation from .edit_message_media import EditMessageMedia from .edit_message_reply_markup import EditMessageReplyMarkup from .edit_message_text import EditMessageText from .encrypted_credentials import EncryptedCredentials from .encrypted_passport_element import EncryptedPassportElement from .encrypted_passport_element_type import EncryptedPassportElementType from .entity_type import EntityType from .export_chat_invite_link import ExportChatInviteLink from .file import File from .force_reply import ForceReply from .forward_message import ForwardMessage from .game import Game from .game_high_score import GameHighScore from .get_chat import GetChat from .get_chat_administrators import GetChatAdministrators from .get_chat_member import GetChatMember from .get_chat_member_count import GetChatMemberCount, GetChatMembersCount from .get_file import GetFile from .get_game_high_scores import GetGameHighScores from .get_me import GetMe from .get_my_commands import GetMyCommands from .get_sticker_set import GetStickerSet from .get_updates import GetUpdates from .get_user_profile_photos import GetUserProfilePhotos from .get_webhook_info import GetWebhookInfo from .inline_keyboard_button import InlineKeyboardButton from .inline_keyboard_markup import InlineKeyboardMarkup from .inline_query import InlineQuery from 
.inline_query_result_article import InlineQueryResultArticle from .inline_query_result_audio import InlineQueryResultAudio from .inline_query_result_cached_audio import InlineQueryResultCachedAudio from .inline_query_result_cached_document import InlineQueryResultCachedDocument from .inline_query_result_cached_gif import InlineQueryResultCachedGif from .inline_query_result_cached_mpeg4_gif import InlineQueryResultCachedMpeg4Gif from .inline_query_result_cached_photo import InlineQueryResultCachedPhoto from .inline_query_result_cached_sticker import InlineQueryResultCachedSticker from .inline_query_result_cached_video import InlineQueryResultCachedVideo from .inline_query_result_cached_voice import InlineQueryResultCachedVoice from .inline_query_result_contact import InlineQueryResultContact from .inline_query_result_document import InlineQueryResultDocument from .inline_query_result_game import InlineQueryResultGame from .inline_query_result_gif import InlineQueryResultGif from .inline_query_result_location import InlineQueryResultLocation from .inline_query_result_mpeg4_gif import InlineQueryResultMpeg4Gif from .inline_query_result_photo import InlineQueryResultPhoto from .inline_query_result_venue import InlineQueryResultVenue from .inline_query_result_video import InlineQueryResultVideo from .inline_query_result_voice import InlineQueryResultVoice from .input_contact_message_content import InputContactMessageContent from .input_file import InputFile from .input_invoice_message_content import InputInvoiceMessageContent from .input_location_message_content import InputLocationMessageContent from .input_media_animation import InputMediaAnimation from .input_media_audio import InputMediaAudio from .input_media_document import InputMediaDocument from .input_media_photo import InputMediaPhoto from .input_media_video import InputMediaVideo from .input_text_message_content import InputTextMessageContent from .input_venue_message_content import InputVenueMessageContent 
from .invoice import Invoice from .keyboard_button import KeyboardButton from .keyboard_button_poll_type import KeyboardButtonPollType from .labeled_price import LabeledPrice from .leave_chat import LeaveChat from .location import Location from .log_out import LogOut from .login_url import LoginUrl from .mask_position import MaskPosition from .mask_position_point_type import MaskPositionPointType from .message import Message from .message_auto_delete_timer_changed import MessageAutoDeleteTimerChanged from .message_entity import MessageEntity from .message_id import MessageId from .message_type import MessageType from .order_info import OrderInfo from .parse_mode_type import ParseModeType from .passport_data import PassportData from .passport_element_error_data_field import PassportElementErrorDataField from .passport_element_error_file import PassportElementErrorFile from .passport_element_error_files import PassportElementErrorFiles from .passport_element_error_front_side import PassportElementErrorFrontSide from .passport_element_error_reverse_side import PassportElementErrorReverseSide from .passport_element_error_selfie import PassportElementErrorSelfie from .passport_element_error_translation_file import PassportElementErrorTranslationFile from .passport_element_error_translation_files import PassportElementErrorTranslationFiles from .passport_element_error_unspecified import PassportElementErrorUnspecified from .password_file import PassportFile from .photo_size import PhotoSize from .pin_chat_message import PinChatMessage from .poll import Poll from .poll_answer import PollAnswer from .poll_option import PollOption from .poll_type import PollType from .pre_checkout_query import PreCheckoutQuery from .promote_chat_member import PromoteChatMember from .proximity_alert_triggered import ProximityAlertTriggered from .reply_keyboard_markup import ReplyKeyboardMarkup from .reply_keyboard_remove import ReplyKeyboardRemove from .request import Request from .response 
import Response from .response_parameters import ResponseParameters from .restrict_chat_member import RestrictChatMember from .revoke_chat_invite_link import RevokeChatInviteLink from .send_animation import SendAnimation from .send_audio import SendAudio from .send_chat_action import SendChatAction from .send_contact import SendContact from .send_dice import SendDice from .send_document import SendDocument from .send_game import SendGame from .send_invoice import SendInvoice from .send_location import SendLocation from .send_media_group import SendMediaGroup from .send_message import SendMessage from .send_photo import SendPhoto from .send_poll import SendPoll from .send_sticker import SendSticker from .send_venue import SendVenue from .send_video import SendVideo from .send_video_note import SendVideoNote from .send_voice import SendVoice from .set_chat_administrator_custom_title import SetChatAdministratorCustomTitle from .set_chat_description import SetChatDescription from .set_chat_permissions import SetChatPermissions from .set_chat_photo import SetChatPhoto from .set_chat_sticker_set import SetChatStickerSet from .set_chat_title import SetChatTitle from .set_game_score import SetGameScore from .set_my_commands import SetMyCommands from .set_passport_data_errors import SetPassportDataErrors from .set_sticker_position_in_set import SetStickerPositionInSet from .set_sticker_set_thumb import SetStickerSetThumb from .set_webhook import SetWebhook from .shipping_address import ShippingAddress from .shipping_option import ShippingOption from .shipping_query import ShippingQuery from .sticker import Sticker from .sticker_set import StickerSet from .stop_message_live_location import StopMessageLiveLocation from .stop_poll import StopPoll from .successful_payment import SuccessfulPayment from .thumb_mime_type import ThumbMimeType from .unban_chat_member import UnbanChatMember from .unban_chat_sender_chat import UnbanChatSenderChat from .unpin_all_chat_messages import 
UnpinAllChatMessages from .unpin_chat_message import UnpinChatMessage from .update import Update from .update_type import UpdateType from .upload_sticker_file import UploadStickerFile from .user import User from .user_profile_photos import UserProfilePhotos from .venue import Venue from .video import Video from .video_note import VideoNote from .voice import Voice from .voice_chat_ended import VoiceChatEnded from .voice_chat_participants_invited import VoiceChatParticipantsInvited from .voice_chat_scheduled import VoiceChatScheduled from .voice_chat_started import VoiceChatStarted from .webhook_info import WebhookInfo
48.976852
91
0.896399
1,281
10,579
7.080406
0.289617
0.025358
0.03473
0.046307
0.046417
0
0
0
0
0
0
0.001236
0.082427
10,579
215
92
49.204651
0.933141
0.014935
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.066667
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
4
ead598df2d1b3c2a71c8ac8b0b628973e57288be
229
py
Python
dependencies/georeference maps/pythongis/vector/fileformats/thirdparty/__init__.py
karimbahgat/AutoMap
eae52f16b7ce71cb2b4b7ae67cf6e4680ea2194f
[ "MIT" ]
4
2015-12-05T14:31:55.000Z
2018-02-09T05:54:36.000Z
dependencies/georeference maps/pythongis/vector/fileformats/thirdparty/__init__.py
karimbahgat/AutoMap
eae52f16b7ce71cb2b4b7ae67cf6e4680ea2194f
[ "MIT" ]
1
2022-01-13T02:52:09.000Z
2022-01-13T02:52:09.000Z
dependencies/georeference maps/pythongis/vector/fileformats/thirdparty/__init__.py
karimbahgat/AutoMap
eae52f16b7ce71cb2b4b7ae67cf6e4680ea2194f
[ "MIT" ]
1
2018-10-24T01:08:11.000Z
2018-10-24T01:08:11.000Z
""" Special container for thirdparty format libs that are not publically available. In particular if we rely on some obscure or special forks of an existing lib, so users don't have to go hunting for them. """ from . import *
25.444444
79
0.755459
38
229
4.552632
0.947368
0
0
0
0
0
0
0
0
0
0
0
0.19214
229
8
80
28.625
0.935135
0.877729
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
d830a0d983ffeb2a2fe652b19bbe2b3a6fb9aea9
144
py
Python
test_files/whitespace3.expected.py
RamonWill/zimports
26f01fd1f7105b510f4723059af77531431b0bd8
[ "MIT" ]
65
2019-01-02T05:44:38.000Z
2021-11-08T11:47:09.000Z
test_files/whitespace3.expected.py
RamonWill/zimports
26f01fd1f7105b510f4723059af77531431b0bd8
[ "MIT" ]
32
2019-01-07T15:43:15.000Z
2022-02-09T20:36:32.000Z
test_files/whitespace3.expected.py
RamonWill/zimports
26f01fd1f7105b510f4723059af77531431b0bd8
[ "MIT" ]
7
2019-01-07T15:11:31.000Z
2020-07-08T17:42:13.000Z
# do things # and do other things # and now begin import bar import foo from hoho import bat3 class Line10MovesToLine8: pass foo bar bat3
10.285714
25
0.75
23
144
4.695652
0.652174
0.166667
0
0
0
0
0
0
0
0
0
0.044643
0.222222
144
13
26
11.076923
0.919643
0.298611
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.125
0.375
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
0
0
0
4
d8330b70fdabd90bceae18d588605b08c2fa7651
1,192
py
Python
we-web/utils/session_manager.py
h-qub/wordeater-web
9a336d1e467d08c6b3875bd8b83dea0dc3b9236d
[ "MIT" ]
null
null
null
we-web/utils/session_manager.py
h-qub/wordeater-web
9a336d1e467d08c6b3875bd8b83dea0dc3b9236d
[ "MIT" ]
3
2016-03-11T08:43:47.000Z
2016-11-01T13:07:48.000Z
we-web/utils/session_manager.py
h-qub/wordeater-web
9a336d1e467d08c6b3875bd8b83dea0dc3b9236d
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from flask import session __author__ = 'Glebov Boris' class UserSession: def __init__(self, user): self.user = user @staticmethod def create(user): """ Create session manager from a dto session user :param user: dto session user :return: UserSession instance """ return UserSession(user) def get(self): return self.user @property def id(self): return self.user['id'] @property def login(self): return self.user['login'] @property def token(self): return self.user['token'] @property def email(self): return self.user['email'] @property def remote_addr(self): return self.user['remote_addr'] @staticmethod def get_current_user(): """ Extract session from current user """ user = session.get('user') return user @staticmethod def check_user_auth(): """ Checks that user is authorized """ user = session.get('user') if user is None or not user['is_auth']: return False return True
19.225806
54
0.561242
133
1,192
4.917293
0.345865
0.097859
0.12844
0.165138
0
0
0
0
0
0
0
0.001266
0.337248
1,192
61
55
19.540984
0.826582
0.162752
0
0.285714
0
0
0.060307
0
0
0
0
0
0
1
0.285714
false
0
0.028571
0.171429
0.628571
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
dc4ac14d6cc00a9d336c8384b46bf6258d566c41
286
py
Python
6.00.1x.Introduction-to-Computer-Science-and-Programming-Using-Python/Solutions/Week 3/1.Radiation Exposure..py
nilesh-patil/MITx-Foundations-of-Computer-Science
2460d1a8d9d1d91db0e453b98353434320e5d9fb
[ "MIT" ]
55
2015-01-07T23:42:17.000Z
2022-02-21T14:01:27.000Z
6.00.1x.Introduction-to-Computer-Science-and-Programming-Using-Python/Solutions/Week 3/1.Radiation Exposure..py
nvngpt31/MITx-Foundations-of-Computer-Science
2460d1a8d9d1d91db0e453b98353434320e5d9fb
[ "MIT" ]
null
null
null
6.00.1x.Introduction-to-Computer-Science-and-Programming-Using-Python/Solutions/Week 3/1.Radiation Exposure..py
nvngpt31/MITx-Foundations-of-Computer-Science
2460d1a8d9d1d91db0e453b98353434320e5d9fb
[ "MIT" ]
50
2015-01-06T18:52:27.000Z
2022-03-29T17:11:46.000Z
def f(x): import math return 10*math.e**(math.log(0.5)/5.27 * x) def radiationExposure(start, stop, step): # FILL IN YOUR CODE HERE... if start >= stop - step: return f(stop-step) * step return (f(start) * step) + radiationExposure(start + step, stop, step)
31.777778
74
0.615385
44
286
4
0.5
0.181818
0.147727
0
0
0
0
0
0
0
0
0.031818
0.230769
286
9
74
31.777778
0.768182
0.087413
0
0
0
0
0
0
0
0
0
0.111111
0
1
0.285714
false
0
0.142857
0
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
0
1
0
0
4
dc6089e33b2f3d746f1a214261d072651c759f5e
95
py
Python
codedigger/codeforces/apps.py
jyothiprakashpanaik/Backend
9ab1b57436a0a1a6197777c0b36c842e71121d3a
[ "Apache-2.0" ]
17
2020-10-07T22:40:37.000Z
2022-01-20T07:19:09.000Z
codedigger/codeforces/apps.py
jyothiprakashpanaik/Backend
9ab1b57436a0a1a6197777c0b36c842e71121d3a
[ "Apache-2.0" ]
42
2021-06-03T01:58:04.000Z
2022-01-31T14:49:22.000Z
codedigger/codeforces/apps.py
jyothiprakashpanaik/Backend
9ab1b57436a0a1a6197777c0b36c842e71121d3a
[ "Apache-2.0" ]
25
2020-10-06T17:55:19.000Z
2021-12-09T07:56:50.000Z
from django.apps import AppConfig class CodeforcesConfig(AppConfig): name = 'codeforces'
15.833333
34
0.768421
10
95
7.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.157895
95
5
35
19
0.9125
0
0
0
0
0
0.105263
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
dc798f0f6892e86f285a40c0b2c6276432550d78
200
py
Python
ddi_search_engine/Bio/Writer.py
dbmi-pitt/DIKB-Evidence-analytics
9ffd629db30c41ced224ff2afdf132ce9276ae3f
[ "MIT" ]
3
2015-06-08T17:58:54.000Z
2022-03-10T18:49:44.000Z
ddi_search_engine/Bio/Writer.py
dbmi-pitt/DIKB-Evidence-analytics
9ffd629db30c41ced224ff2afdf132ce9276ae3f
[ "MIT" ]
null
null
null
ddi_search_engine/Bio/Writer.py
dbmi-pitt/DIKB-Evidence-analytics
9ffd629db30c41ced224ff2afdf132ce9276ae3f
[ "MIT" ]
null
null
null
class Writer: def __init__(self, outfile): self.outfile = outfile def writeHeader(self): pass def write(self, record): pass def writeFooter(self): pass
20
32
0.585
22
200
5.136364
0.5
0.19469
0
0
0
0
0
0
0
0
0
0
0.33
200
9
33
22.222222
0.843284
0
0
0.333333
0
0
0
0
0
0
0
0
0
1
0.444444
false
0.333333
0
0
0.555556
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
4
dcb89e9a780001e83deb03b52774e268f6a36cad
4,795
py
Python
ding/interaction/tests/interaction/test_simple.py
sailxjx/DI-engine
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
[ "Apache-2.0" ]
464
2021-07-08T07:26:33.000Z
2022-03-31T12:35:16.000Z
ding/interaction/tests/interaction/test_simple.py
sailxjx/DI-engine
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
[ "Apache-2.0" ]
177
2021-07-09T08:22:55.000Z
2022-03-31T07:35:22.000Z
ding/interaction/tests/interaction/test_simple.py
sailxjx/DI-engine
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
[ "Apache-2.0" ]
92
2021-07-08T12:16:37.000Z
2022-03-31T09:24:41.000Z
import pytest from requests import HTTPError from .bases import _TestInteractionBase, _random_slave_channel_and_port, _slave_endpoint, _get_master_endpoint from ..test_utils import random_port from ...master.task import TaskStatus @pytest.mark.unittest class TestInteractionSimple(_TestInteractionBase): @pytest.mark.execution_timeout(10.0, method='thread') def test_slave_launch(self): _slave_port, _channel = _random_slave_channel_and_port() slave_thread, open_slave_event, close_slave_event = _slave_endpoint(_slave_port, _channel) slave_thread.start() open_slave_event.wait() close_slave_event.set() slave_thread.join() @pytest.mark.execution_timeout(20.0, method='thread') def test_slave_simple_connection(self): _slave_port, _channel = _random_slave_channel_and_port() slave_thread, open_slave_event, close_slave_event = _slave_endpoint(_slave_port, _channel) slave_thread.start() open_slave_event.wait() try: _master_port = random_port() master = _get_master_endpoint(_master_port, _channel) with master: assert master.ping() with master.new_connection('conn', '127.0.0.1', _slave_port) as conn: assert conn.is_connected assert 'conn' in master assert master['conn'] == conn assert not conn.is_connected assert 'conn' not in master conn = master.new_connection('conn', '127.0.0.1', _slave_port) conn.connect() assert conn.is_connected assert 'conn' in master assert master['conn'] == conn conn.disconnect() assert not conn.is_connected assert 'conn' not in master conn = master.new_connection('conn', '127.0.0.1', _slave_port) conn.connect() assert conn.is_connected assert 'conn' in master assert master['conn'] == conn del master['conn'] assert not conn.is_connected assert 'conn' not in master finally: close_slave_event.set() slave_thread.join() @pytest.mark.execution_timeout(20.0, method='thread') def test_slave_simple_task(self): _slave_port, _channel = _random_slave_channel_and_port() slave_thread, open_slave_event, close_slave_event = _slave_endpoint(_slave_port, _channel) 
slave_thread.start() open_slave_event.wait() try: _master_port = random_port() master = _get_master_endpoint(_master_port, _channel) with master: with master.new_connection('conn', '127.0.0.1', _slave_port) as conn: task = conn.new_task({'a': 2, 'b': 3}) task.start().join() assert task.result == {'sum': 5} assert task.status == TaskStatus.COMPLETED _res_1, _res_2, _res_3 = None, None, None def _set_res_1(t, r): nonlocal _res_1 _res_1 = r['sum'] def _set_res_2(t, r): nonlocal _res_2 _res_2 = r def _set_res_3(t, r): nonlocal _res_3 _res_3 = r task = conn.new_task({'a': 2, 'b': 3}) \ .on_complete(_set_res_1).on_complete(_set_res_2) \ .on_fail(_set_res_3) task.start().join() assert task.result == {'sum': 5} assert task.status == TaskStatus.COMPLETED assert _res_1 == 5 assert _res_2 == {'sum': 5} assert _res_3 is None _res_1, _res_2, _res_3 = None, None, None task = conn.new_task({'a': 2, 'bb': 3}) \ .on_complete(_set_res_1).on_complete(_set_res_2) \ .on_fail(_set_res_3) task.start().join() assert task.result == {'message': 'ab not found'} assert task.status == TaskStatus.FAILED assert _res_1 is None assert _res_2 is None assert _res_3 == {'message': 'ab not found'} except HTTPError as err: print(err.response) print(err.response.content) print(err.request) raise err finally: close_slave_event.set() slave_thread.join()
36.603053
110
0.540563
536
4,795
4.453358
0.164179
0.050272
0.040218
0.052786
0.73607
0.73607
0.705907
0.705907
0.671135
0.651026
0
0.02324
0.371846
4,795
130
111
36.884615
0.769256
0
0
0.621359
0
0
0.034828
0
0
0
0
0
0.271845
1
0.058252
false
0
0.048544
0
0.116505
0.029126
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f4c964cd114e9760412d8cd89ccda43d27b5f493
269
py
Python
data/etl/001_merge_data/merge_app/utils/db.py
AleksNeStu/projects
1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb
[ "Apache-2.0" ]
2
2022-01-19T18:01:35.000Z
2022-02-06T06:54:38.000Z
data/etl/001_merge_data/merge_app/utils/db.py
AleksNeStu/projects
1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb
[ "Apache-2.0" ]
null
null
null
data/etl/001_merge_data/merge_app/utils/db.py
AleksNeStu/projects
1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb
[ "Apache-2.0" ]
null
null
null
def get_sql_lite_conn_str(db_file: str): db_file_stripped = db_file.strip() if not db_file or not db_file_stripped: # db_file = '../db/meet_app.db' raise Exception("SQL lite DB file is not specified.") return 'sqlite:///' + db_file_stripped
38.428571
61
0.680297
44
269
3.818182
0.477273
0.285714
0.25
0.190476
0.238095
0
0
0
0
0
0
0
0.208178
269
7
62
38.428571
0.788732
0.107807
0
0
0
0
0.1841
0
0
0
0
0
0
1
0.2
false
0
0
0
0.4
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f4e5b4f6fc998d8aee58fef8bd266f6447ada071
1,522
py
Python
BurstCube/tests/test_locStats.py
nkasmanoff/Simulation
38d47db79cebe8504a03424c564f2207ae2275ac
[ "MIT" ]
null
null
null
BurstCube/tests/test_locStats.py
nkasmanoff/Simulation
38d47db79cebe8504a03424c564f2207ae2275ac
[ "MIT" ]
15
2017-04-06T18:52:39.000Z
2019-08-15T17:48:40.000Z
BurstCube/tests/test_locStats.py
nkasmanoff/Simulation
38d47db79cebe8504a03424c564f2207ae2275ac
[ "MIT" ]
3
2017-06-13T17:54:29.000Z
2018-09-16T15:43:24.000Z
#!/usr/bin/env python import numpy as np from numpy.testing import assert_allclose try: from BurstCube.LocSim import Stats except ImportError: pass a = np.array([[[5.3, 55, 91, 100, 83, 42, 0, 0, 0, 0, 0, 0], [0.11, 0.44, 0.63, 0.68, 0.59, 0.37, 0, 0, 0, 0, 0, 0]], [[68, 85, 65, 11, 0, 0, 0, 0, 0, 0, 46, 100], [0.57, 0.67, 0.55, 0.19, 0, 0, 0, 0, 0, 0, 0.43, 0.76]]]) b = np.array([[[60, 59, 32, 0, 0, 0, 0, 0, 0, 19, 64, 100], [0.48, 0.48, 0.32, 0, 0, 0, 0, 0, 0, 0.23, 0.50, 0.69]], [[80, 72, 34, 0, 0, 0, 0, 0, 0, 20, 69, 100], [0.59, 0.54, 0.33, 0, 0, 0, 0, 0, 0, 0.24, 0.53, 0.69]]]) c = np.array([[[0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1], [1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1]], [[1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1], [1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1]]]) def test_calcNorms(): result = np.array([[198.99520095, 207.20060328], [51.33225107, 48.20788317]]) norms = Stats.calcNorms(a, b) assert_allclose(norms, result, 1e-6) def test_addErrors(): errors = Stats.addErrors(a, b) assert_allclose(errors, c, 1e-6) def test_calcNormsWithError(): result = np.array([[[199.7, 207.1], [52.3, 48.4]], [[198.3, 207.3], [50.4, 48.1]]]) norms_err = Stats.calcNormsWithError(a, b, c) assert_allclose(norms_err, result, 1e-3)
28.185185
72
0.450723
276
1,522
2.452899
0.300725
0.192024
0.23486
0.242245
0.153619
0.152142
0.125554
0.069424
0.069424
0.069424
0
0.294527
0.339685
1,522
53
73
28.716981
0.379104
0.013141
0
0
0
0
0
0
0
0
0
0
0.121212
1
0.090909
false
0.030303
0.121212
0
0.212121
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
f4fe5c0cc75ff4a55146fbb31caa331a8c0e5c8b
80
py
Python
run.py
pythonLearning-bigData/Snail
3d8aa4f23985bc7bd81ade94d0ad1c710e18cbe4
[ "BSD-3-Clause" ]
null
null
null
run.py
pythonLearning-bigData/Snail
3d8aa4f23985bc7bd81ade94d0ad1c710e18cbe4
[ "BSD-3-Clause" ]
null
null
null
run.py
pythonLearning-bigData/Snail
3d8aa4f23985bc7bd81ade94d0ad1c710e18cbe4
[ "BSD-3-Clause" ]
null
null
null
from web import views if __name__ == "__main__": views.app.run(debug=True)
16
29
0.7
12
80
4
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.175
80
4
30
20
0.727273
0
0
0
0
0
0.1
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
761c953f2c76fa9470d7ade6bed4fb35644c2fb3
325
py
Python
src/quiltz/id/testbuilders.py
qwaneu/quiltz
c6e5319848e44690de9ef292de88f6c71d53cf7f
[ "MIT" ]
null
null
null
src/quiltz/id/testbuilders.py
qwaneu/quiltz
c6e5319848e44690de9ef292de88f6c71d53cf7f
[ "MIT" ]
null
null
null
src/quiltz/id/testbuilders.py
qwaneu/quiltz
c6e5319848e44690de9ef292de88f6c71d53cf7f
[ "MIT" ]
null
null
null
import warnings warnings.warn("deprecated - use id.testbuilders from quiltz-domain instead", DeprecationWarning) from quiltz.id import ID import uuid def aValidUUID(simpleIdValue): return uuid.UUID("{:>32}".format(simpleIdValue).replace(' ', '1')) def aValidID(simpleIdValue): return ID(aValidUUID(simpleIdValue))
29.545455
96
0.763077
38
325
6.526316
0.578947
0.080645
0
0
0
0
0
0
0
0
0
0.010417
0.113846
325
10
97
32.5
0.850694
0
0
0
0
0
0.206154
0
0
0
0
0
0
1
0.25
false
0
0.375
0.25
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
4
761f3930d1350f03cce07c940434c88f15ed3ace
194
py
Python
psy/utils/__init__.py
cegfdb/IRT
20fcde3b385bce1644fecab7cdc8bda5beacda03
[ "MIT" ]
8
2018-11-25T12:57:53.000Z
2022-03-28T10:48:16.000Z
psy/utils/__init__.py
glan-wxl/IRT
1eed348fd3a8b9c27ea72d476ab2844113468eb8
[ "MIT" ]
null
null
null
psy/utils/__init__.py
glan-wxl/IRT
1eed348fd3a8b9c27ea72d476ab2844113468eb8
[ "MIT" ]
2
2019-09-28T09:09:41.000Z
2020-11-19T02:23:23.000Z
from probs import inverse_logistic, get_log_beta_pd, get_log_lognormal_pd, get_log_normal_pd, get_nodes_weights, \ r4beta from tools import cached_property from randoms import gen_item_bank
38.8
114
0.85567
32
194
4.71875
0.65625
0.119205
0.10596
0
0
0
0
0
0
0
0
0.005814
0.113402
194
4
115
48.5
0.872093
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
57ffe36d3610c7acb59eb994c5e7afa8af47680e
177
py
Python
crslab/data/dataloader/__init__.py
shubaoyu/CRSLab
a05730e8b2c03df278587be34923fa818945d4c4
[ "MIT" ]
1
2021-03-13T11:29:23.000Z
2021-03-13T11:29:23.000Z
crslab/data/dataloader/__init__.py
cy333/CRSLab
f4fded43c65b71045eb0ebb922c05753ec6c9db5
[ "MIT" ]
null
null
null
crslab/data/dataloader/__init__.py
cy333/CRSLab
f4fded43c65b71045eb0ebb922c05753ec6c9db5
[ "MIT" ]
1
2021-03-28T11:02:05.000Z
2021-03-28T11:02:05.000Z
from .base import BaseDataLoader from .kbrd import KBRDDataLoader from .kgsf import KGSFDataLoader from .redial import ReDialDataLoader from .tgredial import TGReDialDataLoader
29.5
40
0.858757
20
177
7.6
0.6
0
0
0
0
0
0
0
0
0
0
0
0.112994
177
5
41
35.4
0.968153
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
521de77f8050dbffc505370f87ff8bc71ec3c853
62
py
Python
toki/Kelas Pembelajaran Pemrograman/1. Pemrograman Dasar/12. Rekursi/E.py
andraantariksa/code-exercise-answer
69b7dbdc081cdb094cb110a72bc0c9242d3d344d
[ "MIT" ]
1
2019-11-06T15:17:48.000Z
2019-11-06T15:17:48.000Z
toki/Kelas Pembelajaran Pemrograman/1. Pemrograman Dasar/12. Rekursi/E.py
andraantariksa/code-exercise-answer
69b7dbdc081cdb094cb110a72bc0c9242d3d344d
[ "MIT" ]
null
null
null
toki/Kelas Pembelajaran Pemrograman/1. Pemrograman Dasar/12. Rekursi/E.py
andraantariksa/code-exercise-answer
69b7dbdc081cdb094cb110a72bc0c9242d3d344d
[ "MIT" ]
1
2018-11-13T08:43:26.000Z
2018-11-13T08:43:26.000Z
'''input 73 ''' print(str(bin(int(input('')))).split('0b')[1])
15.5
46
0.548387
10
62
3.4
0.9
0
0
0
0
0
0
0
0
0
0
0.067797
0.048387
62
4
46
15.5
0.508475
0.129032
0
0
0
0
0.041667
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
4
5227ae6e15bbe0f1ac6e44aba38db00732e259da
236
py
Python
emukit/quadrature/interfaces/__init__.py
ndalchau/emukit
eb6754ea016a7cd82b275bb4075676b5ed662634
[ "Apache-2.0" ]
152
2020-10-24T13:12:57.000Z
2022-03-25T11:35:41.000Z
emukit/quadrature/interfaces/__init__.py
Tony-Chiong/emukit
a068c8d5e06b2ae8b038f67bf2e4f66c4d91651a
[ "Apache-2.0" ]
87
2020-10-26T10:29:25.000Z
2022-03-04T11:17:59.000Z
emukit/quadrature/interfaces/__init__.py
Tony-Chiong/emukit
a068c8d5e06b2ae8b038f67bf2e4f66c4d91651a
[ "Apache-2.0" ]
41
2020-10-24T11:59:21.000Z
2022-03-22T17:08:30.000Z
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 from .base_gp import IBaseGaussianProcess # noqa: F401 from .standard_kernels import IRBF, IStandardKernel # noqa: F401
33.714286
73
0.779661
32
236
5.6875
0.875
0.087912
0
0
0
0
0
0
0
0
0
0.059113
0.139831
236
6
74
39.333333
0.837438
0.54661
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
525124f3a1434d6b9bd6f76c1f9e2d4003a7a72e
261
py
Python
ctapipe/image/muon/__init__.py
cta-sst-1m/ctapipe
10b058f8dcc166177d1eb5b2af638ca37722a021
[ "BSD-3-Clause" ]
1
2020-05-18T12:41:51.000Z
2020-05-18T12:41:51.000Z
ctapipe/image/muon/__init__.py
cta-sst-1m/ctapipe
10b058f8dcc166177d1eb5b2af638ca37722a021
[ "BSD-3-Clause" ]
null
null
null
ctapipe/image/muon/__init__.py
cta-sst-1m/ctapipe
10b058f8dcc166177d1eb5b2af638ca37722a021
[ "BSD-3-Clause" ]
1
2021-07-08T16:15:13.000Z
2021-07-08T16:15:13.000Z
from .fitting import kundu_chaudhuri_circle_fit from .features import * from .ring_fitter import MuonRingFitter from .intensity_fitter import MuonIntensityFitter __all__ = [ 'MuonIntensityFitter', 'MuonRingFitter', 'kundu_chaudhuri_circle_fit', ]
21.75
49
0.793103
27
261
7.222222
0.518519
0.14359
0.205128
0.235897
0
0
0
0
0
0
0
0
0.141762
261
11
50
23.727273
0.870536
0
0
0
0
0
0.226054
0.099617
0
0
0
0
0
1
0
false
0
0.444444
0
0.444444
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
525137fcb87c264c2e008ac31b18ca912ebefdb3
90
py
Python
codes_auto/547.friend-circles.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/547.friend-circles.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
codes_auto/547.friend-circles.py
smartmark-pro/leetcode_record
6504b733d892a705571eb4eac836fb10e94e56db
[ "MIT" ]
null
null
null
# # @lc app=leetcode.cn id=547 lang=python3 # # [547] friend-circles # None # @lc code=end
12.857143
41
0.666667
15
90
4
0.866667
0
0
0
0
0
0
0
0
0
0
0.092105
0.155556
90
7
42
12.857143
0.697368
0.811111
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
527b12a2ca994afd7b258a2182e877d6e5a6088d
1,390
py
Python
src/LivestockCV/core/transform/__init__.py
peschelgroup/LivestockCV
e5746af75935d5000ba3ad26d09b6868fae76b76
[ "MIT" ]
null
null
null
src/LivestockCV/core/transform/__init__.py
peschelgroup/LivestockCV
e5746af75935d5000ba3ad26d09b6868fae76b76
[ "MIT" ]
null
null
null
src/LivestockCV/core/transform/__init__.py
peschelgroup/LivestockCV
e5746af75935d5000ba3ad26d09b6868fae76b76
[ "MIT" ]
null
null
null
from LivestockCV.core.transform.color_correction import get_color_matrix from LivestockCV.core.transform.color_correction import get_matrix_m from LivestockCV.core.transform.color_correction import calc_transformation_matrix from LivestockCV.core.transform.color_correction import apply_transformation_matrix from LivestockCV.core.transform.color_correction import save_matrix from LivestockCV.core.transform.color_correction import load_matrix from LivestockCV.core.transform.color_correction import correct_color from LivestockCV.core.transform.color_correction import create_color_card_mask from LivestockCV.core.transform.color_correction import quick_color_check from LivestockCV.core.transform.color_correction import find_color_card from LivestockCV.core.transform.rescale import rescale from LivestockCV.core.transform.rotate import rotate from LivestockCV.core.transform.nonuniform_illumination import nonuniform_illumination from LivestockCV.core.transform.resize import resize, resize_factor from LivestockCV.core.transform.warp import warp __all__ = ["get_color_matrix", "get_matrix_m", "calc_transformation_matrix", "apply_transformation_matrix", "save_matrix", "load_matrix", "correct_color", "create_color_card_mask", "quick_color_check", "find_color_card", "rescale", "nonuniform_illumination", "resize", "resize_factor", "warp", "rotate"]
66.190476
107
0.84964
174
1,390
6.471264
0.172414
0.199822
0.253108
0.373002
0.492007
0.492007
0.492007
0.31794
0.122558
0
0
0
0.081295
1,390
20
108
69.5
0.881754
0
0
0
0
0
0.164748
0.070504
0
0
0
0
0
1
0
false
0
0.789474
0
0.789474
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
528ecbef039d0c804e5e9e379524884289148c4d
194
py
Python
backend/user/api/serializers.py
salva-imm/story-
6228634c18ebbf447add5069d39aa8ce040f3cd1
[ "MIT" ]
null
null
null
backend/user/api/serializers.py
salva-imm/story-
6228634c18ebbf447add5069d39aa8ce040f3cd1
[ "MIT" ]
null
null
null
backend/user/api/serializers.py
salva-imm/story-
6228634c18ebbf447add5069d39aa8ce040f3cd1
[ "MIT" ]
null
null
null
from datetime import datetime from utils.base import BaseOrmModelSerializer class UsersSerializer(BaseOrmModelSerializer): id: int username: str email: str join_date: datetime
19.4
46
0.773196
21
194
7.095238
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.185567
194
9
47
21.555556
0.943038
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.285714
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4